path: root/lib
Diffstat (limited to 'lib')
-rw-r--r--lib/.gitignore2
-rw-r--r--lib/asn1/doc/src/Makefile4
-rw-r--r--lib/asn1/doc/src/asn1_getting_started.xml1290
-rw-r--r--lib/asn1/doc/src/asn1_introduction.xml99
-rw-r--r--lib/asn1/doc/src/asn1_overview.xml49
-rw-r--r--lib/asn1/doc/src/asn1_spec.xmlsrc522
-rw-r--r--lib/asn1/doc/src/asn1_ug.xml1417
-rw-r--r--lib/asn1/doc/src/asn1ct.xml334
-rw-r--r--lib/asn1/doc/src/asn1rt.xml26
-rw-r--r--lib/asn1/doc/src/notes.xml15
-rw-r--r--lib/asn1/doc/src/part.xml9
-rw-r--r--lib/asn1/doc/src/ref_man.xml6
-rw-r--r--lib/asn1/src/Makefile1
-rw-r--r--lib/asn1/src/asn1.app.src2
-rw-r--r--lib/asn1/src/asn1_db.erl8
-rw-r--r--lib/asn1/src/asn1_records.hrl16
-rw-r--r--lib/asn1/src/asn1ct.erl71
-rw-r--r--lib/asn1/src/asn1ct_check.erl5271
-rw-r--r--lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl13
-rw-r--r--lib/asn1/src/asn1ct_constructed_per.erl155
-rw-r--r--lib/asn1/src/asn1ct_gen.erl91
-rw-r--r--lib/asn1/src/asn1ct_gen_ber_bin_v2.erl109
-rw-r--r--lib/asn1/src/asn1ct_imm.erl47
-rw-r--r--lib/asn1/src/asn1ct_parser.yrl1177
-rw-r--r--lib/asn1/src/asn1ct_parser2.erl2517
-rw-r--r--lib/asn1/src/asn1ct_tok.erl332
-rw-r--r--lib/asn1/src/asn1ct_value.erl5
-rw-r--r--lib/asn1/test/Makefile10
-rw-r--r--lib/asn1/test/asn1_SUITE.erl120
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn6
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn18
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn18
-rw-r--r--lib/asn1/test/asn1_SUITE_data/BadValueSet.asn19
-rw-r--r--lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ChoExtension.asn16
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn127
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn160
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Constraints.py44
-rw-r--r--lib/asn1/test/asn1_SUITE_data/CoverParser.asn157
-rw-r--r--lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn31
-rw-r--r--lib/asn1/test/asn1_SUITE_data/EnumExt.asn12
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Example.asn120
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Export1.asn7
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Exporting.asn118
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn130
-rw-r--r--lib/asn1/test/asn1_SUITE_data/IllegalExport.asn17
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Importing.asn120
-rw-r--r--lib/asn1/test/asn1_SUITE_data/InfObj.asn188
-rw-r--r--lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1136
-rw-r--r--lib/asn1/test/asn1_SUITE_data/MissingEnd.asn15
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn11
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ParamBasic.asn133
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Prim.asn12
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SelectionType.asn8
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Seq.py5
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn6
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn10
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn110
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn110
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Syntax.py10
-rw-r--r--lib/asn1/test/asn1_SUITE_data/ValueTest.asn96
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ACSE-1.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ACSE-1.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1466
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn159
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/AuthenticationFramework.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/AuthenticationFramework.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/BasicAccessControl.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/BasicAccessControl.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/CertificateExtensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/CertificateExtensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Character-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Character-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Character-Presentation-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Character-Presentation-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Character-Profile-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Character-Profile-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Colour-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Colour-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1463
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1248
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DOR-definition.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DOR-definition.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DSAOperationalAttributeTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DSAOperationalAttributeTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Default-Value-Lists.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Default-Value-Lists.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAccessProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryAccessProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryInformationShadowProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryInformationShadowProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingManagementProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingManagementProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryProtectionMappings.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryProtectionMappings.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryShadowAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectoryShadowAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DirectorySystemProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DirectorySystemProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/DistributedOperations.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/DistributedOperations.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Document-Profile-Descriptor.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Document-Profile-Descriptor.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/EnhancedSecurity.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/EnhancedSecurity.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1543
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/External-References.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/External-References.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GULSProtectionMappings.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GULSProtectionMappings.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GenericProtectingTransferSyntax.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GenericProtectingTransferSyntax.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Presentation-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Presentation-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Profile-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Profile-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityExchanges.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityExchanges.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityTransformations.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityTransformations.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/HierarchicalOperationalBindings.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/HierarchicalOperationalBindings.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAutoActionTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSAutoActionTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes2.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes2.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedVoiceBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedVoiceBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFileTransferBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSFileTransferBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedContentBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedContentBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedReportBodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedReportBodyPartType.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFunctionalObjects.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSFunctionalObjects.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSHeadingExtensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSHeadingExtensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSInformationObjects.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSInformationObjects.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSMessageStoreAttributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSMessageStoreAttributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers2.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers2.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSSecurityExtensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSSecurityExtensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/IPMSUpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/IPMSUpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ISO8571-FTAM.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ISO8571-FTAM.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ISO9541-SN.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ISO9541-SN.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Identifiers-and-Expressions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Identifiers-and-Expressions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/InformationFramework.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/InformationFramework.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Interchange-Data-Elements.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Interchange-Data-Elements.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Layout-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Layout-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Link-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Link-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Location-Expressions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Location-Expressions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Logical-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Logical-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MHSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MHSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MHSProtocolObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MHSProtocolObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSAccessProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSAccessProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAttributeTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAttributeTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAutoActionTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAutoActionTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSMatchingRules.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSMatchingRules.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MSUpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MSUpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTAAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTAAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService88.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService88.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSAccessProtocol.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSAccessProtocol.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/MTSUpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/MTSUpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Notation.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Notation.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1183
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/OLD-PKCS7.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/PKCS7.asn)2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/OperationalBindingManagement.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/OperationalBindingManagement.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn156
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1174
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1202
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1326
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn161
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1391
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKCS7BodyPartType.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/PKCS7BodyPartType.asn)2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1166
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1300
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1308
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1415
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1447
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1528
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1292
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1495
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1409
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Protected-Part-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Protected-Part-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/ProtocolObjectIdentifiers.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/ProtocolObjectIdentifiers.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Presentation-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Presentation-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Profile-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Profile-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Reliable-Transfer-APDU.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Reliable-Transfer-APDU.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Abstract-Syntaxes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Abstract-Syntaxes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Generic-ROS-PDUs.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Generic-ROS-PDUs.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects-extensions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects-extensions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Realizations.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Realizations.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Useful-Definitions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Useful-Definitions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1608
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1122
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SelectedAttributeTypes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/SelectedAttributeTypes.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SeseAPDUs.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/SeseAPDUs.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/SpkmGssTokens.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/SpkmGssTokens.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Style-Descriptors.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Style-Descriptors.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Subprofiles.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Subprofiles.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Temporal-Relationships.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Temporal-Relationships.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Text-Units.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Text-Units.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/UpperBounds.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/UpperBounds.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/UsefulDefinitions.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/UsefulDefinitions.asn)0
-rw-r--r--lib/asn1/test/asn1_SUITE_data/rfcs/Videotex-Coding-Attributes.asn1 (renamed from lib/asn1/test/asn1_SUITE_data/x420/Videotex-Coding-Attributes.asn)0
-rw-r--r--lib/asn1/test/asn1_test_lib.erl25
-rw-r--r--lib/asn1/test/error_SUITE.erl725
-rw-r--r--lib/asn1/test/syntax_SUITE.erl340
-rw-r--r--lib/asn1/test/testChoExtension.erl5
-rw-r--r--lib/asn1/test/testConstraints.erl22
-rw-r--r--lib/asn1/test/testDoubleEllipses.erl14
-rw-r--r--lib/asn1/test/testEnumExt.erl28
-rw-r--r--lib/asn1/test/testExtensibilityImplied.erl29
-rw-r--r--lib/asn1/test/testImporting.erl34
-rw-r--r--lib/asn1/test/testInfObj.erl39
-rw-r--r--lib/asn1/test/testInfObjExtract.erl72
-rw-r--r--lib/asn1/test/testParamBasic.erl8
-rw-r--r--lib/asn1/test/testPrim.erl5
-rw-r--r--lib/asn1/test/testPrimStrings.erl3
-rw-r--r--lib/asn1/test/testRfcs.erl75
-rw-r--r--lib/asn1/test/testSelectionTypes.erl28
-rw-r--r--lib/asn1/test/testUniqueObjectSets.erl175
-rw-r--r--lib/asn1/test/testValueTest.erl114
-rw-r--r--lib/asn1/test/testX420.erl93
-rw-r--r--lib/asn1/test/test_compile_options.erl39
-rw-r--r--lib/asn1/vsn.mk3
-rw-r--r--lib/common_test/doc/src/event_handler_chapter.xml9
-rw-r--r--lib/common_test/doc/src/install_chapter.xml63
-rw-r--r--lib/common_test/doc/src/notes.xml220
-rw-r--r--lib/common_test/doc/src/run_test_chapter.xml25
-rw-r--r--lib/common_test/install.sh.in53
-rw-r--r--lib/common_test/priv/Makefile.in5
-rw-r--r--lib/common_test/priv/run_test.in63
-rw-r--r--lib/common_test/src/Makefile2
-rw-r--r--lib/common_test/src/common_test.app.src11
-rw-r--r--lib/common_test/src/ct.erl59
-rw-r--r--lib/common_test/src/ct_config.erl3
-rw-r--r--lib/common_test/src/ct_conn_log_h.erl8
-rw-r--r--lib/common_test/src/ct_cover.erl127
-rw-r--r--lib/common_test/src/ct_framework.erl125
-rw-r--r--lib/common_test/src/ct_gen_conn.erl5
-rw-r--r--lib/common_test/src/ct_logs.erl73
-rw-r--r--lib/common_test/src/ct_master.erl13
-rw-r--r--lib/common_test/src/ct_master_logs.erl8
-rw-r--r--lib/common_test/src/ct_netconfc.erl165
-rw-r--r--lib/common_test/src/ct_release_test.erl137
-rw-r--r--lib/common_test/src/ct_run.erl154
-rw-r--r--lib/common_test/src/ct_telnet.erl190
-rw-r--r--lib/common_test/src/ct_telnet_client.erl16
-rw-r--r--lib/common_test/src/ct_testspec.erl34
-rw-r--r--lib/common_test/src/ct_util.hrl1
-rw-r--r--lib/common_test/src/ct_webtool.erl1207
-rw-r--r--lib/common_test/src/ct_webtool_sup.erl74
-rw-r--r--lib/common_test/src/cth_surefire.erl16
-rw-r--r--lib/common_test/src/vts.erl10
-rw-r--r--lib/common_test/test/ct_auto_compile_SUITE.erl48
-rw-r--r--lib/common_test/test/ct_config_SUITE_data/config/test/config_dynamic_SUITE.erl4
-rw-r--r--lib/common_test/test/ct_config_SUITE_data/config/test/config_server.erl2
-rw-r--r--lib/common_test/test/ct_cover_SUITE.erl79
-rw-r--r--lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl4
-rw-r--r--lib/common_test/test/ct_error_SUITE.erl3
-rw-r--r--lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_12_SUITE.erl10
-rw-r--r--lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_13_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_14_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_3_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_7_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_8_SUITE.erl4
-rw-r--r--lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_1_SUITE.erl18
-rw-r--r--lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_2_SUITE.erl5
-rw-r--r--lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_helper.erl2
-rw-r--r--lib/common_test/test/ct_error_SUITE_data/error/test/verify_config.erl2
-rw-r--r--lib/common_test/test/ct_event_handler_SUITE.erl38
-rw-r--r--lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl105
-rw-r--r--lib/common_test/test/ct_gen_conn_SUITE_data/conn_SUITE.erl34
-rw-r--r--lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl6
-rw-r--r--lib/common_test/test/ct_groups_test_1_SUITE.erl20
-rw-r--r--lib/common_test/test/ct_groups_test_1_SUITE_data/groups_1/test/groups_12_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_groups_test_2_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_groups_test_2_SUITE_data/groups_2/groups_22_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_hooks_SUITE.erl32
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl9
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_update_config_SUITE.erl8
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/cth_log_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl17
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl4
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl21
-rw-r--r--lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl38
-rw-r--r--lib/common_test/test/ct_netconfc_SUITE_data/ns.erl7
-rw-r--r--lib/common_test/test/ct_pre_post_test_io_SUITE.erl12
-rw-r--r--lib/common_test/test/ct_repeat_1_SUITE.erl39
-rw-r--r--lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_sequence_1_SUITE.erl21
-rw-r--r--lib/common_test/test/ct_shell_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_skip_SUITE_data/skip/test/auto_skip_4_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_smoke_test_SUITE.erl10
-rw-r--r--lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE.erl4
-rw-r--r--lib/common_test/test/ct_telnet_SUITE.erl4
-rw-r--r--lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl56
-rw-r--r--lib/common_test/test/ct_test_server_if_1_SUITE.erl25
-rw-r--r--lib/common_test/test/ct_test_server_if_1_SUITE_data/test_server_if/test/ts_if_1_SUITE.erl8
-rw-r--r--lib/common_test/test/ct_test_support.erl36
-rw-r--r--lib/common_test/test/ct_testspec_1_SUITE.erl8
-rw-r--r--lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_12_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_22_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl28
-rw-r--r--lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl2
-rw-r--r--lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl27
-rw-r--r--lib/common_test/test/telnet_server.erl20
-rw-r--r--lib/common_test/vsn.mk2
-rw-r--r--lib/compiler/doc/src/compile.xml506
-rw-r--r--lib/compiler/doc/src/notes.xml52
-rw-r--r--lib/compiler/doc/src/ref_man.xml2
-rw-r--r--lib/compiler/src/Makefile7
-rw-r--r--lib/compiler/src/beam_a.erl7
-rw-r--r--lib/compiler/src/beam_asm.erl52
-rw-r--r--lib/compiler/src/beam_block.erl34
-rw-r--r--lib/compiler/src/beam_bool.erl78
-rw-r--r--lib/compiler/src/beam_bsm.erl22
-rw-r--r--lib/compiler/src/beam_clean.erl33
-rw-r--r--lib/compiler/src/beam_dead.erl729
-rw-r--r--lib/compiler/src/beam_dict.erl80
-rw-r--r--lib/compiler/src/beam_flatten.erl4
-rw-r--r--lib/compiler/src/beam_jump.erl114
-rw-r--r--lib/compiler/src/beam_listing.erl13
-rw-r--r--lib/compiler/src/beam_peep.erl15
-rw-r--r--lib/compiler/src/beam_split.erl5
-rw-r--r--lib/compiler/src/beam_trim.erl6
-rw-r--r--lib/compiler/src/beam_type.erl54
-rw-r--r--lib/compiler/src/beam_utils.erl80
-rw-r--r--lib/compiler/src/beam_validator.erl528
-rw-r--r--lib/compiler/src/beam_z.erl21
-rw-r--r--lib/compiler/src/cerl.erl88
-rw-r--r--lib/compiler/src/cerl_clauses.erl46
-rw-r--r--lib/compiler/src/cerl_inline.erl61
-rw-r--r--lib/compiler/src/cerl_sets.erl206
-rw-r--r--lib/compiler/src/cerl_trees.erl2
-rw-r--r--lib/compiler/src/compile.erl130
-rw-r--r--lib/compiler/src/compiler.app.src6
-rw-r--r--lib/compiler/src/core_lib.erl64
-rw-r--r--lib/compiler/src/core_lint.erl2
-rw-r--r--lib/compiler/src/core_parse.hrl3
-rw-r--r--lib/compiler/src/core_parse.yrl98
-rw-r--r--lib/compiler/src/core_pp.erl22
-rw-r--r--lib/compiler/src/core_scan.erl6
-rw-r--r--lib/compiler/src/erl_bifs.erl1
-rw-r--r--lib/compiler/src/sys_core_fold.erl1739
-rw-r--r--lib/compiler/src/sys_core_fold_lists.erl386
-rw-r--r--lib/compiler/src/sys_core_inline.erl8
-rw-r--r--lib/compiler/src/sys_pre_expand.erl19
-rw-r--r--lib/compiler/src/v3_codegen.erl197
-rw-r--r--lib/compiler/src/v3_core.erl512
-rw-r--r--lib/compiler/src/v3_kernel.erl187
-rw-r--r--lib/compiler/src/v3_life.erl186
-rw-r--r--lib/compiler/test/Makefile14
-rw-r--r--lib/compiler/test/andor_SUITE.erl22
-rw-r--r--lib/compiler/test/beam_utils_SUITE.erl236
-rw-r--r--lib/compiler/test/beam_validator_SUITE.erl256
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/bad_catch_try.S8
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S6
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S47
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/bin_match.S64
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S38
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/dead_code.S25
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam bin 17460 -> 0 bytes
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/freg_range.S4
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/freg_state.S2
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S14
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S26
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S29
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S3
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S4
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/stack.S4
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/state_after_fault_in_catch.S2
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/undef_label.S22
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/uninit.S16
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/unsafe_catch.S8
-rw-r--r--lib/compiler/test/beam_validator_SUITE_data/xrange.S4
-rw-r--r--lib/compiler/test/bs_bit_binaries_SUITE.erl2
-rw-r--r--lib/compiler/test/bs_construct_SUITE.erl2
-rw-r--r--lib/compiler/test/bs_match_SUITE.erl76
-rw-r--r--lib/compiler/test/bs_shadowed_size_var.core25
-rw-r--r--lib/compiler/test/compilation_SUITE.erl65
-rw-r--r--lib/compiler/test/compile_SUITE.erl96
-rw-r--r--lib/compiler/test/compile_SUITE_data/dialyzer_test.erl39
-rw-r--r--lib/compiler/test/core_SUITE.erl14
-rw-r--r--lib/compiler/test/core_SUITE_data/bs_shadowed_size_var.core66
-rw-r--r--lib/compiler/test/core_SUITE_data/map_core_test.core12
-rw-r--r--lib/compiler/test/core_fold_SUITE.erl97
-rw-r--r--lib/compiler/test/core_fold_SUITE_data/nested_call_in_case.core (renamed from lib/compiler/test/nested_call_in_case.core)3
-rw-r--r--lib/compiler/test/core_fold_SUITE_data/unused_multiple_values_error.core (renamed from lib/compiler/test/unused_multiple_values_error.core)0
-rw-r--r--lib/compiler/test/error_SUITE.erl12
-rw-r--r--lib/compiler/test/float_SUITE.erl2
-rw-r--r--lib/compiler/test/guard_SUITE.erl294
-rw-r--r--lib/compiler/test/lc_SUITE.erl102
-rw-r--r--lib/compiler/test/map_SUITE.erl1033
-rw-r--r--lib/compiler/test/match_SUITE.erl51
-rw-r--r--lib/compiler/test/misc_SUITE.erl37
-rw-r--r--lib/compiler/test/receive_SUITE.erl38
-rw-r--r--lib/compiler/test/record_SUITE.erl8
-rw-r--r--lib/compiler/test/test_lib.erl29
-rw-r--r--lib/compiler/test/trycatch_SUITE.erl20
-rw-r--r--lib/compiler/test/warnings_SUITE.erl128
-rw-r--r--lib/compiler/test/z_SUITE.erl62
-rw-r--r--lib/compiler/vsn.mk2
-rw-r--r--lib/cosEvent/src/cosEvent.app.src2
-rw-r--r--lib/cosEvent/src/oe_CosEventComm_PullerS_impl.erl5
-rw-r--r--lib/cosEvent/vsn.mk3
-rw-r--r--lib/cosEventDomain/src/cosEventDomain.app.src2
-rw-r--r--lib/cosEventDomain/src/cosEventDomainApp.erl34
-rw-r--r--lib/cosEventDomain/vsn.mk3
-rw-r--r--lib/cosFileTransfer/src/cosFileTransfer.app.src2
-rw-r--r--lib/cosFileTransfer/src/cosFileTransferApp.erl7
-rw-r--r--lib/cosFileTransfer/test/fileTransfer_SUITE.erl5
-rw-r--r--lib/cosFileTransfer/vsn.mk2
-rw-r--r--lib/cosNotification/src/CosNotification_Common.erl47
-rw-r--r--lib/cosNotification/src/CosNotification_Definitions.hrl6
-rw-r--r--lib/cosNotification/src/PullerSupplier_impl.erl4
-rw-r--r--lib/cosNotification/src/cosNotification.app.src2
-rw-r--r--lib/cosNotification/src/cosNotificationApp.erl16
-rw-r--r--lib/cosNotification/src/cosNotification_eventDB.erl32
-rw-r--r--lib/cosNotification/test/notify_test_impl.erl4
-rw-r--r--lib/cosNotification/vsn.mk2
-rw-r--r--lib/cosProperty/src/CosPropertyService_PropertySetDef_impl.erl6
-rw-r--r--lib/cosProperty/src/cosProperty.app.src2
-rw-r--r--lib/cosProperty/src/cosProperty.erl7
-rw-r--r--lib/cosProperty/vsn.mk2
-rw-r--r--lib/cosTime/src/CosTime_TimeService_impl.erl2
-rw-r--r--lib/cosTime/src/cosTime.app.src2
-rw-r--r--lib/cosTime/src/cosTime.erl5
-rw-r--r--lib/cosTime/src/cosTimeApp.hrl2
-rw-r--r--lib/cosTime/vsn.mk3
-rw-r--r--lib/cosTransactions/src/CosTransactions_TransactionFactory_impl.erl6
-rw-r--r--lib/cosTransactions/src/ETraP_Common.erl17
-rw-r--r--lib/cosTransactions/src/ETraP_Server_impl.erl5
-rw-r--r--lib/cosTransactions/src/cosTransactions.app.src2
-rw-r--r--lib/cosTransactions/vsn.mk2
-rw-r--r--lib/crypto/c_src/crypto.c145
-rw-r--r--lib/crypto/doc/src/crypto.xml36
-rw-r--r--lib/crypto/doc/src/notes.xml17
-rw-r--r--lib/crypto/src/crypto.erl34
-rw-r--r--lib/crypto/test/crypto_SUITE.erl214
-rw-r--r--lib/crypto/test/old_crypto_SUITE.erl6
-rw-r--r--lib/crypto/vsn.mk2
-rw-r--r--lib/debugger/doc/src/notes.xml15
-rw-r--r--lib/debugger/src/dbg_icmd.erl2
-rw-r--r--lib/debugger/src/dbg_ieval.erl50
-rw-r--r--lib/debugger/src/dbg_iload.erl381
-rw-r--r--lib/debugger/src/dbg_wx_settings.erl10
-rw-r--r--lib/debugger/src/dbg_wx_trace.erl2
-rw-r--r--lib/debugger/src/debugger.app.src4
-rw-r--r--lib/debugger/src/int.erl4
-rw-r--r--lib/debugger/test/int_eval_SUITE.erl2
-rw-r--r--lib/debugger/test/map_SUITE.erl1311
-rw-r--r--lib/debugger/vsn.mk2
-rw-r--r--lib/dialyzer/doc/src/dialyzer.xml98
-rw-r--r--lib/dialyzer/doc/src/notes.xml15
-rw-r--r--lib/dialyzer/src/dialyzer.app.src6
-rw-r--r--lib/dialyzer/src/dialyzer.erl47
-rw-r--r--lib/dialyzer/src/dialyzer.hrl12
-rw-r--r--lib/dialyzer/src/dialyzer_analysis_callgraph.erl159
-rw-r--r--lib/dialyzer/src/dialyzer_behaviours.erl18
-rw-r--r--lib/dialyzer/src/dialyzer_cl.erl125
-rw-r--r--lib/dialyzer/src/dialyzer_cl_parse.erl21
-rw-r--r--lib/dialyzer/src/dialyzer_codeserver.erl32
-rw-r--r--lib/dialyzer/src/dialyzer_contracts.erl125
-rw-r--r--lib/dialyzer/src/dialyzer_dataflow.erl158
-rw-r--r--lib/dialyzer/src/dialyzer_options.erl16
-rw-r--r--lib/dialyzer/src/dialyzer_races.erl17
-rw-r--r--lib/dialyzer/src/dialyzer_succ_typings.erl46
-rw-r--r--lib/dialyzer/src/dialyzer_timing.erl10
-rw-r--r--lib/dialyzer/src/dialyzer_typesig.erl14
-rw-r--r--lib/dialyzer/src/dialyzer_utils.erl328
-rw-r--r--lib/dialyzer/test/dialyzer_SUITE.erl41
-rw-r--r--lib/dialyzer/test/opaque_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/plt_SUITE.erl159
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/race_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl19
-rw-r--r--lib/dialyzer/test/small_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed2
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes36
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/maps_sum4
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/request12
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/suppress_request6
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl528
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl525
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl15
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl12
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/ditrap.erl47
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/inv_mult.erl15
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl23
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/maps_sum.erl31
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/request1.erl12
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl50
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/suppression1.erl33
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/suppression2.erl32
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/suppression3.erl17
-rw-r--r--lib/dialyzer/test/underspecs_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/user_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/vsn.mk2
-rw-r--r--lib/diameter/doc/src/diameter.xml171
-rw-r--r--lib/diameter/doc/src/diameter_dict.xml9
-rw-r--r--lib/diameter/doc/src/notes.xml254
-rw-r--r--lib/diameter/doc/src/seealso.ent8
-rw-r--r--lib/diameter/examples/code/GNUmakefile4
-rw-r--r--lib/diameter/examples/code/client.erl53
-rw-r--r--lib/diameter/examples/code/node.erl174
-rw-r--r--lib/diameter/examples/code/peer.erl150
-rw-r--r--lib/diameter/examples/code/relay.erl43
-rw-r--r--lib/diameter/examples/code/server.erl46
-rw-r--r--lib/diameter/examples/code/server_cb.erl4
-rw-r--r--lib/diameter/include/diameter_gen.hrl61
-rw-r--r--lib/diameter/src/base/diameter.erl11
-rw-r--r--lib/diameter/src/base/diameter_capx.erl40
-rw-r--r--lib/diameter/src/base/diameter_codec.erl115
-rw-r--r--lib/diameter/src/base/diameter_config.erl44
-rw-r--r--lib/diameter/src/base/diameter_lib.erl156
-rw-r--r--lib/diameter/src/base/diameter_peer.erl27
-rw-r--r--lib/diameter/src/base/diameter_peer_fsm.erl279
-rw-r--r--lib/diameter/src/base/diameter_reg.erl7
-rw-r--r--lib/diameter/src/base/diameter_service.erl143
-rw-r--r--lib/diameter/src/base/diameter_service_sup.erl4
-rw-r--r--lib/diameter/src/base/diameter_session.erl4
-rw-r--r--lib/diameter/src/base/diameter_stats.erl6
-rw-r--r--lib/diameter/src/base/diameter_sup.erl4
-rw-r--r--lib/diameter/src/base/diameter_sync.erl5
-rw-r--r--lib/diameter/src/base/diameter_traffic.erl452
-rw-r--r--lib/diameter/src/base/diameter_types.erl188
-rw-r--r--lib/diameter/src/base/diameter_watchdog.erl69
-rw-r--r--lib/diameter/src/compiler/diameter_codegen.erl4
-rw-r--r--lib/diameter/src/compiler/diameter_forms.hrl14
-rw-r--r--lib/diameter/src/diameter.appup.src80
-rw-r--r--lib/diameter/src/modules.mk4
-rw-r--r--lib/diameter/src/transport/diameter_sctp.erl274
-rw-r--r--lib/diameter/src/transport/diameter_tcp.erl37
-rw-r--r--lib/diameter/src/transport/diameter_transport_sup.erl4
-rw-r--r--lib/diameter/test/diameter_3xxx_SUITE.erl200
-rw-r--r--lib/diameter/test/diameter_app_SUITE.erl97
-rw-r--r--lib/diameter/test/diameter_capx_SUITE.erl18
-rw-r--r--lib/diameter/test/diameter_codec_SUITE.erl188
-rw-r--r--lib/diameter/test/diameter_codec_test.erl24
-rw-r--r--lib/diameter/test/diameter_config_SUITE.erl25
-rw-r--r--lib/diameter/test/diameter_ct.erl6
-rw-r--r--lib/diameter/test/diameter_dpr_SUITE.erl38
-rw-r--r--lib/diameter/test/diameter_event_SUITE.erl9
-rw-r--r--lib/diameter/test/diameter_examples_SUITE.erl12
-rw-r--r--lib/diameter/test/diameter_gen_sctp_SUITE.erl39
-rw-r--r--lib/diameter/test/diameter_gen_tcp_SUITE.erl67
-rw-r--r--lib/diameter/test/diameter_pool_SUITE.erl133
-rw-r--r--lib/diameter/test/diameter_relay_SUITE.erl115
-rw-r--r--lib/diameter/test/diameter_tls_SUITE.erl18
-rw-r--r--lib/diameter/test/diameter_traffic_SUITE.erl375
-rw-r--r--lib/diameter/test/diameter_transport_SUITE.erl100
-rw-r--r--lib/diameter/test/diameter_util.erl80
-rw-r--r--lib/diameter/test/diameter_watchdog_SUITE.erl5
-rw-r--r--lib/diameter/test/modules.mk4
-rw-r--r--lib/diameter/vsn.mk6
-rw-r--r--lib/edoc/doc/overview.edoc25
-rw-r--r--lib/edoc/include/edoc_doclet.hrl11
-rw-r--r--lib/edoc/priv/edoc.dtd9
-rw-r--r--lib/edoc/priv/stylesheet.css4
-rw-r--r--lib/edoc/src/edoc.app.src2
-rw-r--r--lib/edoc/src/edoc.erl193
-rw-r--r--lib/edoc/src/edoc.hrl7
-rw-r--r--lib/edoc/src/edoc_data.erl33
-rw-r--r--lib/edoc/src/edoc_doclet.erl106
-rw-r--r--lib/edoc/src/edoc_extract.erl25
-rw-r--r--lib/edoc/src/edoc_layout.erl36
-rw-r--r--lib/edoc/src/edoc_lib.erl171
-rw-r--r--lib/edoc/src/edoc_macros.erl6
-rw-r--r--lib/edoc/src/edoc_parser.yrl14
-rw-r--r--lib/edoc/src/edoc_refs.erl72
-rw-r--r--lib/edoc/src/edoc_run.erl30
-rw-r--r--lib/edoc/src/edoc_specs.erl45
-rw-r--r--lib/edoc/src/edoc_tags.erl24
-rw-r--r--lib/edoc/src/otpsgml_layout.erl24
-rw-r--r--lib/edoc/test/edoc_SUITE.erl21
-rw-r--r--lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy (renamed from lib/common_test/priv/bin/.gitignore)0
-rw-r--r--lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl1
-rw-r--r--lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl1
-rw-r--r--lib/edoc/vsn.mk2
-rw-r--r--lib/eldap/doc/src/eldap.xml28
-rw-r--r--lib/eldap/doc/src/notes.xml65
-rw-r--r--lib/eldap/src/eldap.erl5
-rw-r--r--lib/eldap/test/Makefile5
-rw-r--r--lib/eldap/test/eldap_basic_SUITE.erl1148
-rw-r--r--lib/eldap/test/eldap_basic_SUITE_data/RAND bin 0 -> 512 bytes
-rw-r--r--lib/eldap/test/eldap_connections_SUITE.erl147
-rw-r--r--lib/eldap/test/eldap_misc_SUITE.erl51
-rw-r--r--lib/eldap/test/make_certs.erl357
-rw-r--r--lib/eldap/vsn.mk2
-rw-r--r--lib/erl_docgen/priv/bin/specs_gen.escript2
-rwxr-xr-xlib/erl_docgen/priv/bin/xml_from_edoc.escript2
-rw-r--r--lib/erl_docgen/priv/css/otp_doc.css33
-rw-r--r--lib/erl_docgen/priv/dtd/application.dtd2
-rw-r--r--lib/erl_docgen/priv/dtd/book.dtd4
-rw-r--r--lib/erl_docgen/priv/dtd/chapter.dtd4
-rw-r--r--lib/erl_docgen/priv/dtd/common.dtd2
-rw-r--r--lib/erl_docgen/priv/dtd/common.refs.dtd6
-rw-r--r--lib/erl_docgen/priv/dtd/part.dtd2
-rw-r--r--lib/erl_docgen/priv/dtd/report.dtd4
-rw-r--r--lib/erl_docgen/priv/xsl/db_html.xsl39
-rw-r--r--lib/erl_docgen/priv/xsl/db_man.xsl24
-rw-r--r--lib/erl_docgen/priv/xsl/db_pdf.xsl29
-rw-r--r--lib/erl_docgen/priv/xsl/db_pdf_params.xsl35
-rw-r--r--lib/erl_docgen/src/docgen_otp_specs.erl12
-rw-r--r--lib/erl_docgen/src/erl_docgen.app.src2
-rw-r--r--lib/erl_docgen/vsn.mk2
-rw-r--r--lib/erl_interface/doc/src/ei.xml3
-rw-r--r--lib/erl_interface/doc/src/erl_eterm.xml6
-rw-r--r--lib/erl_interface/src/decode/decode_big.c21
-rw-r--r--lib/erl_interface/src/encode/encode_double.c12
-rw-r--r--lib/erl_interface/src/legacy/erl_eterm.c12
-rw-r--r--lib/erl_interface/src/misc/eidef.h21
-rw-r--r--lib/erl_interface/test/ei_encode_SUITE.erl2
-rw-r--r--lib/et/src/Makefile2
-rw-r--r--lib/et/src/et_collector.erl2
-rw-r--r--lib/et/src/et_selector.erl2
-rw-r--r--lib/eunit/doc/overview.edoc6
-rw-r--r--lib/eunit/include/eunit.hrl6
-rw-r--r--lib/eunit/src/eunit.app.src2
-rw-r--r--lib/eunit/src/eunit.erl2
-rw-r--r--lib/eunit/src/eunit_autoexport.erl11
-rw-r--r--lib/eunit/src/eunit_data.erl2
-rw-r--r--lib/eunit/src/eunit_internal.hrl4
-rw-r--r--lib/eunit/src/eunit_lib.erl42
-rw-r--r--lib/eunit/src/eunit_proc.erl7
-rw-r--r--lib/eunit/src/eunit_surefire.erl26
-rw-r--r--lib/eunit/src/eunit_tty.erl24
-rw-r--r--lib/eunit/test/Makefile4
-rw-r--r--lib/eunit/test/eunit_SUITE.erl38
-rw-r--r--lib/eunit/test/tlatin.erl15
-rw-r--r--lib/eunit/test/tutf8.erl15
-rw-r--r--lib/eunit/vsn.mk2
-rw-r--r--lib/hipe/cerl/cerl_pmatch.erl10
-rw-r--r--lib/hipe/cerl/cerl_to_icode.erl4
-rw-r--r--lib/hipe/cerl/erl_bif_types.erl18
-rw-r--r--lib/hipe/cerl/erl_types.erl872
-rw-r--r--lib/hipe/doc/src/notes.xml49
-rw-r--r--lib/hipe/icode/hipe_beam_to_icode.erl15
-rw-r--r--lib/hipe/llvm/hipe_llvm_main.erl2
-rw-r--r--lib/hipe/main/hipe.app.src6
-rw-r--r--lib/hipe/ppc/hipe_rtl_to_ppc.erl14
-rw-r--r--lib/hipe/rtl/hipe_rtl.erl18
-rw-r--r--lib/hipe/rtl/hipe_rtl_binary_match.erl29
-rw-r--r--lib/hipe/sparc/hipe_rtl_to_sparc.erl10
-rw-r--r--lib/hipe/test/bs_SUITE_data/bs_match.erl14
-rw-r--r--lib/hipe/test/maps_SUITE_data/maps_map_size.erl6
-rw-r--r--lib/hipe/test/maps_SUITE_data/maps_map_sort_literals.erl31
-rw-r--r--lib/hipe/test/maps_SUITE_data/maps_put_map_assoc.erl2
-rw-r--r--lib/hipe/test/maps_SUITE_data/maps_put_map_exact.erl4
-rw-r--r--lib/hipe/test/maps_SUITE_data/maps_update_assoc.erl2
-rw-r--r--lib/hipe/test/maps_SUITE_data/maps_update_exact.erl10
-rw-r--r--lib/hipe/test/maps_SUITE_data/maps_update_map_expressions.erl6
-rw-r--r--lib/hipe/tools/hipe_timer.erl18
-rw-r--r--lib/hipe/vsn.mk2
-rw-r--r--lib/hipe/x86/hipe_rtl_to_x86.erl15
-rw-r--r--lib/ic/test/java_client_erl_server_SUITE.erl6
-rw-r--r--lib/inets/doc/src/Makefile3
-rw-r--r--lib/inets/doc/src/http_server.xml188
-rw-r--r--lib/inets/doc/src/http_uri.xml11
-rw-r--r--lib/inets/doc/src/httpc.xml2
-rw-r--r--lib/inets/doc/src/httpd.xml26
-rw-r--r--lib/inets/doc/src/httpd_conf.xml8
-rw-r--r--lib/inets/doc/src/httpd_custom_api.xml63
-rw-r--r--lib/inets/doc/src/notes.xml127
-rw-r--r--lib/inets/doc/src/ref_man.xml3
-rw-r--r--lib/inets/examples/httpd_load_test/hdlt_random_html.erl7
-rw-r--r--lib/inets/examples/httpd_load_test/hdlt_slave.erl2
-rw-r--r--lib/inets/src/ftp/ftp.erl12
-rw-r--r--lib/inets/src/http_client/httpc_cookie.erl24
-rw-r--r--lib/inets/src/http_client/httpc_handler.erl12
-rw-r--r--lib/inets/src/http_lib/http_internal.hrl3
-rw-r--r--lib/inets/src/http_lib/http_request.erl26
-rw-r--r--lib/inets/src/http_lib/http_uri.erl35
-rw-r--r--lib/inets/src/http_server/Makefile4
-rw-r--r--lib/inets/src/http_server/httpd_conf.erl47
-rw-r--r--lib/inets/src/http_server/httpd_custom.erl69
-rw-r--r--lib/inets/src/http_server/httpd_request.erl230
-rw-r--r--lib/inets/src/http_server/httpd_request_handler.erl39
-rw-r--r--lib/inets/src/http_server/httpd_response.erl49
-rw-r--r--lib/inets/src/http_server/mod_alias.erl14
-rw-r--r--lib/inets/src/http_server/mod_include.erl598
-rw-r--r--lib/inets/src/inets_app/Makefile6
-rw-r--r--lib/inets/src/inets_app/inets.app.src8
-rw-r--r--lib/inets/src/inets_app/inets_lib.erl49
-rw-r--r--lib/inets/src/inets_app/inets_time_compat.erl71
-rw-r--r--lib/inets/src/inets_app/inets_trace.erl32
-rw-r--r--lib/inets/src/tftp/tftp_logger.erl6
-rw-r--r--lib/inets/src/tftp/tftp_sup.erl4
-rw-r--r--lib/inets/test/erl_make_certs.erl10
-rw-r--r--lib/inets/test/ftp_suite_lib.erl8
-rw-r--r--lib/inets/test/http_format_SUITE.erl16
-rw-r--r--lib/inets/test/httpc_SUITE.erl74
-rw-r--r--lib/inets/test/httpd_SUITE.erl150
-rw-r--r--lib/inets/test/httpd_SUITE_data/server_root/config/mime.types4
-rw-r--r--lib/inets/test/httpd_time_test.erl49
-rw-r--r--lib/inets/test/inets_SUITE.erl19
-rw-r--r--lib/inets/test/inets_app_test.erl67
-rw-r--r--lib/inets/test/inets_test_lib.erl23
-rw-r--r--lib/inets/test/old_httpd_SUITE.erl6
-rw-r--r--lib/inets/test/uri_SUITE.erl37
-rw-r--r--lib/inets/vsn.mk4
-rw-r--r--lib/jinterface/doc/src/jinterface_users_guide.xml8
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java46
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractNode.java46
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpConnection.java5
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpCookedConnection.java5
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpEpmd.java46
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangList.java48
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangMap.java207
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangObject.java26
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangTuple.java29
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpLocalNode.java30
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNode.java96
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpOutputStream.java13
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpPeer.java17
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSelf.java115
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServerSocketTransport.java68
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServerTransport.java46
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSocketTransport.java89
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSocketTransportFactory.java56
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpTransport.java49
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpTransportFactory.java124
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/java_files8
-rw-r--r--lib/jinterface/test/jinterface_SUITE.erl32
-rw-r--r--lib/jinterface/test/jinterface_SUITE_data/CoreMatchBind.java584
-rw-r--r--lib/jinterface/test/jinterface_SUITE_data/Makefile.src4
-rw-r--r--lib/jinterface/test/jinterface_SUITE_data/TransportFactoryTest.java90
-rw-r--r--lib/jinterface/test/jitu.erl5
-rw-r--r--lib/kernel/doc/src/error_logger.xml31
-rw-r--r--lib/kernel/doc/src/gen_sctp.xml2
-rw-r--r--lib/kernel/doc/src/gen_tcp.xml15
-rw-r--r--lib/kernel/doc/src/heart.xml10
-rw-r--r--lib/kernel/doc/src/inet.xml10
-rw-r--r--lib/kernel/doc/src/kernel_app.xml14
-rw-r--r--lib/kernel/doc/src/notes.xml48
-rw-r--r--lib/kernel/doc/src/os.xml48
-rw-r--r--lib/kernel/src/application_controller.erl17
-rw-r--r--lib/kernel/src/auth.erl4
-rw-r--r--lib/kernel/src/code.erl53
-rw-r--r--lib/kernel/src/dist_util.erl6
-rw-r--r--lib/kernel/src/erl_distribution.erl1
-rw-r--r--lib/kernel/src/erts_debug.erl64
-rw-r--r--lib/kernel/src/file_io_server.erl114
-rw-r--r--lib/kernel/src/gen_udp.erl2
-rw-r--r--lib/kernel/src/global.erl18
-rw-r--r--lib/kernel/src/heart.erl2
-rw-r--r--lib/kernel/src/hipe_unified_loader.erl25
-rw-r--r--lib/kernel/src/inet.erl28
-rw-r--r--lib/kernel/src/inet_config.erl19
-rw-r--r--lib/kernel/src/inet_db.erl3
-rw-r--r--lib/kernel/src/inet_parse.erl30
-rw-r--r--lib/kernel/src/inet_res.erl14
-rw-r--r--lib/kernel/src/inet_sctp.erl19
-rw-r--r--lib/kernel/src/inet_tcp_dist.erl46
-rw-r--r--lib/kernel/src/kernel.app.src2
-rw-r--r--lib/kernel/src/kernel.appup.src6
-rw-r--r--lib/kernel/src/kernel.erl6
-rw-r--r--lib/kernel/src/os.erl19
-rw-r--r--lib/kernel/src/pg2.erl11
-rw-r--r--lib/kernel/src/standard_error.erl155
-rw-r--r--lib/kernel/src/user_drv.erl52
-rw-r--r--lib/kernel/test/Makefile3
-rw-r--r--lib/kernel/test/application_SUITE.erl5
-rw-r--r--lib/kernel/test/code_SUITE.erl11
-rw-r--r--lib/kernel/test/erl_distribution_SUITE.erl124
-rw-r--r--lib/kernel/test/erl_distribution_wb_SUITE.erl15
-rw-r--r--lib/kernel/test/error_logger_SUITE.erl22
-rw-r--r--lib/kernel/test/error_logger_warn_SUITE.erl79
-rw-r--r--lib/kernel/test/file_SUITE.erl169
-rw-r--r--lib/kernel/test/gen_tcp_api_SUITE.erl32
-rw-r--r--lib/kernel/test/gen_tcp_misc_SUITE.erl1629
-rw-r--r--lib/kernel/test/heart_SUITE.erl8
-rw-r--r--lib/kernel/test/inet_SUITE.erl40
-rw-r--r--lib/kernel/test/interactive_shell_SUITE.erl10
-rw-r--r--lib/kernel/test/prim_file_SUITE.erl2
-rw-r--r--lib/kernel/test/rpc_SUITE.erl41
-rw-r--r--lib/kernel/test/standard_error_SUITE.erl38
-rw-r--r--lib/kernel/test/zlib_SUITE.erl41
-rw-r--r--lib/kernel/vsn.mk2
-rw-r--r--lib/megaco/doc/src/megaco.xml4
-rw-r--r--lib/megaco/src/app/megaco.app.src6
-rw-r--r--lib/megaco/src/app/megaco.appup.src7
-rw-r--r--lib/megaco/src/engine/megaco_trans_sender.erl3
-rw-r--r--lib/megaco/vsn.mk4
-rw-r--r--lib/mnesia/doc/src/Mnesia_chap5.xmlsrc1
-rw-r--r--lib/mnesia/doc/src/mnesia.xml22
-rw-r--r--lib/mnesia/doc/src/notes.xml29
-rw-r--r--lib/mnesia/src/mnesia.app.src2
-rw-r--r--lib/mnesia/src/mnesia.erl62
-rw-r--r--lib/mnesia/src/mnesia.hrl11
-rw-r--r--lib/mnesia/src/mnesia_bup.erl229
-rw-r--r--lib/mnesia/src/mnesia_checkpoint.erl40
-rw-r--r--lib/mnesia/src/mnesia_controller.erl77
-rw-r--r--lib/mnesia/src/mnesia_dumper.erl157
-rw-r--r--lib/mnesia/src/mnesia_event.erl3
-rw-r--r--lib/mnesia/src/mnesia_frag.erl59
-rw-r--r--lib/mnesia/src/mnesia_index.erl12
-rw-r--r--lib/mnesia/src/mnesia_late_loader.erl6
-rw-r--r--lib/mnesia/src/mnesia_lib.erl136
-rw-r--r--lib/mnesia/src/mnesia_loader.erl71
-rw-r--r--lib/mnesia/src/mnesia_locker.erl29
-rw-r--r--lib/mnesia/src/mnesia_log.erl46
-rw-r--r--lib/mnesia/src/mnesia_monitor.erl42
-rw-r--r--lib/mnesia/src/mnesia_recover.erl67
-rw-r--r--lib/mnesia/src/mnesia_schema.erl49
-rw-r--r--lib/mnesia/src/mnesia_snmp_hook.erl17
-rw-r--r--lib/mnesia/src/mnesia_subscr.erl33
-rw-r--r--lib/mnesia/src/mnesia_text.erl22
-rw-r--r--lib/mnesia/src/mnesia_tm.erl196
-rw-r--r--lib/mnesia/test/mnesia_config_backup.erl3
-rw-r--r--lib/mnesia/test/mnesia_config_test.erl22
-rw-r--r--lib/mnesia/test/mnesia_evil_backup.erl19
-rw-r--r--lib/mnesia/test/mnesia_evil_coverage_test.erl6
-rw-r--r--lib/mnesia/test/mnesia_recovery_test.erl13
-rw-r--r--lib/mnesia/test/mnesia_test_lib.hrl10
-rw-r--r--lib/mnesia/test/mnesia_trans_access_test.erl6
-rw-r--r--lib/mnesia/vsn.mk2
-rw-r--r--lib/observer/doc/src/crashdump_ug.xml31
-rw-r--r--lib/observer/doc/src/notes.xml15
-rw-r--r--lib/observer/doc/src/observer_ug.xml23
-rw-r--r--lib/observer/src/Makefile2
-rw-r--r--lib/observer/src/cdv_bin_cb.erl6
-rw-r--r--lib/observer/src/cdv_detail_wx.erl10
-rw-r--r--lib/observer/src/cdv_dist_cb.erl6
-rw-r--r--lib/observer/src/cdv_ets_cb.erl73
-rw-r--r--lib/observer/src/cdv_fun_cb.erl2
-rw-r--r--lib/observer/src/cdv_gen_cb.erl4
-rw-r--r--lib/observer/src/cdv_html_wx.erl2
-rw-r--r--lib/observer/src/cdv_mod_cb.erl6
-rw-r--r--lib/observer/src/cdv_port_cb.erl8
-rw-r--r--lib/observer/src/cdv_proc_cb.erl11
-rw-r--r--lib/observer/src/cdv_sched_cb.erl117
-rw-r--r--lib/observer/src/cdv_term_cb.erl4
-rw-r--r--lib/observer/src/cdv_timer_cb.erl2
-rw-r--r--lib/observer/src/cdv_virtual_list_wx.erl97
-rw-r--r--lib/observer/src/cdv_wx.erl10
-rw-r--r--lib/observer/src/crashdump_viewer.erl205
-rw-r--r--lib/observer/src/crashdump_viewer.hrl28
-rw-r--r--lib/observer/src/observer.app.src4
-rw-r--r--lib/observer/src/observer_alloc_wx.erl256
-rw-r--r--lib/observer/src/observer_html_lib.erl10
-rw-r--r--lib/observer/src/observer_lib.erl20
-rw-r--r--lib/observer/src/observer_perf_wx.erl273
-rw-r--r--lib/observer/src/observer_pro_wx.erl2
-rw-r--r--lib/observer/src/observer_procinfo.erl78
-rw-r--r--lib/observer/src/observer_sys_wx.erl87
-rw-r--r--lib/observer/src/observer_wx.erl138
-rw-r--r--lib/observer/src/ttb.erl2
-rw-r--r--lib/observer/test/observer_SUITE.erl35
-rw-r--r--lib/observer/vsn.mk2
-rw-r--r--lib/orber/COSS/CosNaming/CosNaming_NamingContextExt_impl.erl10
-rw-r--r--lib/orber/src/cdr_decode.erl16
-rw-r--r--lib/orber/src/corba.erl6
-rw-r--r--lib/orber/src/orber.app.src4
-rw-r--r--lib/orber/src/orber_ifr_utils.erl7
-rw-r--r--lib/orber/src/orber_objectkeys.erl12
-rw-r--r--lib/orber/src/orber_socket.erl6
-rw-r--r--lib/orber/src/orber_web_server.erl8
-rw-r--r--lib/orber/test/cdrcoding_10_SUITE.erl4
-rw-r--r--lib/orber/test/cdrcoding_11_SUITE.erl4
-rw-r--r--lib/orber/test/cdrcoding_12_SUITE.erl4
-rw-r--r--lib/orber/test/iop_ior_10_SUITE.erl4
-rw-r--r--lib/orber/test/iop_ior_11_SUITE.erl4
-rw-r--r--lib/orber/test/iop_ior_12_SUITE.erl4
-rw-r--r--lib/orber/test/multi_ORB_SUITE.erl4
-rw-r--r--lib/orber/test/orber_acl_SUITE.erl12
-rw-r--r--lib/orber/test/orber_test_lib.erl2
-rw-r--r--lib/orber/test/orber_test_server_impl.erl12
-rw-r--r--lib/orber/vsn.mk2
-rw-r--r--lib/os_mon/c_src/cpu_sup.c214
-rw-r--r--lib/os_mon/c_src/memsup.c2
-rw-r--r--lib/os_mon/doc/src/cpu_sup.xml2
-rw-r--r--lib/os_mon/doc/src/notes.xml28
-rw-r--r--lib/os_mon/src/cpu_sup.erl91
-rw-r--r--lib/os_mon/test/cpu_sup_SUITE.erl2
-rw-r--r--lib/os_mon/vsn.mk2
-rw-r--r--lib/parsetools/include/leexinc.hrl8
-rw-r--r--lib/parsetools/include/yeccpre.hrl33
-rw-r--r--lib/parsetools/src/leex.erl4
-rw-r--r--lib/parsetools/src/parsetools.app.src2
-rw-r--r--lib/parsetools/src/yecc.erl18
-rw-r--r--lib/parsetools/src/yeccgramm.yrl26
-rw-r--r--lib/parsetools/src/yeccparser.erl114
-rw-r--r--lib/parsetools/test/yecc_SUITE.erl30
-rw-r--r--lib/parsetools/vsn.mk2
-rw-r--r--lib/percept/src/percept.erl7
-rw-r--r--lib/public_key/asn1/Makefile2
-rw-r--r--lib/public_key/doc/src/Makefile3
-rw-r--r--lib/public_key/doc/src/cert_records.xml690
-rw-r--r--lib/public_key/doc/src/introduction.xml25
-rw-r--r--lib/public_key/doc/src/notes.xml15
-rw-r--r--lib/public_key/doc/src/part.xml7
-rw-r--r--lib/public_key/doc/src/public_key.xml604
-rw-r--r--lib/public_key/doc/src/public_key_records.xml754
-rw-r--r--lib/public_key/doc/src/ref_man.xml4
-rw-r--r--lib/public_key/doc/src/using_public_key.xml242
-rw-r--r--lib/public_key/src/pubkey_cert.erl23
-rw-r--r--lib/public_key/src/pubkey_cert_records.erl6
-rw-r--r--lib/public_key/src/pubkey_crl.erl12
-rw-r--r--lib/public_key/src/pubkey_pbe.erl28
-rw-r--r--lib/public_key/src/pubkey_pem.erl3
-rw-r--r--lib/public_key/src/public_key.erl160
-rw-r--r--lib/public_key/test/erl_make_certs.erl31
-rw-r--r--lib/public_key/test/public_key_SUITE.erl40
-rw-r--r--lib/public_key/test/public_key_SUITE_data/crl_signer.pem25
-rw-r--r--lib/public_key/test/public_key_SUITE_data/idp_cert.pem30
-rw-r--r--lib/public_key/test/public_key_SUITE_data/idp_crl.pem18
-rw-r--r--lib/public_key/vsn.mk2
-rw-r--r--lib/reltool/src/reltool.app.src2
-rw-r--r--lib/reltool/src/reltool_fgraph_win.erl6
-rw-r--r--lib/reltool/src/reltool_utils.erl7
-rw-r--r--lib/reltool/test/reltool_server_SUITE.erl5
-rw-r--r--lib/runtime_tools/doc/src/notes.xml16
-rw-r--r--lib/runtime_tools/src/dbg.erl60
-rw-r--r--lib/runtime_tools/src/observer_backend.erl2
-rw-r--r--lib/runtime_tools/src/percept_profile.erl4
-rw-r--r--lib/runtime_tools/src/runtime_tools.app.src2
-rw-r--r--lib/runtime_tools/src/system_information.erl5
-rw-r--r--lib/runtime_tools/test/dbg_SUITE.erl36
-rw-r--r--lib/runtime_tools/test/erts_alloc_config_SUITE.erl7
-rw-r--r--lib/runtime_tools/vsn.mk2
-rw-r--r--lib/sasl/doc/src/sasl_app.xml7
-rw-r--r--lib/sasl/src/sasl.erl8
-rw-r--r--lib/sasl/src/sasl_report_file_h.erl4
-rw-r--r--lib/sasl/test/release_handler_SUITE.erl18
-rw-r--r--lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app29
-rw-r--r--lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup23
-rw-r--r--lib/sasl/test/sasl_SUITE.erl31
-rw-r--r--lib/sasl/vsn.mk2
-rw-r--r--lib/snmp/doc/src/notes.xml35
-rw-r--r--lib/snmp/doc/src/snmp_app.xml2
-rw-r--r--lib/snmp/doc/src/snmp_config.xml2
-rw-r--r--lib/snmp/src/agent/snmp_shadow_table.erl9
-rw-r--r--lib/snmp/src/agent/snmp_standard_mib.erl7
-rw-r--r--lib/snmp/src/agent/snmp_target_mib.erl16
-rw-r--r--lib/snmp/src/agent/snmp_user_based_sm_mib.erl31
-rw-r--r--lib/snmp/src/agent/snmp_view_based_acm_mib.erl8
-rw-r--r--lib/snmp/src/agent/snmpa_mpd.erl21
-rw-r--r--lib/snmp/src/agent/snmpa_net_if.erl25
-rw-r--r--lib/snmp/src/agent/snmpa_usm.erl12
-rw-r--r--lib/snmp/src/agent/snmpa_vacm.erl9
-rw-r--r--lib/snmp/src/app/snmp.app.src4
-rw-r--r--lib/snmp/src/app/snmp.appup.src8
-rw-r--r--lib/snmp/src/compile/snmpc.erl7
-rw-r--r--lib/snmp/src/manager/snmpm.erl8
-rw-r--r--lib/snmp/src/manager/snmpm_mpd.erl18
-rw-r--r--lib/snmp/src/manager/snmpm_net_if.erl140
-rw-r--r--lib/snmp/src/manager/snmpm_server.erl158
-rw-r--r--lib/snmp/src/misc/snmp_misc.erl23
-rw-r--r--lib/snmp/src/misc/snmp_verbosity.erl5
-rw-r--r--lib/snmp/test/snmp_agent_test.erl86
-rw-r--r--lib/snmp/test/snmp_app_test.erl90
-rw-r--r--lib/snmp/test/snmp_appup_mgr.erl8
-rw-r--r--lib/snmp/test/snmp_conf_test.erl4
-rw-r--r--lib/snmp/test/snmp_log_test.erl29
-rw-r--r--lib/snmp/test/snmp_manager_config_test.erl8
-rw-r--r--lib/snmp/test/snmp_test_lib.erl13
-rw-r--r--lib/snmp/test/snmp_test_lib.hrl15
-rw-r--r--lib/snmp/test/snmp_test_mgr.erl8
-rw-r--r--lib/snmp/vsn.mk4
-rw-r--r--lib/ssh/doc/src/introduction.xml182
-rw-r--r--lib/ssh/doc/src/notes.xml133
-rw-r--r--lib/ssh/doc/src/ref_man.xml4
-rw-r--r--lib/ssh/doc/src/ssh.xml477
-rw-r--r--lib/ssh/doc/src/ssh_app.xml122
-rw-r--r--lib/ssh/doc/src/ssh_channel.xml296
-rw-r--r--lib/ssh/doc/src/ssh_client_key_api.xml96
-rw-r--r--lib/ssh/doc/src/ssh_connection.xml456
-rw-r--r--lib/ssh/doc/src/ssh_server_key_api.xml77
-rw-r--r--lib/ssh/doc/src/ssh_sftp.xml712
-rw-r--r--lib/ssh/doc/src/ssh_sftpd.xml56
-rw-r--r--lib/ssh/doc/src/usersguide.xml7
-rw-r--r--lib/ssh/doc/src/using_ssh.xml206
-rw-r--r--lib/ssh/examples/Makefile5
-rw-r--r--lib/ssh/examples/ssh_device.erl62
-rw-r--r--lib/ssh/src/ssh.appup.src54
-rw-r--r--lib/ssh/src/ssh.erl206
-rw-r--r--lib/ssh/src/ssh_acceptor.erl4
-rw-r--r--lib/ssh/src/ssh_auth.erl146
-rw-r--r--lib/ssh/src/ssh_auth.hrl2
-rw-r--r--lib/ssh/src/ssh_connection.erl264
-rw-r--r--lib/ssh/src/ssh_connection_handler.erl427
-rw-r--r--lib/ssh/src/ssh_info.erl150
-rw-r--r--lib/ssh/src/ssh_sftp.erl26
-rw-r--r--lib/ssh/src/ssh_sftpd.erl122
-rw-r--r--lib/ssh/src/ssh_transport.erl225
-rw-r--r--lib/ssh/test/Makefile3
-rw-r--r--lib/ssh/test/ssh_basic_SUITE.erl675
-rw-r--r--lib/ssh/test/ssh_connection_SUITE.erl52
-rw-r--r--lib/ssh/test/ssh_relay.erl407
-rw-r--r--lib/ssh/test/ssh_sftp_SUITE.erl1
-rw-r--r--lib/ssh/test/ssh_sftpd_SUITE.erl49
-rw-r--r--lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl1
-rw-r--r--lib/ssh/test/ssh_test_lib.erl15
-rw-r--r--lib/ssh/test/ssh_to_openssh_SUITE.erl23
-rw-r--r--lib/ssh/test/ssh_unicode_SUITE.erl1
-rw-r--r--lib/ssh/vsn.mk3
-rw-r--r--lib/ssl/doc/src/Makefile4
-rw-r--r--lib/ssl/doc/src/notes.xml91
-rw-r--r--lib/ssl/doc/src/refman.xml19
-rw-r--r--lib/ssl/doc/src/ssl.xml1165
-rw-r--r--lib/ssl/doc/src/ssl_app.xml92
-rw-r--r--lib/ssl/doc/src/ssl_crl_cache.xml65
-rw-r--r--lib/ssl/doc/src/ssl_crl_cache_api.xml105
-rw-r--r--lib/ssl/doc/src/ssl_distribution.xml241
-rw-r--r--lib/ssl/doc/src/ssl_introduction.xml53
-rw-r--r--lib/ssl/doc/src/ssl_protocol.xml130
-rw-r--r--lib/ssl/doc/src/ssl_session_cache_api.xml123
-rw-r--r--lib/ssl/doc/src/usersguide.xml5
-rw-r--r--lib/ssl/doc/src/using_ssl.xml103
-rw-r--r--lib/ssl/src/Makefile9
-rw-r--r--lib/ssl/src/dtls_connection.erl9
-rw-r--r--lib/ssl/src/dtls_handshake.erl4
-rw-r--r--lib/ssl/src/dtls_record.erl4
-rw-r--r--lib/ssl/src/ssl.app.src8
-rw-r--r--lib/ssl/src/ssl.appup.src2
-rw-r--r--lib/ssl/src/ssl.erl156
-rw-r--r--lib/ssl/src/ssl_alert.erl6
-rw-r--r--lib/ssl/src/ssl_alert.hrl7
-rw-r--r--lib/ssl/src/ssl_api.hrl2
-rw-r--r--lib/ssl/src/ssl_certificate.erl58
-rw-r--r--lib/ssl/src/ssl_cipher.erl77
-rw-r--r--lib/ssl/src/ssl_cipher.hrl6
-rw-r--r--lib/ssl/src/ssl_config.erl37
-rw-r--r--lib/ssl/src/ssl_connection.erl93
-rw-r--r--lib/ssl/src/ssl_connection.hrl9
-rw-r--r--lib/ssl/src/ssl_crl.erl80
-rw-r--r--lib/ssl/src/ssl_crl_cache.erl179
-rw-r--r--lib/ssl/src/ssl_crl_cache_api.erl30
-rw-r--r--lib/ssl/src/ssl_handshake.erl339
-rw-r--r--lib/ssl/src/ssl_handshake.hrl9
-rw-r--r--lib/ssl/src/ssl_internal.hrl23
-rw-r--r--lib/ssl/src/ssl_manager.erl235
-rw-r--r--lib/ssl/src/ssl_pkix_db.erl101
-rw-r--r--lib/ssl/src/ssl_record.erl10
-rw-r--r--lib/ssl/src/ssl_tls_dist_proxy.erl9
-rw-r--r--lib/ssl/src/ssl_v3.erl5
-rw-r--r--lib/ssl/src/tls_connection.erl85
-rw-r--r--lib/ssl/src/tls_handshake.erl104
-rw-r--r--lib/ssl/src/tls_record.erl34
-rw-r--r--lib/ssl/src/tls_v1.erl10
-rw-r--r--lib/ssl/test/Makefile6
-rw-r--r--lib/ssl/test/erl_make_certs.erl18
-rw-r--r--lib/ssl/test/make_certs.erl91
-rw-r--r--lib/ssl/test/ssl_alpn_handshake_SUITE.erl414
-rw-r--r--lib/ssl/test/ssl_basic_SUITE.erl132
-rw-r--r--lib/ssl/test/ssl_certificate_verify_SUITE.erl4
-rw-r--r--lib/ssl/test/ssl_cipher_SUITE.erl189
-rw-r--r--lib/ssl/test/ssl_crl_SUITE.erl598
-rw-r--r--lib/ssl/test/ssl_handshake_SUITE.erl59
-rw-r--r--lib/ssl/test/ssl_npn_handshake_SUITE.erl14
-rw-r--r--lib/ssl/test/ssl_pem_cache_SUITE.erl127
-rw-r--r--lib/ssl/test/ssl_session_cache_SUITE.erl2
-rw-r--r--lib/ssl/test/ssl_sni_SUITE.erl179
-rw-r--r--lib/ssl/test/ssl_test_lib.erl38
-rw-r--r--lib/ssl/test/ssl_to_openssl_SUITE.erl468
-rw-r--r--lib/ssl/test/ssl_upgrade_SUITE.erl164
-rw-r--r--lib/ssl/vsn.mk2
-rw-r--r--lib/stdlib/doc/src/Makefile4
-rw-r--r--lib/stdlib/doc/src/c.xml8
-rw-r--r--lib/stdlib/doc/src/calendar.xml6
-rw-r--r--lib/stdlib/doc/src/erl_anno.xml308
-rw-r--r--lib/stdlib/doc/src/erl_parse.xml98
-rw-r--r--lib/stdlib/doc/src/erl_scan.xml79
-rw-r--r--lib/stdlib/doc/src/ets.xml41
-rw-r--r--lib/stdlib/doc/src/file_sorter.xml6
-rw-r--r--lib/stdlib/doc/src/gb_sets.xml13
-rw-r--r--lib/stdlib/doc/src/gb_trees.xml13
-rw-r--r--lib/stdlib/doc/src/gen_server.xml2
-rw-r--r--lib/stdlib/doc/src/io.xml3
-rw-r--r--lib/stdlib/doc/src/io_lib.xml70
-rw-r--r--lib/stdlib/doc/src/lists.xml2
-rw-r--r--lib/stdlib/doc/src/maps.xml68
-rw-r--r--lib/stdlib/doc/src/math.xml1
-rw-r--r--lib/stdlib/doc/src/notes.xml35
-rw-r--r--lib/stdlib/doc/src/orddict.xml15
-rw-r--r--lib/stdlib/doc/src/proc_lib.xml2
-rw-r--r--lib/stdlib/doc/src/rand.xml246
-rw-r--r--lib/stdlib/doc/src/random.xml12
-rw-r--r--lib/stdlib/doc/src/re.xml21
-rw-r--r--lib/stdlib/doc/src/ref_man.xml4
-rw-r--r--lib/stdlib/doc/src/sets.xml22
-rw-r--r--lib/stdlib/doc/src/specs.xml2
-rw-r--r--lib/stdlib/doc/src/supervisor.xml12
-rw-r--r--lib/stdlib/doc/src/timer.xml10
-rw-r--r--lib/stdlib/doc/src/zip.xml8
-rw-r--r--lib/stdlib/src/Makefile5
-rw-r--r--lib/stdlib/src/beam_lib.erl27
-rw-r--r--lib/stdlib/src/binary.erl8
-rw-r--r--lib/stdlib/src/c.erl40
-rw-r--r--lib/stdlib/src/calendar.erl2
-rw-r--r--lib/stdlib/src/dets.erl22
-rw-r--r--lib/stdlib/src/dets_utils.erl2
-rw-r--r--lib/stdlib/src/dict.erl6
-rw-r--r--lib/stdlib/src/digraph.erl2
-rw-r--r--lib/stdlib/src/edlin.erl16
-rw-r--r--lib/stdlib/src/epp.erl171
-rw-r--r--lib/stdlib/src/erl_anno.erl458
-rw-r--r--lib/stdlib/src/erl_eval.erl55
-rw-r--r--lib/stdlib/src/erl_expand_records.erl65
-rw-r--r--lib/stdlib/src/erl_lint.erl223
-rw-r--r--lib/stdlib/src/erl_parse.yrl738
-rw-r--r--lib/stdlib/src/erl_pp.erl59
-rw-r--r--lib/stdlib/src/erl_scan.erl227
-rw-r--r--lib/stdlib/src/erl_tar.erl2
-rw-r--r--lib/stdlib/src/escript.erl36
-rw-r--r--lib/stdlib/src/ets.erl100
-rw-r--r--lib/stdlib/src/file_sorter.erl4
-rw-r--r--lib/stdlib/src/filename.erl2
-rw-r--r--lib/stdlib/src/gb_sets.erl36
-rw-r--r--lib/stdlib/src/gb_trees.erl31
-rw-r--r--lib/stdlib/src/io.erl4
-rw-r--r--lib/stdlib/src/io_lib.erl48
-rw-r--r--lib/stdlib/src/io_lib_format.erl112
-rw-r--r--lib/stdlib/src/maps.erl83
-rw-r--r--lib/stdlib/src/math.erl7
-rw-r--r--lib/stdlib/src/ms_transform.erl5
-rw-r--r--lib/stdlib/src/orddict.erl130
-rw-r--r--lib/stdlib/src/otp_internal.erl89
-rw-r--r--lib/stdlib/src/qlc.erl100
-rw-r--r--lib/stdlib/src/qlc_pt.erl662
-rw-r--r--lib/stdlib/src/rand.erl591
-rw-r--r--lib/stdlib/src/random.erl8
-rw-r--r--lib/stdlib/src/shell.erl35
-rw-r--r--lib/stdlib/src/shell_default.erl3
-rw-r--r--lib/stdlib/src/slave.erl22
-rw-r--r--lib/stdlib/src/stdlib.app.src6
-rw-r--r--lib/stdlib/src/stdlib.appup.src8
-rw-r--r--lib/stdlib/src/string.erl46
-rw-r--r--lib/stdlib/src/supervisor.erl26
-rw-r--r--lib/stdlib/src/timer.erl27
-rw-r--r--lib/stdlib/src/win32reg.erl7
-rw-r--r--lib/stdlib/src/zip.erl48
-rw-r--r--lib/stdlib/test/Makefile2
-rw-r--r--lib/stdlib/test/binary_module_SUITE.erl95
-rw-r--r--lib/stdlib/test/dict_SUITE.erl50
-rw-r--r--lib/stdlib/test/dict_test_lib.erl5
-rw-r--r--lib/stdlib/test/epp_SUITE.erl52
-rw-r--r--lib/stdlib/test/erl_anno_SUITE.erl568
-rw-r--r--lib/stdlib/test/erl_eval_SUITE.erl5
-rw-r--r--lib/stdlib/test/erl_lint_SUITE.erl48
-rw-r--r--lib/stdlib/test/erl_pp_SUITE.erl97
-rw-r--r--lib/stdlib/test/erl_scan_SUITE.erl285
-rw-r--r--lib/stdlib/test/ets_SUITE.erl422
-rw-r--r--lib/stdlib/test/filename_SUITE.erl4
-rw-r--r--lib/stdlib/test/gen_event_SUITE.erl173
-rw-r--r--lib/stdlib/test/gen_fsm_SUITE.erl220
-rw-r--r--lib/stdlib/test/gen_server_SUITE.erl139
-rw-r--r--lib/stdlib/test/io_SUITE.erl22
-rw-r--r--lib/stdlib/test/io_proto_SUITE.erl301
-rw-r--r--lib/stdlib/test/lists_SUITE.erl2
-rw-r--r--lib/stdlib/test/maps_SUITE.erl81
-rw-r--r--lib/stdlib/test/qlc_SUITE.erl182
-rw-r--r--lib/stdlib/test/rand_SUITE.erl527
-rw-r--r--lib/stdlib/test/random_SUITE.erl2
-rw-r--r--lib/stdlib/test/select_SUITE.erl2
-rw-r--r--lib/stdlib/test/sets_SUITE.erl44
-rw-r--r--lib/stdlib/test/sets_test_lib.erl5
-rw-r--r--lib/stdlib/test/shell_SUITE.erl11
-rw-r--r--lib/stdlib/test/stdlib_SUITE.erl15
-rw-r--r--lib/stdlib/test/string_SUITE.erl44
-rw-r--r--lib/stdlib/test/supervisor_SUITE.erl60
-rw-r--r--lib/stdlib/test/tar_SUITE.erl2
-rw-r--r--lib/stdlib/test/timer_SUITE.erl43
-rw-r--r--lib/stdlib/test/timer_simple_SUITE.erl4
-rw-r--r--lib/stdlib/test/unicode_SUITE.erl175
-rw-r--r--lib/stdlib/test/zip_SUITE.erl46
-rw-r--r--lib/stdlib/vsn.mk2
-rw-r--r--lib/syntax_tools/doc/overview.edoc83
-rw-r--r--lib/syntax_tools/doc/src/Makefile2
-rw-r--r--lib/syntax_tools/doc/src/notes.xml15
-rw-r--r--lib/syntax_tools/doc/src/ref_man.xml13
-rw-r--r--lib/syntax_tools/examples/merl/Makefile22
-rw-r--r--lib/syntax_tools/examples/merl/basic.erl77
-rw-r--r--lib/syntax_tools/examples/merl/basic_test.erl77
-rw-r--r--lib/syntax_tools/examples/merl/basicc.erl149
-rw-r--r--lib/syntax_tools/examples/merl/lisp.erl160
-rw-r--r--lib/syntax_tools/examples/merl/lisp_test.erl98
-rw-r--r--lib/syntax_tools/examples/merl/lispc.erl102
-rw-r--r--lib/syntax_tools/examples/merl/merl_build.erl104
-rw-r--r--lib/syntax_tools/include/merl.hrl29
-rw-r--r--lib/syntax_tools/src/Makefile22
-rw-r--r--lib/syntax_tools/src/epp_dodger.erl8
-rw-r--r--lib/syntax_tools/src/erl_prettypr.erl33
-rw-r--r--lib/syntax_tools/src/erl_recomment.erl9
-rw-r--r--lib/syntax_tools/src/erl_syntax.erl208
-rw-r--r--lib/syntax_tools/src/erl_syntax_lib.erl106
-rw-r--r--lib/syntax_tools/src/erl_tidy.erl30
-rw-r--r--lib/syntax_tools/src/igor.erl23
-rw-r--r--lib/syntax_tools/src/merl.erl1230
-rw-r--r--lib/syntax_tools/src/merl_tests.erl539
-rw-r--r--lib/syntax_tools/src/merl_transform.erl262
-rw-r--r--lib/syntax_tools/src/syntax_tools.app.src5
-rw-r--r--lib/syntax_tools/test/Makefile3
-rw-r--r--lib/syntax_tools/test/merl_SUITE.erl91
-rw-r--r--lib/syntax_tools/vsn.mk2
-rw-r--r--lib/test_server/doc/src/Makefile4
-rw-r--r--lib/test_server/doc/src/example_chapter.xml28
-rw-r--r--lib/test_server/doc/src/notes.xml67
-rw-r--r--lib/test_server/doc/src/test_server.xml66
-rw-r--r--lib/test_server/include/test_server.hrl2
-rw-r--r--lib/test_server/src/erl2html2.erl101
-rw-r--r--lib/test_server/src/test_server.app.src6
-rw-r--r--lib/test_server/src/test_server.erl361
-rw-r--r--lib/test_server/src/test_server_ctrl.erl174
-rw-r--r--lib/test_server/src/test_server_node.erl5
-rw-r--r--lib/test_server/src/test_server_sup.erl80
-rw-r--r--lib/test_server/src/ts.erl604
-rw-r--r--lib/test_server/src/ts_install.erl53
-rw-r--r--lib/test_server/src/ts_install_cth.erl17
-rw-r--r--lib/test_server/src/ts_lib.erl53
-rw-r--r--lib/test_server/src/ts_make.erl12
-rw-r--r--lib/test_server/test/erl2html2_SUITE.erl66
-rw-r--r--lib/test_server/test/erl2html2_SUITE_data/include/header3.hrl1
-rw-r--r--lib/test_server/test/erl2html2_SUITE_data/m1.erl6
-rw-r--r--lib/test_server/vsn.mk2
-rw-r--r--lib/tools/doc/src/cover.xml109
-rw-r--r--lib/tools/doc/src/cprof.xml2
-rw-r--r--lib/tools/doc/src/notes.xml26
-rw-r--r--lib/tools/emacs/erlang.el122
-rw-r--r--lib/tools/emacs/test.erl.indented8
-rw-r--r--lib/tools/emacs/test.erl.orig8
-rw-r--r--lib/tools/src/cover.erl891
-rw-r--r--lib/tools/src/cover_web.erl2
-rw-r--r--lib/tools/src/eprof.erl28
-rw-r--r--lib/tools/src/lcnt.erl70
-rw-r--r--lib/tools/src/tags.erl9
-rw-r--r--lib/tools/src/tools.app.src6
-rw-r--r--lib/tools/src/xref.hrl4
-rw-r--r--lib/tools/src/xref_compiler.erl4
-rw-r--r--lib/tools/src/xref_reader.erl26
-rw-r--r--lib/tools/src/xref_scanner.erl6
-rw-r--r--lib/tools/test/cover_SUITE.erl296
-rw-r--r--lib/tools/test/lcnt_SUITE.erl10
-rw-r--r--lib/tools/vsn.mk2
-rw-r--r--lib/typer/src/typer.erl14
-rw-r--r--lib/typer/vsn.mk2
-rw-r--r--lib/webtool/doc/src/Makefile4
-rw-r--r--lib/webtool/vsn.mk2
-rw-r--r--lib/wx/api_gen/wx_gen_cpp.erl11
-rw-r--r--lib/wx/api_gen/wxapi.conf49
-rw-r--r--lib/wx/c_src/gen/wxe_events.cpp69
-rw-r--r--lib/wx/c_src/gen/wxe_funcs.cpp6
-rw-r--r--lib/wx/c_src/gen/wxe_init.cpp10
-rw-r--r--lib/wx/c_src/wxe_driver.c11
-rw-r--r--lib/wx/c_src/wxe_gl.cpp8
-rw-r--r--lib/wx/c_src/wxe_helpers.cpp167
-rw-r--r--lib/wx/c_src/wxe_helpers.h30
-rw-r--r--lib/wx/c_src/wxe_impl.cpp312
-rw-r--r--lib/wx/c_src/wxe_impl.h9
-rw-r--r--lib/wx/configure.in21
-rw-r--r--lib/wx/doc/src/notes.xml15
-rw-r--r--lib/wx/examples/demo/demo.erl10
-rw-r--r--lib/wx/examples/demo/demo_html_tagger.erl8
-rw-r--r--lib/wx/include/wx.hrl52
-rw-r--r--lib/wx/src/wxe_server.erl14
-rw-r--r--lib/wx/test/wx_class_SUITE.erl14
-rw-r--r--lib/wx/test/wx_event_SUITE.erl50
-rw-r--r--lib/wx/vsn.mk2
-rw-r--r--lib/xmerl/src/xmerl.app.src2
-rw-r--r--lib/xmerl/src/xmerl.erl2
-rw-r--r--lib/xmerl/vsn.mk2
1244 files changed, 64456 insertions, 30865 deletions
diff --git a/lib/.gitignore b/lib/.gitignore
index 4125111ebd..58c49adce0 100644
--- a/lib/.gitignore
+++ b/lib/.gitignore
@@ -546,6 +546,8 @@ snmp/doc/intex.html
/syntax_tools/doc/src/erl_syntax.xml
/syntax_tools/doc/src/erl_syntax_lib.xml
/syntax_tools/doc/src/erl_tidy.xml
+/syntax_tools/doc/src/merl.xml
+/syntax_tools/doc/src/merl_transform.xml
/syntax_tools/doc/src/igor.xml
/syntax_tools/doc/src/prettypr.xml
diff --git a/lib/asn1/doc/src/Makefile b/lib/asn1/doc/src/Makefile
index 3b3e1bd8f9..f26508295c 100644
--- a/lib/asn1/doc/src/Makefile
+++ b/lib/asn1/doc/src/Makefile
@@ -48,7 +48,9 @@ XML_HTML_FILE = \
notes_history.xml
XML_CHAPTER_FILES = \
- asn1_ug.xml \
+ asn1_introduction.xml \
+ asn1_getting_started.xml \
+ asn1_overview.xml \
asn1_spec.xml \
notes.xml
diff --git a/lib/asn1/doc/src/asn1_getting_started.xml b/lib/asn1/doc/src/asn1_getting_started.xml
new file mode 100644
index 0000000000..1a9c279191
--- /dev/null
+++ b/lib/asn1/doc/src/asn1_getting_started.xml
@@ -0,0 +1,1290 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>1997</year><year>2013</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Getting Started</title>
+ <prepared>Kenneth Lundin</prepared>
+ <docno></docno>
+ <date>1999-03-25</date>
+ <rev>D</rev>
+ <file>asn1_getting_started.xml</file>
+ </header>
+
+ <section>
+ <title>Example</title>
+ <p>The following example demonstrates the basic functionality used to
+ run the Erlang ASN.1 compiler.</p>
+ <p>Create a file named <c>People.asn</c> containing the following:</p>
+ <pre>
+People DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+ Person ::= SEQUENCE {
+ name PrintableString,
+ location INTEGER {home(0),field(1),roving(2)},
+ age INTEGER OPTIONAL
+ }
+END </pre>
+ <p>This file must be compiled before it can be used.
+ The ASN.1 compiler checks that the syntax is correct and that the
+ text represents proper ASN.1 code before generating an abstract
+ syntax tree. The code-generator then uses the abstract syntax
+ tree to generate code.</p>
+ <p>The generated Erlang files are placed in the current directory or
+ in the directory specified with option <c>{outdir,Dir}</c>.</p>
+ <p>The following shows how the compiler
+ can be called from the Erlang shell:</p>
+
+ <pre>
+1><input> asn1ct:compile("People", [ber]).</input>
+ok
+2> </pre>
+
+ <p>Option <c>verbose</c> can be added to get information
+ about the generated files:</p>
+ <pre>
+2><input> asn1ct:compile("People", [ber,verbose]).</input>
+Erlang ASN.1 compiling "People.asn"
+--{generated,"People.asn1db"}--
+--{generated,"People.hrl"}--
+--{generated,"People.erl"}--
+ok
+3> </pre>
+
+ <p>ASN.1 module <c>People</c> is now accepted and the
+ abstract syntax tree is saved in file <c>People.asn1db</c>.
+ The generated Erlang code is compiled using the Erlang compiler
+ and loaded into the Erlang runtime system. There is now an API
+ for <c>encode/2</c> and <c>decode/2</c> in module
+ <c>People</c>, which is called like:<br></br>
+ <c><![CDATA['People':encode(<Type name>, <Value>)]]></c>
+ <br></br>
+ or<br></br>
+<c><![CDATA['People':decode(<Type name>, <Value>)]]></c></p>
+
+ <p>Assume that there is a network
+ application that receives instances of the ASN.1 defined
+ type <c>Person</c>, modifies, and sends them back again:</p>
+
+ <code type="none">
+receive
+ {Port,{data,Bytes}} ->
+ case 'People':decode('Person',Bytes) of
+ {ok,P} ->
+ {ok,Answer} = 'People':encode('Person',mk_answer(P)),
+ Port ! {self(),{command,Answer}};
+ {error,Reason} ->
+ exit({error,Reason})
+ end
+ end, </code>
+ <p>In this example, a series of bytes is received from an
+ external source and the bytes are then decoded into a valid
+ Erlang term. This was achieved with the call
+ <c>'People':decode('Person',Bytes)</c>, which returned
+ an Erlang value of the ASN.1 type <c>Person</c>. Then an answer was
+ constructed and encoded using
+ <c>'People':encode('Person',Answer)</c>, which takes an
+ instance of a defined ASN.1 type and transforms it to a
+ binary according to the BER or PER encoding rules.</p>
+ <p>The encoder and decoder can also be run from the shell:</p>
+ <pre>
+2> <input>Rockstar = {'Person',"Some Name",roving,50}.</input>
+{'Person',"Some Name",roving,50}
+3> <input>{ok,Bin} = 'People':encode('Person',Rockstar).</input>
+{ok,&lt;&lt;243,17,19,9,83,111,109,101,32,78,97,109,101,2,1,2,
+ 2,1,50&gt;&gt;}
+4> <input>{ok,Person} = 'People':decode('Person',Bin).</input>
+{ok,{'Person',"Some Name",roving,50}}
+5> </pre>
+
+ <section>
+ <title>Module Dependencies</title>
+ <p>It is common that ASN.1 modules import defined types, values, and
+ other entities from another ASN.1 module.</p>
+    <p>Earlier versions of the ASN.1 compiler required the modules
+    that were imported from to be compiled before the importing
+    module. This caused problems when ASN.1 modules had circular
+    dependencies.</p>
+    <p>Referenced modules are now parsed when the compiler finds an
+    entity that is imported. No code is generated for
+    the referenced module. However, the compiled modules rely on
+    the referenced modules also being compiled.</p>
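+    <p>For example, assume that module <c>ModuleA</c> imports definitions
+    from module <c>ModuleB</c> (the module names are only examples).
+    Compiling <c>ModuleA</c> parses <c>ModuleB</c> but generates no code
+    for it, so <c>ModuleB</c> must also be compiled before the generated
+    code is used:</p>
+    <pre>
+1> <input>asn1ct:compile('ModuleA', [ber]).</input>
+ok
+2> <input>asn1ct:compile('ModuleB', [ber]).</input>
+ok </pre>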
+ </section>
+ </section>
+
+ <section>
+ <title>ASN.1 Application User Interface</title>
+ <p>The <c>ASN.1</c> application provides the following two
+ separate user interfaces:</p>
+ <list type="bulleted">
+ <item>
+ <p>The module <c>asn1ct</c>, which provides the compile-time functions
+ (including the compiler)</p>
+ </item>
+ <item>
+ <p>The module <c>asn1rt_nif</c>, which provides the runtime functions
+ for the ASN.1 decoder for the BER back end</p>
+ </item>
+ </list>
+ <p>The reason for this division of the interfaces into compile-time
+ and runtime
+ is that only runtime modules (<c>asn1rt*</c>) need to be loaded in
+ an embedded system.
+ </p>
+
+ <section>
+ <title>Compile-Time Functions</title>
+ <p>The ASN.1 compiler can be started directly from the command line
+ by the <c>erlc</c> program. This is convenient when compiling
+ many ASN.1 files from the command line or when using Makefiles.
+ Some examples of how the <c>erlc</c> command can be used to start
+ the ASN.1 compiler:</p>
+ <pre>
+erlc Person.asn
+erlc -bper Person.asn
+erlc -bber ../Example.asn
+erlc -o ../asnfiles -I ../asnfiles -I /usr/local/standards/asn1 Person.asn</pre>
+ <p>Useful options for the ASN.1 compiler:</p>
+ <taglist>
+ <tag><c>-b[ber | per | uper]</c></tag>
+ <item>
+ <p>Choice of encoding rules. If omitted, <c>ber</c> is the
+ default.</p>
+ </item>
+ <tag><c>-o OutDirectory</c></tag>
+ <item>
+ <p>Where to put the generated files. Default is the current
+ directory.</p>
+ </item>
+ <tag><c>-I IncludeDir</c></tag>
+ <item>
+ <p>Where to search for <c>.asn1db</c> files and ASN.1
+ source specs to resolve references to other
+ modules. This option can be repeated many times if there
+ are several places to search in. The compiler
+ searches the current directory first.</p>
+ </item>
+ <tag><c>+der</c></tag>
+ <item>
+ <p>DER encoding rule. Only when using option <c>-ber</c>.</p>
+ </item>
+ <tag><c>+asn1config</c></tag>
+ <item>
+ <p>This functionality works together with option
+ <c>ber</c>. It enables the specialized decodes, see Section
+ <seealso marker="asn1_spec">Specialized Decode</seealso>.</p>
+ </item>
+ <tag><c>+undec_rest</c></tag>
+ <item>
+ <p>A buffer that holds a message being decoded can also have
+ trailing bytes. If those trailing bytes are important, they
+ can be returned along with the decoded value by compiling
+ the ASN.1 specification with option <c>+undec_rest</c>.
+ The return value from the decoder is
+ <c>{ok,Value,Rest}</c> where <c>Rest</c> is a binary
+          containing the trailing bytes (see the sketch after this list).</p>
+ </item>
+ <tag><c>+'Any Erlc Option'</c></tag>
+ <item>
+ <p>Any option can be added to the Erlang compiler when
+ compiling the generated Erlang files. Any option
+ unrecognized by the ASN.1 compiler is passed to the
+ Erlang compiler.</p>
+ </item>
+ </taglist>
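+      <p>The following is a minimal sketch of the effect of option
+      <c>undec_rest</c>. It assumes that the <c>People</c> specification
+      from the first example has been compiled with
+      <c>[ber,undec_rest]</c> and that the buffer ends with two trailing
+      zero bytes:</p>
+      <pre>
+{ok,Bytes} = 'People':encode('Person', {'Person',"Some Name",roving,50}),
+Buffer = &lt;&lt;Bytes/binary,0,0>>,
+{ok,Person,Rest} = 'People':decode('Person', Buffer),
+%% Rest contains the trailing bytes, here &lt;&lt;0,0>> </pre>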
+ <p>For a complete description of <c>erlc</c>, see
+ ERTS Reference Manual.</p>
+ <p>The compiler and other compile-time functions can also be started
+ from the Erlang shell. Here follows a brief
+ description of the primary functions. For a
+ complete description of each function, see module <c>asn1ct</c> in
+ the <seealso marker="asn1ct">ASN.1 Reference Manual</seealso>.</p>
+ <p>The compiler is started by <c>asn1ct:compile/1</c> with
+ default options, or <c>asn1ct:compile/2</c> if explicit options
+ are given.</p>
+ <p>Example:</p>
+ <pre>
+asn1ct:compile("H323-MESSAGES.asn1"). </pre>
+ <p>This equals:</p>
+ <pre>
+asn1ct:compile("H323-MESSAGES.asn1",[ber]). </pre>
+ <p>If PER encoding is wanted:</p>
+ <pre>
+asn1ct:compile("H323-MESSAGES.asn1",[per]). </pre>
+ <p>The generic encode and decode functions can be called
+ as follows:</p>
+ <pre>
+'H323-MESSAGES':encode('SomeChoiceType',{call,&lt;&lt;"octetstring"&gt;&gt;}).
+'H323-MESSAGES':decode('SomeChoiceType',Bytes). </pre>
+ </section>
+
+ <section>
+ <title>Runtime Functions</title>
+ <p>When an ASN.1 specification is compiled with option <c>ber</c>,
+ the <c>asn1rt_nif</c> module and the NIF library in
+ <c>asn1/priv_dir</c> are needed at runtime.</p>
+ <p>By calling function <c>info/0</c> in a generated module, you
+ get information about which compiler options were used.</p>
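+      <p>For example, for the <c>People</c> module from the first example
+      (a minimal sketch; the exact shape of the returned term depends on
+      the OTP release):</p>
+      <pre>
+Info = 'People':info(),
+%% Info describes which compiler options were used </pre>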
+ </section>
+
+ <section>
+ <title>Errors</title>
+ <p>Errors detected at
+ compile-time are displayed on the screen together with line
+ numbers indicating where in the source file the respective error
+ was detected. If no errors are found, an Erlang ASN.1 module is
+ created.</p>
+ <p>The runtime encoders and decoders execute within a catch and
+ return <c>{ok, Data}</c> or
+ <c>{error, {asn1, Description}}</c> where
+ <c>Description</c> is
+ an Erlang term describing the error.</p>
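+      <p>A sketch of how application code can handle both outcomes, using
+      the <c>People</c> module from the first example
+      (<c>handle_person/1</c> is a placeholder for application code):</p>
+      <code type="none">
+case 'People':decode('Person', Bytes) of
+    {ok,Person} ->
+        handle_person(Person);
+    {error,{asn1,Description}} ->
+        {decode_failed,Description}
+end </code>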
+ </section>
+ </section>
+
+ <section>
+ <marker id="inlineExamples"></marker>
+ <title>Multi-File Compilation</title>
+ <p>There are various reasons for using multi-file compilation:</p>
+ <list type="bulleted">
+ <item>To choose the name for the generated module, for
+ example, because you need to compile the same specs for
+ different encoding rules.</item>
+      <item>To get only one resulting module.</item>
+ </list>
+ <p>Specify which ASN.1 specs to compile in a module with extension
+ <c>.set.asn</c>. Choose a module name and provide the
+ names of the ASN.1 specs. For example, if you have the specs
+ <c>File1.asn</c>, <c>File2.asn</c>, and <c>File3.asn</c>, your
+ module <c>MyModule.set.asn</c> looks as follows:</p>
+ <pre>
+File1.asn
+File2.asn
+File3.asn </pre>
+ <p>If you compile with the following, the result is one merged
+ module <c>MyModule.erl</c> with the generated code from the three
+ ASN.1 specs:</p>
+ <code type="none">
+~> erlc MyModule.set.asn </code>
+ </section>
+
+ <section>
+ <title>Remark about Tags</title>
+
+ <p>Tags used to be important for all users of ASN.1, because it
+ was necessary to add tags manually to certain constructs in order
+ for the ASN.1 specification to be valid. Example of
+ an old-style specification:</p>
+
+ <pre>
+Tags DEFINITIONS ::=
+BEGIN
+ Afters ::= CHOICE { cheese [0] IA5String,
+ dessert [1] IA5String }
+END </pre>
+
+ <p>Without the tags (the numbers in square brackets) the ASN.1
+ compiler refused to compile the file.</p>
+
+ <p>In 1994 the global tagging mode <c>AUTOMATIC TAGS</c> was introduced.
+ By putting <c>AUTOMATIC TAGS</c> in the module header, the ASN.1
+ compiler automatically adds tags when needed. The following is the
+ same specification in <c>AUTOMATIC TAGS</c> mode:</p>
+
+ <pre>
+Tags DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+ Afters ::= CHOICE { cheese IA5String,
+ dessert IA5String }
+END </pre>
+
+ <p>Tags are not mentioned any more in this User's Guide.</p>
+ </section>
+
+ <section>
+ <marker id="ASN1Types"></marker>
+ <title>ASN.1 Types</title>
+ <p>This section describes the ASN.1 types including their
+ functionality, purpose, and how values are assigned in Erlang.
+ </p>
+ <p>ASN.1 has both primitive and constructed types:</p>
+ <p></p>
+ <table>
+ <row>
+ <cell align="left" valign="middle"><em>Primitive Types</em></cell>
+ <cell align="left" valign="middle"><em>Constructed Types</em></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle"><seealso marker="#BOOLEAN">BOOLEAN</seealso></cell>
+ <cell align="left" valign="middle"><seealso marker="#SEQUENCE">SEQUENCE</seealso></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle"><seealso marker="#INTEGER">INTEGER</seealso></cell>
+ <cell align="left" valign="middle"><seealso marker="#SET">SET</seealso></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle"><seealso marker="#REAL">REAL</seealso></cell>
+ <cell align="left" valign="middle"><seealso marker="#CHOICE">CHOICE</seealso></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle"><seealso marker="#NULL">NULL</seealso></cell>
+ <cell align="left" valign="middle"><seealso marker="#SOF">SET OF and SEQUENCE OF</seealso></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle"><seealso marker="#ENUMERATED">ENUMERATED</seealso></cell>
+ <cell align="left" valign="middle"><seealso marker="#ANY">ANY</seealso></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle"><seealso marker="#BIT STRING">BIT STRING</seealso></cell>
+ <cell align="left" valign="middle"><seealso marker="#ANY">ANY DEFINED BY</seealso></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle"><seealso marker="#OCTET STRING">OCTET STRING</seealso></cell>
+ <cell align="left" valign="middle"><seealso marker="#NegotiationTypes">EXTERNAL</seealso></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle"><seealso marker="#Character Strings">Character Strings</seealso></cell>
+ <cell align="left" valign="middle"><seealso marker="#NegotiationTypes">EMBEDDED PDV</seealso></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle"><seealso marker="#OBJECT IDENTIFIER">OBJECT IDENTIFIER</seealso></cell>
+ <cell align="left" valign="middle"><seealso marker="#NegotiationTypes">CHARACTER STRING</seealso></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle"><seealso marker="#Object Descriptor">Object Descriptor</seealso></cell>
+ <cell align="left" valign="middle"></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle"><seealso marker="#The TIME types">TIME Types</seealso></cell>
+ <cell align="left" valign="middle"></cell>
+ </row>
+ <tcaption>Supported ASN.1 Types</tcaption>
+ </table>
+ <marker id="TypeNameValue"></marker>
+ <note>
+ <p>The values of each ASN.1 type have their own representation in Erlang, as
+ described in the following sections. Users must provide
+ these values for encoding according to the representation, as shown in the
+ following example:</p>
+ </note>
+ <pre>
+Operational ::= BOOLEAN --ASN.1 definition </pre>
+ <p>In Erlang code it can look as follows:</p>
+ <pre>
+Val = true,
+{ok,Bytes} = MyModule:encode('Operational', Val), </pre>
+
+ <section>
+ <marker id="BOOLEAN"></marker>
+ <title>BOOLEAN</title>
+ <p>Booleans in ASN.1 express values that can be either
+ <c>TRUE</c> or <c>FALSE</c>.
+ The meanings assigned to <c>TRUE</c> and <c>FALSE</c> are outside the scope
+ of this text.</p>
+ <p>In ASN.1 it is possible to have:</p>
+ <pre>
+Operational ::= BOOLEAN</pre>
+ <p>Assigning a value to type <c>Operational</c> in Erlang is possible by
+ using the following Erlang code:</p>
+ <code type="erl">
+Myvar1 = true,</code>
+ <p>Thus, in Erlang the atoms <c>true</c> and <c>false</c> are used
+ to encode a boolean value.</p>
+ </section>
+
+ <section>
+ <marker id="INTEGER"></marker>
+ <title>INTEGER</title>
+ <p>ASN.1 itself specifies indefinitely large integers. Erlang
+ systems with version 4.3 and higher support very large
+ integers, in practice indefinitely large integers.</p>
+ <p>The concept of subtyping can be applied to integers and
+ to other ASN.1 types. The details of subtyping are not
+ explained here; for more information, see X.680. Various
+ syntaxes are allowed when defining a type as an integer:</p>
+ <pre>
+T1 ::= INTEGER
+T2 ::= INTEGER (-2..7)
+T3 ::= INTEGER (0..MAX)
+T4 ::= INTEGER (0&lt;..MAX)
+T5 ::= INTEGER (MIN&lt;..-99)
+T6 ::= INTEGER {red(0),blue(1),white(2)}</pre>
+ <p>The Erlang representation of an ASN.1 <c>INTEGER</c> is an integer or
+ an atom if a <c>Named Number List</c> (see <c>T6</c> in the previous
+ list) is specified.</p>
+ <p>The following is an example of Erlang code that assigns values for the
+ types in the previous list:</p>
+ <pre>
+T1value = 0,
+T2value = 6,
+T6value1 = blue,
+T6value2 = 0,
+T6value3 = white</pre>
+ <p>These Erlang variables are now bound to valid instances of
+ ASN.1 defined types. This style of value can be passed directly
+ to the encoder for transformation into a series of bytes.</p>
+ <p>The decoder returns an atom if the value corresponds to a
+ symbol in the <c>Named Number List</c>.</p>
+ </section>
+
+ <section>
+ <marker id="REAL"></marker>
+ <title>REAL</title>
+ <p>The following ASN.1 type is used for real numbers:</p>
+ <pre>
+R1 ::= REAL</pre>
+ <p>It is assigned a value in Erlang as follows:</p>
+ <pre>
+R1value1 = "2.14",
+R1value2 = {256,10,-2},</pre>
+ <p>In the last line, notice that the tuple {256,10,-2} is the real number
+ 2.56 in a special notation, which encodes faster than simply
+ stating the number as <c>"2.56"</c>. The arity three tuple is
+ <c>{Mantissa,Base,Exponent}</c>, that is, Mantissa * Base^Exponent.</p>
+ </section>
+
+ <section>
+ <marker id="NULL"></marker>
+ <title>NULL</title>
+ <p>The type <c>NULL</c> is suitable where supply and recognition of a value
+ is important but the actual value is not.</p>
+ <pre>
+Notype ::= NULL</pre>
+ <p>This type is assigned in Erlang as follows:</p>
+ <pre>
+N1 = 'NULL',</pre>
+ <p>The actual value is the quoted atom <c>'NULL'</c>.</p>
+ </section>
+
+ <section>
+ <marker id="ENUMERATED"></marker>
+ <title>ENUMERATED</title>
+ <p>The type <c>ENUMERATED</c> can be used when the value you want to
+ describe can only take one of a set of predefined values. Example:</p>
+ <pre>
+DaysOfTheWeek ::= ENUMERATED {
+ sunday(1),monday(2),tuesday(3),
+ wednesday(4),thursday(5),friday(6),saturday(7) }</pre>
+ <p>For example, to assign a weekday value in Erlang, use the same atom
+ as in the <c>Enumerations</c> of the type definition:</p>
+ <pre>
+Day1 = saturday,</pre>
+ <p>The enumerated type is similar to an integer type, when
+ defined with a set of predefined values. The difference is that
+ an enumerated type can only have specified
+ values, whereas an integer can have any value.</p>
+ </section>
+
+ <section>
+ <marker id="BIT STRING"></marker>
+ <title>BIT STRING</title>
+ <p>The type <c>BIT STRING</c> can be used to model information that
+ is made up of arbitrary length series of bits. It is intended
+ to be used for selection of flags, not for binary files.</p>
+ <p>In ASN.1, <c>BIT STRING</c> definitions can look as follows:</p>
+ <pre>
+Bits1 ::= BIT STRING
+Bits2 ::= BIT STRING {foo(0),bar(1),gnu(2),gnome(3),punk(14)}</pre>
+ <p>The following two notations are available for representation of <c>BIT
+ STRING</c> values in Erlang and as input to the encode functions:</p>
+ <list type="ordered">
+ <item>A bitstring. By default, a <c>BIT STRING</c> with no
+ symbolic names is decoded to an Erlang bitstring.</item>
+ <item>A list of atoms corresponding to atoms in the <c>NamedBitList</c>
+ in the <c>BIT STRING</c> definition. A <c>BIT STRING</c> with symbolic
+ names is always decoded to the format shown in the following
+ example:</item>
+ </list>
+ <pre>
+Bits1Val1 = &lt;&lt;0:1,1:1,0:1,1:1,1:1&gt;&gt;,
+Bits2Val1 = [gnu,punk],
+Bits2Val2 = &lt;&lt;2#1110:4&gt;&gt;,
+Bits2Val3 = [bar,gnu,gnome],</pre>
+ <p><c>Bits2Val2</c> and <c>Bits2Val3</c> denote the same value.</p>
+      <p><c>Bits2Val1</c> is assigned symbolic values. The assignment means
+      that the bits corresponding to <c>gnu</c> and <c>punk</c>, that is, bits
+      2 and 14, are set to 1, and the rest are set to 0. The symbolic values
+      are given as a list of values. If a named value that is not
+      specified in the type definition is used, a runtime error occurs.</p>
+ <p><c>BIT STRING</c>s can also be subtyped with, for example, a <c>SIZE</c>
+ specification:</p>
+ <pre>
+Bits3 ::= BIT STRING (SIZE(0..31)) </pre>
+ <p>This means that no bit higher than 31 can be set.</p>
+
+ <section>
+ <title>Deprecated Representations for BIT STRING</title>
+ <p>In addition to the representations described earlier, the
+ following deprecated representations are available if the
+ specification has been compiled with option
+ <c>legacy_erlang_types</c>:</p>
+ <list type="ordered">
+        <item>As a list of binary digits (0 or 1). This format is
+        accepted as input to the encode functions, and a <c>BIT STRING</c>
+        is decoded to this format if option
+        <c>legacy_bit_string</c> is given.
+ </item>
+ <item>As <c>{Unused,Binary}</c> where <c>Unused</c> denotes
+ how many trailing zero-bits 0-7 that are unused in the
+ least significant byte in <c>Binary</c>. This format is
+ accepted as input to the encode functions, and a <c>BIT
+ STRING</c> is decoded to this format if
+ <c>compact_bit_string</c> has been given.
+ </item>
+ <item>As a hexadecimal number (or an integer). Avoid this
+ as it is easy to misinterpret a <c>BIT
+ STRING</c> value in this format.
+ </item>
+ </list>
+ </section>
+ </section>
+
+ <section>
+ <marker id="OCTET STRING"></marker>
+ <title>OCTET STRING</title>
+      <p><c>OCTET STRING</c> is the simplest of all ASN.1 types. An <c>OCTET
+      STRING</c> simply moves or transfers, for example, binary files or other
+      unstructured information as a sequence of octets, without imposing
+      any encoding or structure on the contents.</p>
+ <p>It is possible to have the following ASN.1 type definitions:</p>
+ <pre>
+O1 ::= OCTET STRING
+O2 ::= OCTET STRING (SIZE(28)) </pre>
+ <p>With the following example assignments in Erlang:</p>
+ <pre>
+O1Val = &lt;&lt;17,13,19,20,0,0,255,254&gt;&gt;,
+O2Val = &lt;&lt;"must be exactly 28 chars...."&gt;&gt;,</pre>
+ <p>By default, an <c>OCTET STRING</c> is always represented as
+ an Erlang binary. If the specification has been compiled with
+ option <c>legacy_erlang_types</c>, the encode functions
+ accept both lists and binaries, and the decode functions
+ decode an <c>OCTET STRING</c> to a list.</p>
+ </section>
+
+ <section>
+ <marker id="Character Strings"></marker>
+ <title>Character Strings</title>
+ <p>ASN.1 supports a wide variety of character sets. The main difference
+ between an <c>OCTET STRING</c> and a character string is that the
+ <c>OCTET STRING</c> has no imposed semantics on the bytes delivered.</p>
+ <p>However, when using, for example, IA5String (which closely
+ resembles ASCII), byte 65 (in decimal
+ notation) <em>means</em> character 'A'.
+ </p>
+ <p>For example, if a defined type is to be a VideotexString and
+ an octet is received with the unsigned integer value <c>X</c>,
+ the octet is to be interpreted as specified in standard
+ ITU-T T.100, T.101.
+ </p>
+ <p>The ASN.1 to Erlang compiler
+ does not determine the correct interpretation of each BER
+ string octet value with different character strings. The
+ application is responsible for interpretation
+ of octets. Therefore, from the BER
+ string point of view, octets are very similar to
+ character strings and are compiled in the same way.
+ </p>
+ <p>When PER is
+ used, there is a significant difference in the encoding scheme
+ between <c>OCTET STRING</c>s and other strings. The constraints
+ specified for a type are especially important for PER, where
+ they affect the encoding.
+ </p>
+ <p>Examples:</p>
+ <pre>
+Digs ::= NumericString (SIZE(1..3))
+TextFile ::= IA5String (SIZE(0..64000)) </pre>
+ <p>The corresponding Erlang assignments:</p>
+ <pre>
+DigsVal1 = "456",
+DigsVal2 = "123",
+TextFileVal1 = "abc...xyz...",
+TextFileVal2 = [88,76,55,44,99,121 .......... a lot of characters here ....]</pre>
+ <p>The Erlang representation for "BMPString" and
+ "UniversalString" is either a list of ASCII values or a list
+ of quadruples. The quadruple representation associates to the
+ Unicode standard representation of characters. The ASCII
+ characters are all represented by quadruples beginning with
+ three zeros like {0,0,0,65} for character 'A'. When
+ decoding a value for these strings, the result is a list of
+ quadruples, or integers when the value is an ASCII character.</p>
+
+ <p>The following example shows how it works. Assume the following
+ specification is in file <c>PrimStrings.asn1</c>:</p>
+ <pre>
+PrimStrings DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+ BMP ::= BMPString
+END </pre>
+
+ <p>Encoding and decoding some strings:</p>
+
+ <pre>
+1> <input>asn1ct:compile('PrimStrings', [ber]).</input>
+ok
+2> <input>{ok,Bytes1} = 'PrimStrings':encode('BMP', [{0,0,53,53},{0,0,45,56}]).</input>
+{ok,&lt;&lt;30,4,53,53,45,56>>}
+3> <input>'PrimStrings':decode('BMP', Bytes1).</input>
+{ok,[{0,0,53,53},{0,0,45,56}]}
+4> <input>{ok,Bytes2} = 'PrimStrings':encode('BMP', [{0,0,53,53},{0,0,0,65}]).</input>
+{ok,&lt;&lt;30,4,53,53,0,65>>}
+5> <input>'PrimStrings':decode('BMP', Bytes2).</input>
+{ok,[{0,0,53,53},65]}
+6> <input>{ok,Bytes3} = 'PrimStrings':encode('BMP', "BMP string").</input>
+{ok,&lt;&lt;30,20,0,66,0,77,0,80,0,32,0,115,0,116,0,114,0,105,0,110,0,103>>}
+7> <input>'PrimStrings':decode('BMP', Bytes3).</input>
+{ok,"BMP string"} </pre>
+
+ <p>Type UTF8String is represented as a UTF-8 encoded binary in
+ Erlang. Such binaries can be created directly using the binary syntax
+ or by converting from a list of Unicode code points using function
+ <c>unicode:characters_to_binary/1</c>.</p>
+
+ <p>The following shows examples of how UTF-8 encoded binaries can
+ be created and manipulated:</p>
+ <pre>
+1> <input>Gs = "Мой маленький Гном".</input>
+[1052,1086,1081,32,1084,1072,1083,1077,1085,1100,1082,1080,
+ 1081,32,1043,1085,1086,1084]
+2> <input>Gbin = unicode:characters_to_binary(Gs).</input>
+&lt;&lt;208,156,208,190,208,185,32,208,188,208,176,208,187,208,
+ 181,208,189,209,140,208,186,208,184,208,185,32,208,147,
+ 208,...>>
+3> <input>Gbin = &lt;&lt;"Мой маленький Гном"/utf8>>.</input>
+&lt;&lt;208,156,208,190,208,185,32,208,188,208,176,208,187,208,
+ 181,208,189,209,140,208,186,208,184,208,185,32,208,147,
+ 208,...>>
+4> <input>Gs = unicode:characters_to_list(Gbin).</input>
+[1052,1086,1081,32,1084,1072,1083,1077,1085,1100,1082,1080,
+ 1081,32,1043,1085,1086,1084]</pre>
+
+ <p>For details, see the <seealso marker="stdlib:unicode">unicode</seealso>
+ module in <c>stdlib</c>.</p>
+
+ <p>In the following example, this ASN.1 specification is used:</p>
+ <pre>
+UTF DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+ UTF ::= UTF8String
+END </pre>
+
+ <p>Encoding and decoding a string with Unicode characters:</p>
+
+ <pre>
+5> <input>asn1ct:compile('UTF', [ber]).</input>
+ok
+6> <input>{ok,Bytes1} = 'UTF':encode('UTF', &lt;&lt;"Гном"/utf8>>).</input>
+{ok,&lt;&lt;12,8,208,147,208,189,208,190,208,188>>}
+7> <input>{ok,Bin1} = 'UTF':decode('UTF', Bytes1).</input>
+{ok,&lt;&lt;208,147,208,189,208,190,208,188>>}
+8> <input>io:format("~ts\n", [Bin1]).</input>
+Гном
+ok
+9> <input>unicode:characters_to_list(Bin1).</input>
+[1043,1085,1086,1084] </pre>
+ </section>
+
+ <section>
+ <marker id="OBJECT IDENTIFIER"></marker>
+ <title>OBJECT IDENTIFIER</title>
+ <p>The type <c>OBJECT IDENTIFIER</c> is used whenever a unique identity is
+ required. An ASN.1 module, a transfer syntax, and so on, is identified
+ with an <c>OBJECT IDENTIFIER</c>. Assume the following example:</p>
+ <pre>
+Oid ::= OBJECT IDENTIFIER</pre>
+ <p>Therefore, the following example is a valid Erlang instance of
+ type 'Oid':</p>
+ <pre>
+OidVal1 = {1,2,55},</pre>
+ <p>The <c>OBJECT IDENTIFIER</c> value is simply a tuple with the
+ consecutive values, which must be integers.
+ </p>
+ <p>The first value is limited to the values 0, 1, or 2. The
+ second value must be in the range 0..39 when the first value
+ is 0 or 1.
+ </p>
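+    <p>For example, both of the following tuples respect these rules
+    (the values themselves are only examples):</p>
+    <pre>
+OidVal2 = {0,9,55},    %% first value 0, so the second value must be in 0..39
+OidVal3 = {2,100,3},   %% first value 2, so the second value is not restricted to 0..39</pre>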
+ <p>The <c>OBJECT IDENTIFIER</c> is an important type and it is
+ widely used within different standards to identify various
+ objects uniquely. Dubuisson: ASN.1 - Communication Between
+ Heterogeneous Systems includes an
+ easy-to-understand description of the use of
+ <c>OBJECT IDENTIFIER</c>.</p>
+ </section>
+
+ <section>
+ <marker id="Object Descriptor"></marker>
+ <title>Object Descriptor</title>
+ <p>Values of this type can be assigned a value as an ordinary string
+ as follows:</p>
+
+ <pre>
+ "This is the value of an Object descriptor"</pre>
+ </section>
+
+ <section>
+ <marker id="The TIME types"></marker>
+ <title>TIME Types</title>
+ <p>Two time types are defined within ASN.1: Generalized
+ Time and Universal Time Coordinated (UTC). Both are assigned a
+ value as an ordinary string within double quotes, for example,
+ "19820102070533.8".</p>
+ <p>For DER encoding, the compiler does not check the validity
+ of the time values. The DER requirements upon those strings are
+ regarded as a matter for the application to fulfill.</p>
+ </section>
+
+ <section>
+ <marker id="SEQUENCE"></marker>
+ <title>SEQUENCE</title>
+ <p>The structured types of ASN.1 are constructed from other types
+ in a manner similar to the concepts of array and struct in C.</p>
+ <p>A <c>SEQUENCE</c> in ASN.1 is
+ comparable with a struct in C and a record in Erlang.
+ A <c>SEQUENCE</c> can be defined as follows:</p>
+ <pre>
+Pdu ::= SEQUENCE {
+ a INTEGER,
+ b REAL,
+ c OBJECT IDENTIFIER,
+ d NULL } </pre>
+ <p>This is a 4-component structure called <c>Pdu</c>. The record format
+ is the major format for representation of <c>SEQUENCE</c> in Erlang.
+ For each <c>SEQUENCE</c> and <c>SET</c> in an ASN.1 module an Erlang
+ record declaration is generated. For <c>Pdu</c>, a record
+ like the following is defined:</p>
+ <pre>
+-record('Pdu',{a, b, c, d}). </pre>
+ <p>The record declarations for a module <c>M</c> are placed in a
+ separate <c>M.hrl</c> file.</p>
+ <p>Values can be assigned in Erlang as follows:</p>
+ <pre>
+MyPdu = #'Pdu'{a=22,b=77.99,c={0,1,2,3,4},d='NULL'}. </pre>
+ <p>The decode functions return a record as result when decoding
+ a <c>SEQUENCE</c> or a <c>SET</c>.</p>
+
+ <p>A <c>SEQUENCE</c> and a <c>SET</c> can contain a component
+ with a <c>DEFAULT</c> keyword followed by the actual value, which
+ is the default value. The <c>DEFAULT</c> keyword means that the
+ application doing the encoding can omit encoding of the value, which
+ results in fewer bytes to send to the receiving application.</p>
+
+ <p>An application can use the atom <c>asn1_DEFAULT</c> to indicate
+ that the encoding is to be omitted for that position in
+ the <c>SEQUENCE</c>.</p>
+
+ <p>Depending on the encoding rules, the encoder can also compare
+ the given value to the default value and automatically omit the
+ encoding if the values are equal. How much effort the encoder makes
+ to compare the values depends on the encoding rules. The DER
+ encoding rules forbid encoding a value equal to the default value,
+ so it has a more thorough and time-consuming comparison than the
+ encoders for the other encoding rules.</p>
+
+ <p>In the following example, this ASN.1 specification is used:</p>
+ <pre>
+File DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+Seq1 ::= SEQUENCE {
+ a INTEGER DEFAULT 1,
+ b Seq2 DEFAULT {aa TRUE, bb 15}
+}
+
+Seq2 ::= SEQUENCE {
+ aa BOOLEAN,
+ bb INTEGER
+}
+
+Seq3 ::= SEQUENCE {
+ bs BIT STRING {a(0), b(1), c(2)} DEFAULT {a, c}
+}
+END </pre>
+ <p>Example where the BER encoder is able to omit encoding
+ of the default values:</p>
+ <pre>
+1> <input>asn1ct:compile('File', [ber]).</input>
+ok
+2> <input>'File':encode('Seq1', {'Seq1',asn1_DEFAULT,asn1_DEFAULT}).</input>
+{ok,&lt;&lt;48,0>>}
+3> <input>'File':encode('Seq1', {'Seq1',1,{'Seq2',true,15}}).</input>
+{ok,&lt;&lt;48,0>>} </pre>
+
+ <p>Example with a named <c>BIT STRING</c> where the BER
+ encoder does not omit the encoding:</p>
+ <pre>
+4> <input>'File':encode('Seq3', {'Seq3',asn1_DEFAULT}).</input>
+{ok,&lt;&lt;48,0>>}
+5> <input>'File':encode('Seq3', {'Seq3',&lt;&lt;2#101:3>>}).</input>
+{ok,&lt;&lt;48,4,128,2,5,160>>} </pre>
+
+ <p>The DER encoder omits the encoding for the same <c>BIT STRING</c>:</p>
+ <pre>
+6> <input>asn1ct:compile('File', [ber,der]).</input>
+ok
+7> <input>'File':encode('Seq3', {'Seq3',asn1_DEFAULT}).</input>
+{ok,&lt;&lt;48,0>>}
+8> <input>'File':encode('Seq3', {'Seq3',&lt;&lt;2#101:3>>}).</input>
+{ok,&lt;&lt;48,0>>} </pre>
+ </section>
+
+ <section>
+ <marker id="SET"></marker>
+ <title>SET</title>
+ <p>In Erlang, the <c>SET</c> type is used exactly as <c>SEQUENCE</c>.
+ Notice that if BER or DER encoding rules are used, decoding a
+ <c>SET</c> is slower than decoding a <c>SEQUENCE</c> because the
+ components must be sorted.</p>
+ </section>
+
+ <section>
+ <title>Extensibility for SEQUENCE and SET</title>
+ <p>When a <c>SEQUENCE</c> or <c>SET</c> contains an extension marker
+ and extension components as the following, the type can get more
+ components in newer versions of the ASN.1 spec:</p>
+ <pre>
+SExt ::= SEQUENCE {
+ a INTEGER,
+ ...,
+ b BOOLEAN }</pre>
+      <p>In this case the type has got a new
+      component <c>b</c>. Thus, incoming messages that are decoded
+      can have more or fewer components than this one.
+ </p>
+ <p>The component <c>b</c> is treated as
+ an original component when encoding a message. In this case, as
+ it is not an optional element, it must be encoded.
+ </p>
+ <p>During decoding, the <c>b</c> field of the record gets the decoded
+ value of the <c>b</c>
+ component, if present, otherwise the value <c>asn1_NOVALUE</c>.</p>
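+      <p>A hedged sketch of this, assuming that <c>SExt</c> is defined in a
+      module called <c>MyModule</c> and that <c>OldBytes</c> was encoded by
+      a peer that only knows the first version of the type (without
+      component <c>b</c>):</p>
+      <pre>
+{ok,{'SExt',IntValue,B}} = 'MyModule':decode('SExt', OldBytes),
+%% B is the atom asn1_NOVALUE, because component b was absent </pre>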
+ </section>
+
+ <section>
+ <marker id="CHOICE"></marker>
+ <title>CHOICE</title>
+ <p>The type <c>CHOICE</c> is a space saver and is similar to the
+ concept of a 'union' in C.</p>
+ <p>Assume the following:</p>
+ <pre>
+SomeModuleName DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+T ::= CHOICE {
+ x REAL,
+ y INTEGER,
+ z OBJECT IDENTIFIER }
+END </pre>
+ <p>It is then possible to assign values as follows:</p>
+ <pre>
+TVal1 = {y,17},
+TVal2 = {z,{0,1,2}},</pre>
+ <p>A <c>CHOICE</c> value is always represented as the tuple
+ <c>{ChoiceAlternative, Val}</c> where <c>ChoiceAlternative</c>
+ is an atom denoting the selected choice alternative.
+ </p>
+
+ <section>
+ <title>Extensible CHOICE</title>
+ <p>When a <c>CHOICE</c> contains an extension marker and the
+ decoder detects an unknown alternative of the <c>CHOICE</c>,
+ the value is represented as follows:</p>
+ <pre>
+{asn1_ExtAlt, BytesForOpenType}</pre>
+ <p>Here <c>BytesForOpenType</c> is a list of bytes constituting the
+ encoding of the "unknown" <c>CHOICE</c> alternative.</p>
+ </section>
+ </section>
+
+ <section>
+ <marker id="SOF"></marker>
+ <title>SET OF and SEQUENCE OF</title>
+ <p>The types <c>SET OF</c> and <c>SEQUENCE OF</c> correspond
+ to the concept of an array
+ in several programming languages. The Erlang syntax for
+ both types is straightforward, for example:</p>
+ <pre>
+Arr1 ::= SET SIZE (5) OF INTEGER (4..9)
+Arr2 ::= SEQUENCE OF OCTET STRING </pre>
+ <p>In Erlang the following can apply:</p>
+ <pre>
+Arr1Val = [4,5,6,7,8],
+Arr2Val = ["abc",[14,34,54],"Octets"], </pre>
+ <p>Notice that the definition of type <c>SET OF</c> implies that
+ the order of the components is undefined, but in practice there is
+ no difference between <c>SET OF</c> and <c>SEQUENCE OF</c>.
+ The ASN.1 compiler for Erlang does not randomize the order of the
+ <c>SET OF</c> components before encoding.</p>
+ <p>However, for a value of type <c>SET OF</c>, the DER
+ encoding format requires the elements to be sent in ascending
+ order of their encoding, which implies an expensive sorting
+ procedure in runtime. Therefore it is recommended to
+ use <c>SEQUENCE OF</c> instead of <c>SET OF</c> if possible.</p>
+ </section>
+
+ <section>
+ <marker id="ANY"></marker>
+ <title>ANY and ANY DEFINED BY</title>
+ <p>The types <c>ANY</c> and <c>ANY DEFINED BY</c> have been removed
+ from the standard since 1994. It is recommended not to use
+ these types any more. They can, however, exist in some old ASN.1
+ modules. The idea with this type was to leave a "hole" in a
+ definition where it was possible to
+ put unspecified data of any kind, even non-ASN.1 data.</p>
+ <p>A value of this type is encoded as an <c>open type</c>.</p>
+ <p>Instead of <c>ANY</c> and <c>ANY DEFINED BY</c>, it is
+ recommended to use
+ <c>information object class</c>, <c>table constraints</c>, and
+      <c>parameterization</c>. In particular, the construct
+      <c>TYPE-IDENTIFIER.@Type</c> accomplishes the same as the
+      deprecated <c>ANY</c>.</p>
+ <p>See also
+ <seealso marker="#Information Object">Information object</seealso>.</p>
+ </section>
+
+ <section>
+ <marker id="NegotiationTypes"></marker>
+ <title>EXTERNAL, EMBEDDED PDV, and CHARACTER STRING</title>
+ <p>The types <c>EXTERNAL</c>, <c>EMBEDDED PDV</c>, and
+ <c>CHARACTER STRING</c> are used in presentation layer negotiation.
+ They are encoded according to their associated type, see X.680.</p>
+ <p>The type <c>EXTERNAL</c> had a slightly different associated type
+ before 1994. X.691 states that encoding must follow
+ the older associated type. So, generated encode/decode
+ functions convert values of the newer format to the older format
+ before encoding. This implies that it is allowed to use
+ <c>EXTERNAL</c> type values of either format for encoding. Decoded
+ values are always returned in the newer format.</p>
+ </section>
+
+ <section>
+ <title>Embedded Named Types</title>
+ <p>The structured types previously described can have other named
+ types as their components. The general syntax to assign a value
+ to component <c>C</c> of a named ASN.1 type <c>T</c> in Erlang
+ is the record syntax <c>#'T'{'C'=Value}</c>.
+ Here <c>Value</c> can be a value of yet another type <c>T2</c>,
+ for example:</p>
+ <pre>
+EmbeddedExample DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+B ::= SEQUENCE {
+ a Arr1,
+ b T }
+
+Arr1 ::= SET SIZE (5) OF INTEGER (4..9)
+
+T ::= CHOICE {
+ x REAL,
+ y INTEGER,
+ z OBJECT IDENTIFIER }
+ END </pre>
+ <p><c>SEQUENCE</c> <c>b</c> can be encoded as follows in Erlang:</p>
+ <pre>
+1> <input>'EmbeddedExample':encode('B', {'B',[4,5,6,7,8],{x,"7.77"}}).</input>
+{ok,&lt;&lt;5,56,0,8,3,55,55,55,46,69,45,50>>} </pre>
+ </section>
+ </section>
+
+ <section>
+ <title>Naming of Records in .hrl Files</title>
+ <p>When an ASN.1 specification is compiled, all defined types of type
+ <c>SET</c> or <c>SEQUENCE</c> result in a corresponding record in the
+ generated <c>.hrl</c> file. This is because the values for
+ <c>SET</c> and <c>SEQUENCE</c> are represented as records as
+ mentioned earlier.</p>
+ <p>Some special cases of this functionality are presented in the
+ next section.</p>
+
+ <section>
+ <title>Embedded Structured Types</title>
+ <p>In ASN.1 it is also possible to have components that are themselves
+ structured types.
+ For example, it is possible to have the following:</p>
+ <pre>
+Emb ::= SEQUENCE {
+ a SEQUENCE OF OCTET STRING,
+ b SET {
+ a INTEGER,
+ b INTEGER DEFAULT 66},
+ c CHOICE {
+ a INTEGER,
+ b FooType } }
+
+FooType ::= [3] VisibleString </pre>
+ <p>The following records are generated because of type <c>Emb</c>:</p>
+ <pre>
+-record('Emb',{a, b, c}).
+-record('Emb_b',{a, b = asn1_DEFAULT}). % the embedded SET type </pre>
+ <p>Values of type <c>Emb</c> can be assigned as follows:</p>
+ <code type="none">
+V = #'Emb'{a=["qqqq",[1,2,255]],
+ b = #'Emb_b'{a=99},
+ c ={b,"Can you see this"}}.</code>
+ <p>For an embedded type of type <c>SEQUENCE</c>/<c>SET</c> in a
+ <c>SEQUENCE</c>/<c>SET</c>, the record name is extended with an
+ underscore and the component name. If the embedded structure is
+ deeper with the <c>SEQUENCE</c>, <c>SET</c>, or <c>CHOICE</c>
+ types in the line, each component name/alternative name is
+ added to the record name.</p>
+ <p>Example:</p>
+ <pre>
+Seq ::= SEQUENCE{
+ a CHOICE{
+ b SEQUENCE {
+ c INTEGER
+ }
+ }
+} </pre>
+ <p>This results in the following record:</p>
+ <pre>
+-record('Seq_a_b',{c}). </pre>
+    <p>If the structured type has a component with an embedded
+      <c>SEQUENCE OF</c>/<c>SET OF</c> whose embedded type in turn
+      is a <c>SEQUENCE</c>/<c>SET</c>, the result is a record with the
+      <c>SEQUENCE OF</c>/<c>SET OF</c>
+      addition, as in the following example:</p>
+ <pre>
+Seq ::= SEQUENCE {
+  a SEQUENCE OF SEQUENCE {
+       b INTEGER
+     },
+  c SET OF SEQUENCE {
+       d INTEGER
+     }
+} </pre>
+ <p>This results in the following records:</p>
+ <pre>
+-record('Seq_a_SEQOF',{b}).
+-record('Seq_c_SETOF',{d}). </pre>
+ <p>A parameterized type is to be considered as an embedded
+ type. Each time such a type is referenced, an instance of it is
+ defined. Thus, in the following example a record with name
+ <c>'Seq_b'</c> is generated in the <c>.hrl</c> file and is used
+ to hold values:</p>
+ <pre>
+Seq ::= SEQUENCE {
+ b PType{INTEGER}
+}
+
+PType{T} ::= SEQUENCE{
+ id T
+} </pre>
+ </section>
+
+ <section>
+ <title>Recursive Types</title>
+ <p>Types that refer to themselves are called recursive types.
+ Example:</p>
+ <pre>
+Rec ::= CHOICE {
+ nothing NULL,
+ something SEQUENCE {
+ a INTEGER,
+ b OCTET STRING,
+ c Rec }} </pre>
+ <p>This is allowed in ASN.1 and the ASN.1-to-Erlang compiler
+ supports this recursive type.
+ A value for this type is assigned in Erlang as follows:</p>
+ <pre>
+V = {something,#'Rec_something'{a = 77,
+ b = "some octets here",
+ c = {nothing,'NULL'}}}. </pre>
+ </section>
+ </section>
+
+ <section>
+ <title>ASN.1 Values</title>
+ <p>Values can be assigned to an ASN.1 type within the ASN.1 code
+ itself, as opposed to the actions in the previous section where
+ a value was assigned to an ASN.1 type in Erlang. The full value
+ syntax of ASN.1 is supported and X.680 describes in detail how
+ to assign values in ASN.1. A short example:</p>
+ <pre>
+TT ::= SEQUENCE {
+ a INTEGER,
+ b SET OF OCTET STRING }
+
+tt TT ::= {a 77,b {"kalle","kula"}} </pre>
+ <p>The value defined here can be used in several ways. It can, for
+ example, be used as the value in some <c>DEFAULT</c> component:</p>
+ <pre>
+SS ::= SET {
+ s OBJECT IDENTIFIER,
+ val TT DEFAULT tt } </pre>
+ <p>It can also be used from inside an Erlang program. If this ASN.1
+ code is defined in ASN.1 module <c>Values</c>, the ASN.1 value
+ <c>tt</c> can be reached from Erlang as a function call to
+ <c>'Values':tt()</c> as in the following example:</p>
+ <pre>
+1> <input>Val = 'Values':tt().</input>
+{'TT',77,["kalle","kula"]}
+2> <input>{ok,Bytes} = 'Values':encode('TT',Val).</input>
+{ok,&lt;&lt;48,18,128,1,77,161,13,4,5,107,97,108,108,101,4,4,
+ 107,117,108,97&gt;&gt;}
+4> <input>'Values':decode('TT',Bytes).</input>
+{ok,{'TT',77,["kalle","kula"]}}
+5> </pre>
+ <p>This example shows that a function is generated by the compiler
+ that returns a valid Erlang representation of the value, although
+ the value is of a complex type.</p>
+ <p>Furthermore, a macro is generated for each value in the <c>.hrl</c>
+ file. So, the defined value <c>tt</c> can also be extracted by
+ <c>?tt</c> in application code.</p>
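+    <p>The following is a minimal sketch of how the macro can be used
+    from application code, assuming that the generated header file is
+    named <c>Values.hrl</c>:</p>
+    <code type="none">
+-include("Values.hrl").
+
+encode_default() ->
+    %% ?tt expands to the Erlang representation of the ASN.1 value tt
+    'Values':encode('TT', ?tt).</code>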
+ </section>
+
+ <section>
+ <title>Macros</title>
+ <p>The type <c>MACRO</c> is not supported. It is no longer part of
+ the ASN.1 standard.</p>
+ </section>
+
+ <section>
+ <marker id="Information Object"></marker>
+ <title>ASN.1 Information Objects (X.681)</title>
+ <p>Information Object Classes, Information Objects, and Information
+ Object Sets (in the following called classes, objects, and
+ object sets, respectively) are defined in the standard
+ definition X.681. Only a brief explanation is given here.</p>
+    <p>These constructs make it possible to define open types, that
+ is, values of that type can be of any ASN.1 type. Also,
+ relationships can be defined between different types and
+ values, as classes can hold types, values, objects, object
+ sets, and other classes in their fields. A class can be
+ defined in ASN.1 as follows:</p>
+ <pre>
+GENERAL-PROCEDURE ::= CLASS {
+ &amp;Message,
+ &amp;Reply OPTIONAL,
+ &amp;Error OPTIONAL,
+ &amp;id PrintableString UNIQUE
+}
+WITH SYNTAX {
+ NEW MESSAGE &amp;Message
+ [REPLY &amp;Reply]
+ [ERROR &amp;Error]
+ ADDRESS &amp;id
+} </pre>
+ <p>An object is an instance of a class. An object set is a set
+ containing objects of a specified class. A definition can look
+ as follows:</p>
+ <pre>
+object1 GENERAL-PROCEDURE ::= {
+ NEW MESSAGE PrintableString
+ ADDRESS "home"
+}
+
+object2 GENERAL-PROCEDURE ::= {
+ NEW MESSAGE INTEGER
+ ERROR INTEGER
+ ADDRESS "remote"
+}</pre>
+ <p>The object <c>object1</c> is an instance of the class
+ <c>GENERAL-PROCEDURE</c> and has one type field and one
+    fixed type value field. The object <c>object2</c> also has an
+ optional field <c>ERROR</c>, which is a type field. The field
+ <c>ADDRESS</c> is a <c>UNIQUE</c> field. Objects in an object set
+ must have unique values in their <c>UNIQUE</c> field, as in
+ <c>GENERAL-PROCEDURES</c>:</p>
+ <pre>
+GENERAL-PROCEDURES GENERAL-PROCEDURE ::= {
+ object1 | object2} </pre>
+ <p>You cannot encode a class, object, or object set, only refer to
+ it when defining other ASN.1 entities. Typically you refer to a
+ class as well as to object sets by table constraints and component
+ relation constraints (X.682) in ASN.1 types, as in the following:</p>
+ <pre>
+StartMessage ::= SEQUENCE {
+ msgId GENERAL-PROCEDURE.&amp;id ({GENERAL-PROCEDURES}),
+  content GENERAL-PROCEDURE.&amp;Message ({GENERAL-PROCEDURES}{@msgId})
+ } </pre>
+ <p>In type <c>StartMessage</c>, the constraint following field
+ <c>content</c> tells that in a value of type
+ <c>StartMessage</c> the value in field <c>content</c> must
+ come from the same object that is chosen by field <c>msgId</c>.</p>
+ <p>So, the value
+ <c>#'StartMessage'{msgId="home",content="Any Printable String"}</c>
+ is legal to encode as a <c>StartMessage</c> value. However, the value
+ <c>#'StartMessage'{msgId="remote", content="Some String"}</c>
+ is illegal as the constraint in <c>StartMessage</c> tells that
+ when you have chosen a value from a specific object in object
+ set <c>GENERAL-PROCEDURES</c> in field
+ <c>msgId</c>, you must choose a value from that same object in
+    the content field too. In this second case, the value in the
+    content field must be an <c>INTEGER</c> value.</p>
+ <p><c>StartMessage</c> can in field <c>content</c> be
+ encoded with a value of any type that an object in object set
+ <c>GENERAL-PROCEDURES</c> has in its <c>NEW MESSAGE</c> field.
+ This field refers to a type field
+ <c>&amp;Message</c> in the class. Field <c>msgId</c> is always
+ encoded as a <c>PrintableString</c>, as the field refers to a
+ fixed type in the class.</p>
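+    <p>Assuming that the specification above is compiled into a module
+    named <c>'MyModule'</c> (the module name is only an example), the
+    legal value above can be encoded and decoded as follows. This is a
+    minimal sketch:</p>
+    <code type="none">
+Msg = #'StartMessage'{msgId = "home", content = "Any Printable String"},
+{ok,Bytes} = 'MyModule':encode('StartMessage', Msg),
+{ok,_Decoded} = 'MyModule':decode('StartMessage', Bytes).</code>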
+ <p>In practice, object sets are usually declared to be extensible so
+ that more objects can be added to the set later. Extensibility is
+ indicated as follows:</p>
+ <pre>
+GENERAL-PROCEDURES GENERAL-PROCEDURE ::= {
+ object1 | object2, ...} </pre>
+ <p>When decoding a type that uses an extensible set constraint,
+ it is always possible that the value in field <c>UNIQUE</c>
+ is unknown (that is, the type has been encoded with a later
+ version of the ASN.1 specification). The unencoded data is then
+ returned wrapped in a tuple as follows:</p>
+
+ <pre>
+{asn1_OPENTYPE,Binary}</pre>
+
+ <p>Here <c>Binary</c> is an Erlang binary that contains the encoded
+ data. (If option <c>legacy_erlang_types</c> has been given,
+ only the binary is returned.)</p>
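+    <p>Application code can match on this wrapper to separate known
+    values from unknown ones. The following is a minimal sketch; the
+    function and variable names are only examples:</p>
+    <code type="none">
+handle_content({asn1_OPENTYPE, Bin}) when is_binary(Bin) ->
+    %% Encoded with an object unknown to this version of the
+    %% specification; keep the data encoded.
+    {unknown, Bin};
+handle_content(Value) ->
+    {known, Value}.</code>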
+ </section>
+
+ <section>
+ <title>Parameterization (X.683)</title>
+ <p>Parameterization, which is defined in X.683, can be used when
+ defining types, values, value sets, classes, objects, or object sets.
+ A part of a definition can be supplied as a parameter. For
+ example, if a <c>Type</c> is used in a definition with a certain
+ purpose, you want the type name to express the intention. This
+ can be done with parameterization.</p>
+    <p>When many types (or other ASN.1 entities) differ only in some
+    minor details, but the structure of the types is similar, only
+ one general type can be defined and the differences can be supplied
+ through parameters.</p>
+ <p>Example of use of parameterization:</p>
+ <pre>
+General{Type} ::= SEQUENCE
+{
+ number INTEGER,
+ string Type
+}
+
+T1 ::= General{PrintableString}
+
+T2 ::= General{BIT STRING}</pre>
+ <p>An example of a value that can be encoded as type <c>T1</c> is
+ <c>{12,"hello"}</c>.</p>
+ <p>Notice that the compiler does not generate encode/decode functions
+ for parameterized types, only for the instances of the parameterized
+ types. Therefore, if a file contains the types <c>General{}</c>,
+ <c>T1</c>, and <c>T2</c> as in the previous example, encode/decode
+ functions are only generated for <c>T1</c> and <c>T2</c>.
+ </p>
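+    <p>For example, assuming that the specification is compiled into a
+    module named <c>'MyModule'</c> and that the generated record for
+    <c>T1</c> is named <c>'T1'</c> (both names are only assumptions
+    following the conventions described earlier), a <c>T1</c> value can
+    be encoded and decoded as follows:</p>
+    <code type="none">
+{ok,Bytes} = 'MyModule':encode('T1', #'T1'{number = 12, string = "hello"}),
+{ok,_Decoded} = 'MyModule':decode('T1', Bytes).</code>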
+ </section>
+</chapter>
+
diff --git a/lib/asn1/doc/src/asn1_introduction.xml b/lib/asn1/doc/src/asn1_introduction.xml
new file mode 100644
index 0000000000..ae0379684a
--- /dev/null
+++ b/lib/asn1/doc/src/asn1_introduction.xml
@@ -0,0 +1,99 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>1997</year><year>2013</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>Introduction</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date>2015-03-31</date>
+ <rev>A</rev>
+ <file>asn1_introduction.xml</file>
+ </header>
+
+ <p>The <c>ASN.1</c> application provides the following:</p>
+
+ <list type="bulleted">
+ <item>An ASN.1 compiler for Erlang, which generates encode and
+ decode functions to be used by Erlang programs sending and
+ receiving ASN.1 specified data.</item>
+ <item>Runtime functions used by the generated code.</item>
+ <item>Support for the following encoding rules:
+ <list><item>Basic Encoding Rules (BER)</item>
+ <item>Distinguished Encoding Rules (DER), a specialized form of
+ BER that is used in security-conscious applications</item>
+ <item>Packed Encoding Rules (PER), both the aligned and
+ unaligned variant</item>
+ </list>
+ </item>
+ </list>
+
+ <section>
+ <title>Scope</title>
+ <p>This application covers all features of ASN.1 up to the 1997
+ edition of the specification. In the 2002 edition,
+ new features were introduced. The following features
+ of the 2002 edition are fully or partly supported:</p>
+ <list type="bulleted">
+ <item>
+      <p>Decimal notation (for example, <c>"1.5e3"</c>) for REAL values.
+ The NR1, NR2, and NR3 formats as explained in ISO 6093 are
+ supported.</p>
+ </item>
+ <item>
+ <p>The <c>RELATIVE-OID</c> type for relative object identifiers is
+ fully supported.</p>
+ </item>
+ <item>
+ <p>The subtype constraint (<c>CONTAINING</c>/<c>ENCODED BY</c>) to
+ constrain the content of an octet string or a bit string is
+ parsed when compiling, but no further action is taken. This
+ constraint is not a PER-visible constraint.</p>
+ </item>
+ <item>
+ <p>The subtype constraint by regular expressions (<c>PATTERN</c>)
+ for character string types is parsed when compiling, but no
+ further action is taken. This constraint is not a
+ PER-visible constraint.</p>
+ </item>
+ <item>
+ <p>Multiple-line comments as in C, <c>/* ... */</c>, are
+ supported.</p>
+ </item>
+ </list>
+ </section>
+
+ <section>
+ <title>Prerequisites</title>
+ <p>It is assumed that the reader is familiar with the Erlang
+    programming language, with the concepts of OTP, and with the
+ ASN.1 notation. The ASN.1 notation is documented in the standard
+ definition X.680, which is the primary text. It can also be
+ helpful, but not necessary, to read the standard definitions
+ X.681, X.682, X.683, X.690, and X.691.</p>
+ <p>A good book explaining those reference texts is
+ Dubuisson: ASN.1 - Communication Between Heterogeneous Systems,
+    which is free to download at
+ <url href="http://www.oss.com/asn1/dubuisson.html">http://www.oss.com/asn1/dubuisson.html</url>.</p>
+ </section>
+
+</chapter>
+
diff --git a/lib/asn1/doc/src/asn1_overview.xml b/lib/asn1/doc/src/asn1_overview.xml
new file mode 100644
index 0000000000..4a10819c36
--- /dev/null
+++ b/lib/asn1/doc/src/asn1_overview.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>1997</year><year>2013</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>ASN.1</title>
+ <prepared>Kenneth Lundin</prepared>
+ <docno></docno>
+ <date>1999-03-25</date>
+ <rev>D</rev>
+ <file>asn1_overview.xml</file>
+ </header>
+
+<section>
+ <title>Introduction</title>
+
+  <p>ASN.1 (Abstract Syntax Notation One) is a formal language for
+ describing data structures to be exchanged between distributed
+ computer systems. The purpose of ASN.1 is to have a platform
+ and programming language independent notation to express types
+ using a standardized set of rules for the transformation of
+ values of a defined type into a stream of bytes. This stream of
+ bytes can then be sent on any type of communication
+ channel. This way, two applications written in different
+ programming languages running on different computers, and with
+ different internal representation of data, can exchange instances
+ of structured data types.</p>
+
+</section>
+</chapter>
+
diff --git a/lib/asn1/doc/src/asn1_spec.xmlsrc b/lib/asn1/doc/src/asn1_spec.xmlsrc
index 9001aca65c..e050dff553 100644
--- a/lib/asn1/doc/src/asn1_spec.xmlsrc
+++ b/lib/asn1/doc/src/asn1_spec.xmlsrc
@@ -29,94 +29,100 @@
<file>asn1_spec.xml</file>
</header>
<marker id="SpecializedDecodes"></marker>
- <p>When performance is of highest priority and one is interested in
- a limited part of the ASN.1 encoded message, before one decide what
- to do with the rest of it, one may want to decode only this small
- part. The situation may be a server that has to decide to which
- addressee it will send a message. The addressee may be interested in
- the entire message, but the server may be a bottleneck that one want
- to spare any unnecessary load. Instead of making two <em>complete decodes</em> (the normal case of decode), one in the server and one
- in the addressee, it is only necessary to make one <em>specialized decode</em>(in the server) and another complete decode(in the
- addressee). The following specialized decodes <em>exclusive decode</em> and <em>selected decode</em> support to solve this and
- similar problems.
- </p>
- <p>So far this functionality is only provided when using the
- optimized BER_BIN version, that is when compiling with the
- options <c>ber_bin</c> and <c>optimize</c>. It does also work
- using the <c>nif</c> option. We have no intent to make this
- available on the default BER version, but maybe in the PER_BIN
- version (<c>per_bin</c>).
- </p>
+ <p>When performance is of highest priority and you are interested in
+ a limited part of the ASN.1 encoded message before deciding what
+ to do with the rest of it, an option is to decode only this small
+ part. The situation can be a server that has to decide the
+ addressee of a message. The addressee can be interested in
+ the entire message, but the server can be a bottleneck that you want
+    to spare from any unnecessary load.</p>
+ <p> Instead of making two <em>complete decodes</em> (the normal case of
+ decode), one in the server and one in the addressee, it is only
+    necessary to make one <em>specialized decode</em> (in the server)
+    and another complete decode (in the addressee). This section
+    describes the following two specialized decodes, which help to
+    solve this and similar problems:</p>
+ <list type="bulleted">
+ <item><em>Exclusive decode</em></item>
+ <item><em>Selected decode</em></item>
+ </list>
+ <p>This functionality is only provided when using <c>BER</c>
+ (option <c>ber</c>).</p>
<section>
<title>Exclusive Decode</title>
<p>The basic idea with exclusive
- decode is that you specify which parts of the message you want to
+ decode is to specify which parts of the message you want to
exclude from being decoded. These parts remain encoded and are
- returned in the value structure as binaries. They may be decoded
+ returned in the value structure as binaries. They can be decoded
in turn by passing them to a certain <c>decode_part/2</c>
- function. The performance gain is high when the message is large
- and you can do an exclusive decode and later on one or several
- decodes of the parts or a second complete decode instead of two or
+ function. The performance gain is high for large messages.
+ You can do an exclusive decode and later one or more
+ decodes of the parts, or a second complete decode instead of two or
more complete decodes.
</p>
<section>
- <title>How To Make It Work</title>
- <p>In order to make exclusive decode work you have to do the
- following:
+ <title>Procedure</title>
+ <p>To perform an exclusive decode:
</p>
<list type="bulleted">
- <item>First,decide the name of the function for the exclusive
- decode.</item>
- <item>Second, write instructions that must consist of the name
- of the exclusive decode function, the name of the ASN.1
- specification and a notation that tells which parts of the
- message structure will be excluded from decode. These
- instructions shall be included in a configuration
- file. </item>
- <item>Third, compile with the additional option
- <c>asn1config</c>. The compiler searches for a configuration
- file with the same name as the ASN.1 spec but with the
- extension .asn1config. This configuration file is not the same
- as used for compilation of a set of files. See section
- <seealso marker="#UndecodedPart">Writing an Exclusive Decode Instruction.</seealso></item>
+ <item><em>Step 1:</em> Decide the name of the function for the
+ exclusive decode.</item>
+ <item><p><em>Step 2:</em> Include the following instructions in
+ a configuration file:</p>
+ <list type="bulleted">
+ <item>The name of the exclusive decode function</item>
+ <item>The name of the ASN.1 specification</item>
+ <item>A notation that tells which parts of the message
+        structure are to be excluded from decode</item>
+ </list></item>
+      <item><em>Step 3:</em> Compile with the additional option
+ <c>asn1config</c>. The compiler searches for a configuration
+ file with the same name as the ASN.1 specification but with
+ extension <c>.asn1config</c>. This configuration file is not
+ the same as used for compilation of a set of files. See Section
+ <seealso marker="#UndecodedPart">Writing an Exclusive Decode
+ Instruction.</seealso></item>
</list>
</section>
<section>
<title>User Interface</title>
- <p>The run-time user interface for exclusive decode consists of
- two different functions. First, the function for an exclusive
- decode, whose name the user decides in the configuration
- file. Second, the compiler generates a <c>decode_part/2</c>
- function when exclusive decode is chosen. This function decodes
- the parts that were left undecoded during the exclusive
- decode. Both functions are described below.
- </p>
- <p>If the exclusive decode function has for example got the name
+ <p>The runtime user interface for exclusive decode consists of
+ the following two functions:</p>
+ <list type="bulleted">
+ <item>A function for an exclusive decode, whose name the user
+ decides in the configuration file</item>
+      <item>A <c>decode_part/2</c> function, generated by the compiler
+      when exclusive decode is chosen, that decodes the parts left
+      undecoded during the exclusive decode</item>
+ </list>
+ <p>Both functions are described in the following.</p>
+ <p>If the exclusive decode function has, for example, the name
<c>decode_exclusive</c> and an ASN.1 encoded message
- <c>Bin</c> shall be exclusive decoded, the call is:</p>
+ <c>Bin</c> is to be exclusive decoded, the call is as follows:</p>
<pre>
{ok,Excl_Message} = 'MyModule':decode_exclusive(Bin) </pre>
<marker id="UndecodedPart"></marker>
- <p>The result <c>Excl_Message</c> has the same structure as an
- complete decode would have, except for the parts of the top-type
- that were not decoded. The undecoded parts will be on their place
- in the structure on the format <c>{Type_Key,Undecoded_Value}</c>.
+ <p>The result <c>Excl_Message</c> has the same structure as a
+ complete decode would have, except for the parts of the top type
+      that were not decoded. The undecoded parts are at their places
+      in the structure, in the format <c>{Type_Key,Undecoded_Value}</c>.
</p>
- <p>Each undecoded part that shall be decoded must be fed into the <c>decode_part/2</c> function,like:</p>
+ <p>Each undecoded part that is to be decoded must be fed into
+ function <c>decode_part/2</c> as follows:</p>
<pre>
-{ok,Part_Message} = 'MyModule':decode_part(Type_Key,Undecoded_Value) </pre>
+{ok,Part_Message} = 'MyModule':decode_part(Type_Key,Undecoded_Value)</pre>
</section>
<section>
<marker id="Exclusive Instruction"></marker>
<title>Writing an Exclusive Decode Instruction</title>
- <p>This instruction is written in the configuration file on the
- format:</p>
+ <p>This instruction is written in the configuration file
+ in the following format:</p>
<pre>
-
Exclusive_Decode_Instruction = {exclusive_decode,{Module_Name,Decode_Instructions}}.
Module_Name = atom()
@@ -137,70 +143,76 @@ Element = {Name,parts} |
Top_Type = atom()
-Name = atom()
- </pre>
- <p>Observe that the instruction must be a valid Erlang term ended
- by a dot.
+Name = atom()</pre>
+ <p>The instruction must be a valid Erlang term ended by a dot.
</p>
- <p>In the <c>Type_List</c> the "path" from the top type to each
- undecoded sub-components is described. The top type of the path is
+ <p>In <c>Type_List</c> the "path" from the top type to each
+      undecoded subcomponent is described. The top type of the path is
an atom, the name of it. The action on each component/type that
- follows will be described by one of <c>{Name,parts}, {Name,undecoded}, {Name,Element_List}</c></p>
- <p>The use and effect of the actions are:
+ follows is described by one of
+ <c>{Name,parts}, {Name,undecoded}, {Name,Element_List}</c>.</p>
+ <p>The use and effect of the actions are as follows:
</p>
<list type="bulleted">
- <item><c>{Name,undecoded}</c> Tells that the element will be
- left undecoded during the exclusive decode. The type of Name may
- be any ASN.1 type. The value of element Name will be returned as a
- tuple,as mentioned <seealso marker="#UndecodedPart">above</seealso>, in the value structure of the top type.</item>
- <item><c>{Name,parts}</c> The type of Name may be one of
- SEQUENCE OF or SET OF. The action implies that the different
- components of Name will be left undecoded. The value of Name
- will be returned as a tuple, as <seealso marker="#UndecodedPart">above </seealso>, where the second element is a list of
- binaries. That is because the representation of a SEQUENCE OF/
- SET OF in Erlang is a list of its internal type. Any of the
- elements of this list or the entire list can be decoded by the
- <c>decode_part</c> function.</item>
- <item><c>{Name,Element_List}</c>This action is used when one or
- more of the sub-types of Name will be exclusive decoded.</item>
+ <item><c>{Name,undecoded}</c> - Tells that the element is left
+ undecoded during the exclusive decode. The type of <c>Name</c>
+ can be any ASN.1 type. The value of element <c>Name</c> is
+ returned as a tuple (as mentioned in the previous section) in
+ the value structure of the top type.</item>
+ <item><c>{Name,parts}</c> - The type of <c>Name</c> can be one of
+ <c>SEQUENCE OF</c> or <c>SET OF</c>. The action implies that
+ the different components of <c>Name</c> are left undecoded. The
+ value of <c>Name</c> is returned as a tuple (as mentioned in
+ the previous section) where the second element is a list of
+ binaries. This is because the representation of a <c>SEQUENCE OF</c>
+ or a <c>SET OF</c> in Erlang is a list of its internal type. Any
+ of the elements in this list or the entire list can be decoded by
+ function <c>decode_part</c>.</item>
+ <item><c>{Name,Element_List}</c> - This action is used when one or
+        more of the subtypes of <c>Name</c> are to be exclusive decoded.</item>
</list>
- <p>Name in the actions above may be a component name of a
- SEQUENCE or a SET or a name of an alternative in a CHOICE.
+ <p><c>Name</c> in these actions can be a component name of a
+      <c>SEQUENCE</c> or a <c>SET</c>, or a name of an alternative
+ in a <c>CHOICE</c>.
</p>
</section>
<section>
<title>Example</title>
- <p>In the examples below we use the definitions from the following ASN.1 spec:</p>
+      <p>In these examples, the definitions from the following ASN.1
+ specification are used:</p>
<marker id="Asn1spec"></marker>
<codeinclude file="Seq.asn" tag="" type="none"></codeinclude>
- <p>If <c>Button</c> is a top type and we want to exclude
- component <c>number</c> from decode the Type_List in the
- instruction in the configuration file will be
- <c>['Button',[{number,undecoded}]]</c>. If we call the decode
- function <c>decode_Button_exclusive</c> the Decode_Instruction
- will be
+      <p>If <c>Button</c> is a top type and component <c>number</c> is
+      to be excluded from decode, <c>Type_List</c> in the
+ instruction in the configuration file is
+ <c>['Button',[{number,undecoded}]]</c>. If you call the decode
+ function <c>decode_Button_exclusive</c>, <c>Decode_Instruction</c> is
<c>{decode_Button_exclusive,['Button',[{number,undecoded}]]}</c>.
</p>
- <p>We also have another top type <c>Window</c> whose sub
- component actions in type <c>Status</c> and the parts of component
- <c>buttonList</c> shall be left undecoded. For this type we name
- the function <c>decode__Window_exclusive</c>. The whole
- Exclusive_Decode_Instruction configuration is as follows: </p>
+      <p>Another top type is <c>Window</c>, for which component
+      <c>actions</c> in type <c>Status</c> and the parts of component
+ <c>buttonList</c> are to be left undecoded. For this type, the
+ function is named <c>decode__Window_exclusive</c>. The complete
+ <c>Exclusive_Decode_Instruction</c> configuration is as follows:</p>
<codeinclude file="Seq.asn1config" tag="" type="none"></codeinclude>
+ <p>The following figure shows the bytes of a <c>Window:status</c>
+ message. The components <c>buttonList</c> and <c>actions</c> are
+ excluded from decode. Only <c>state</c> and <c>enabled</c> are decoded
+ when <c>decode__Window_exclusive</c> is called.</p>
<p></p>
<image file="exclusive_Win_But.gif">
- <icaption>Figure symbolizes the bytes of a Window:status message. The components buttonList and actions are excluded from decode. Only state and enabled are decoded when decode__Window_exclusive is called. </icaption>
+ <icaption>Bytes of a Window:status Message</icaption>
</image>
<p></p>
- <p>Compiling GUI.asn including the configuration file is done like:</p>
+ <p>Compiling <c>GUI.asn</c> including the configuration file is done
+ as follows:</p>
<pre>
-unix> erlc -bber_bin +optimize +asn1config GUI.asn
+unix> erlc -bber +asn1config GUI.asn
-erlang> asn1ct:compile('GUI',[ber_bin,optimize,asn1config]). </pre>
- <p>The module can be used like:</p>
+erlang> asn1ct:compile('GUI', [ber,asn1config]).</pre>
+ <p>The module can be used as follows:</p>
<pre>
-
1> Button_Msg = {'Button',123,true}.
{'Button',123,true}
2> {ok,Button_Bytes} = 'GUI':encode('Button',Button_Msg).
@@ -289,35 +301,39 @@ BoolOpt,{Type_Key_Choice,Val_Choice}}}}=
11> 'GUI':decode_part(Type_Key_SeqOf,hd(Val_SEQOF)).
{ok,{'Button',3,true}}
12> 'GUI':decode_part(Type_Key_Choice,Val_Choice).
-{ok,{possibleActions,[{'Action',16,{'Button',17,true}}]}}
- </pre>
+{ok,{possibleActions,[{'Action',16,{'Button',17,true}}]}}</pre>
</section>
</section>
<section>
<title>Selective Decode</title>
- <p>This specialized decode decodes one single subtype of a
- constructed value. It is the fastest method to extract one sub
- value. The typical use of this decode is when one want to
- inspect, for instance a version number,to be able to decide what
+ <p>This specialized decode decodes a subtype of a
+ constructed value and is the fastest method to extract a
+ subvalue. This decode is typically used when you want to
+ inspect, for example, a version number, to be able to decide what
to do with the entire value. The result is returned as
<c>{ok,Value}</c> or <c>{error,Reason}</c>.
</p>
<section>
- <title>How To Make It Work</title>
- <p>The following steps are necessary:
+ <title>Procedure</title>
+ <p>To perform a selective decode:
</p>
<list type="bulleted">
- <item>Write instructions in the configuration
- file. Including the name of a user function, the name of the ASN.1
- specification and a notation that tells which part of the type
- will be decoded. </item>
- <item>Compile with the additional option
- <c>asn1config</c>. The compiler searches for a configuration file
- with the same name as the ASN.1 spec but with the extension
- .asn1config. In the same file you can provide configuration specs
- for exclusive decode as well. The generated Erlang module has the
+ <item><p><em>Step 1:</em> Include the following instructions in
+ the configuration file:</p>
+ <list type="bulleted">
+ <item>The name of the user function</item>
+ <item>The name of the ASN.1 specification</item>
+        <item>A notation that tells which part of the type is to be
+ decoded</item>
+ </list></item>
+ <item><em>Step 2:</em> Compile with the additional option
+ <c>asn1config</c>. The compiler searches for a configuration file
+ with the same name as the ASN.1 specification, but with extension
+ <c>.asn1config</c>. In the same file you can also provide
+ configuration specifications for exclusive decode.
+ The generated Erlang module has the
usual functionality for encode/decode preserved and the
specialized decode functionality added. </item>
</list>
@@ -326,21 +342,20 @@ BoolOpt,{Type_Key_Choice,Val_Choice}}}}=
<section>
<title>User Interface</title>
<p>The only new user interface function is the one provided by the
- user in the configuration file. You can invoke that function by
+      user in the configuration file. The function is invoked using
the <c>ModuleName:FunctionName</c> notation.
</p>
- <p>So, if you have the following spec
+ <p>For example, if the configuration file includes the specification
<c>{selective_decode,{'ModuleName',[{selected_decode_Window,TypeList}]}}</c>
- in the con-fig file, you do the selective decode by
+ do the selective decode by
<c>{ok,Result}='ModuleName':selected_decode_Window(EncodedBinary).</c></p>
</section>
<section>
<marker id="Selective Instruction"></marker>
<title>Writing a Selective Decode Instruction</title>
- <p>It is possible to describe one or many selective decode
- functions in a configuration file, you have to use the following
- notation:</p>
+ <p>One or more selective decode functions can be described in a
+ configuration file. Use the following notation:</p>
<pre>
Selective_Decode_Instruction = {selective_decode,{Module_Name,Decode_Instructions}}.
@@ -358,37 +373,43 @@ Element_List = Name|List_Selector
Name = atom()
-List_Selector = [integer()] </pre>
- <p>Observe that the instruction must be a valid Erlang term ended
- by a dot.
- </p>
- <p>The <c>Module_Name</c> is the same as the name of the ASN.1
- spec, but without the extension. A <c>Decode_Instruction</c> is
- a tuple with your chosen function name and the components from
- the top type that leads to the single type you want to
- decode. Notice that you have to choose a name of your function
- that will not be the same as any of the generated functions. The
- first element of the <c>Type_List</c> is the top type of the
- encoded message. In the <c>Element_List</c> it is followed by
- each of the component names that leads to selected type. Each of
- the names in the <c>Element_List</c> must be constructed types
- except the last name, which can be any type.
+List_Selector = [integer()]</pre>
+ <p>The instruction must be a valid Erlang term ended by a dot.
</p>
- <p>The List_Selector makes it possible to choose one of the
- encoded components in a SEQUENCE OF/ SET OF. It is also possible
- to go further in that component and pick a sub type of that to
- decode. So in the <c>Type_List</c>: <c>['Window',status,buttonList,[1],number]</c> the
- component <c>buttonList</c> has to be a SEQUENCE OF or SET OF type. In
- this example component <c>number</c> of the first of the encoded
- elements in the SEQUENCE OF <c>buttonList</c> is selected. This apply on
- the ASN.1 spec <seealso marker="#Asn1spec">above</seealso>.
+ <list type="bulleted">
+ <item><c>Module_Name</c> is the same as the name of the ASN.1
+ specification, but without the extension.</item>
+ <item><c>Decode_Instruction</c> is a tuple with your chosen
+      function name and the components from the top type that lead
+      to the single type you want to decode. Ensure that the name
+      of your function is not the same as any of the generated
+ functions.</item>
+ <item> The first element of <c>Type_List</c> is the top type of the
+ encoded message. In <c>Element_List</c>, it is followed by
+      each of the component names that lead to the selected type.</item>
+ <item>Each name in <c>Element_List</c> must be a constructed type
+ except the last name, which can be any type.</item>
+ <item><c>List_Selector</c> makes it possible to choose one of the
+      encoded components in a <c>SEQUENCE OF</c> or a <c>SET OF</c>.
+ It is also possible to go further in that component and pick a
+ subtype of that to decode. So, in the <c>Type_List</c>:
+ <c>['Window',status,buttonList,[1],number]</c>, component
+ <c>buttonList</c> must be of type <c>SEQUENCE OF</c> or
+ <c>SET OF</c>.</item>
+ </list>
+ <p>In the example, component <c>number</c> of the first of the encoded
+ elements in the <c>SEQUENCE OF</c> <c>buttonList</c> is selected.
+      This applies to the ASN.1 specification in Section
+ <seealso marker="#Asn1spec">Writing an Exclusive Decode
+ Instruction</seealso>.
</p>
</section>
<section>
<title>Another Example</title>
- <p>In this example we use the same ASN.1 spec as <seealso marker="#Asn1spec">above</seealso>. A valid selective decode
- instruction is:</p>
+ <p>In this example, the same ASN.1 specification as in Section
+ <seealso marker="#Asn1spec">Writing an Exclusive Decode Instruction</seealso>
+ is used. The following is a valid selective decode instruction:</p>
<pre>
{selective_decode,
{'GUI',
@@ -404,16 +425,17 @@ List_Selector = [integer()] </pre>
actions,
possibleActions,
[1],
- handle,number]}]}}.
- </pre>
- <p>The first <c>Decode_Instruction</c>,
+ handle,number]}]}}.</pre>
+ <p>The first instruction,
<c>{selected_decode_Window1,['Window',status,buttonList,[1],number]}</c>
- is commented in the previous section. The instruction
- <c>{selected_decode_Action,['Action',handle,number]}</c> picks
- the component <c>number</c> in the <c>handle</c> component of the type
- <c>Action</c>. If we have the value <c>ValAction = {'Action',17,{'Button',4711,false}}</c> the internal value 4711
- should be picked by <c>selected_decode_Action</c>. In an Erlang
- terminal it looks like:</p>
+ is described in the previous section.</p>
+ <p> The second instruction,
+ <c>{selected_decode_Action,['Action',handle,number]}</c>, takes
+ component <c>number</c> in the <c>handle</c> component of type
+ <c>Action</c>. If the value is
+ <c>ValAction = {'Action',17,{'Button',4711,false}}</c>, the internal
+ value 4711 is to be picked by <c>selected_decode_Action</c>. In an
+ Erlang terminal it looks as follows:</p>
<pre>
ValAction = {'Action',17,{'Button',4711,false}}.
{'Action',17,{'Button',4711,false}}
@@ -423,44 +445,41 @@ ValAction = {'Action',17,{'Button',4711,false}}.
&lt;&lt;48,18,2,1,17,160,13,172,11,171,9,48,7,128,2,18,103,129,1,0&gt;&gt;
9> 'GUI':selected_decode_Action(BinBytes).
{ok,4711}
-10> </pre>
+10></pre>
<p>The third instruction,
<c>['Window',status,actions,possibleActions,[1],handle,number]</c>,
- which is a little more complicated,</p>
+ works as follows:</p>
<list type="bulleted">
- <item>starts with type <em>Window</em>. </item>
- <item>Picks component <em>status</em> of <c>Window</c> that is
- of type <c>Status</c>.</item>
- <item>Then takes component <em>actions</em> of type
+ <item><em>Step 1:</em> Starts with type <c>Window</c>.</item>
+ <item><em>Step 2:</em> Takes component <c>status</c> of <c>Window</c>
+ that is of type <c>Status</c>.</item>
+        <item><em>Step 3:</em> Takes component <c>actions</c> of type
<c>Status</c>.</item>
- <item>Then <em>possibleActions</em> of the internal defined
- CHOICE type.</item>
- <item>Thereafter it goes into the first component of the
- SEQUENCE OF by <em>[1]</em>. That component is of type
- <c>Action</c>.</item>
- <item>The instruction next picks component
- <em>handle</em>.</item>
- <item>And finally component <em>number</em> of the type
+ <item><em>Step 4:</em> Takes <c>possibleActions</c> of the internally
+ defined <c>CHOICE</c> type.</item>
+ <item><em>Step 5:</em> Goes into the first component of
+ <c>SEQUENCE OF</c> by <c>[1]</c>. That component is of type
+ <c>Action</c>.</item>
+ <item><em>Step 6:</em> Takes component <c>handle</c>.</item>
+ <item><em>Step 7:</em> Takes component <c>number</c> of type
<c>Button</c>.</item>
</list>
- <p>The following figures shows which components are in the
- TypeList
- <c>['Window',status,actions,possibleActions,[1],handle,number]</c>. And
- which part of a message that will be decoded by
- selected_decode_Window2.
- </p>
+ <p>The following figure shows which components are in <c>TypeList</c>
+ <c>['Window',status,actions,possibleActions,[1],handle,number]</c>:</p>
<p></p>
<image file="selective_TypeList.gif">
- <icaption>The elements specified in the config file for selective decode of a sub-value in a Window message</icaption>
+ <icaption>Elements Specified in Configuration File for Selective Decode of a Subvalue in a Window Message</icaption>
</image>
+ <p>In the following figure, only the marked element is decoded by
+ <c>selected_decode_Window2</c>:</p>
<p></p>
<image file="selective_Window2.gif">
- <icaption>Figure symbolizes the bytes of a Window:status message. Only the marked element is decoded when selected_decode_Window2 is called. </icaption>
+ <icaption>Bytes of a Window:status Message</icaption>
</image>
- <p>With the following example you can examine that both
+      <p>With the following example, you can verify that both
<c>selected_decode_Window2</c> and
- <c>selected_decode_Window1</c> decodes the intended sub-value
- of the value <c>Val</c></p>
+      <c>selected_decode_Window1</c> decode the intended subvalue
+ of value <c>Val</c>:</p>
<pre>
1> Val = {'Window',{status,{'Status',12,
[{'Button',13,true},
@@ -478,8 +497,8 @@ ValAction = {'Action',17,{'Button',4711,false}}.
4> 'GUI':selected_decode_Window1(Bin).
{ok,13}
5> 'GUI':selected_decode_Window2(Bin).
-{ok,18} </pre>
- <p>Observe that the value feed into the selective decode
+{ok,18}</pre>
+ <p>Notice that the value fed into the selective decode
functions must be a binary.
</p>
</section>
@@ -489,19 +508,19 @@ ValAction = {'Action',17,{'Button',4711,false}}.
<title>Performance</title>
<p>To give an indication on the possible performance gain using
the specialized decodes, some measures have been performed. The
- relative figures in the outcome between selective, exclusive and
- complete decode (the normal case) depends on the structure of
- the type, the size of the message and on what level the
+ relative figures in the outcome between selective, exclusive, and
+ complete decode (the normal case) depend on the structure of
+ the type, the size of the message, and on what level the
selective and exclusive decodes are specified.
</p>
<section>
- <title>ASN.1 Specifications, Messages and Configuration</title>
- <p>The specs <seealso marker="#Asn1spec">GUI</seealso> and
+ <title>ASN.1 Specifications, Messages, and Configuration</title>
+ <p>The specifications <seealso marker="#Asn1spec">GUI</seealso> and
<url href="http://www.itu.int/ITU-T/asn1/database/itu-t/h/h248/2002/MEDIA-GATEWAY-CONTROL.html">MEDIA-GATEWAY-CONTROL</url>
- was used in the test.
+ were used in the test.
</p>
- <p>For the GUI spec the configuration looked like:</p>
+ <p>For the <c>GUI</c> specification the configuration was as follows:</p>
<pre>
{selective_decode,
{'GUI',
@@ -523,9 +542,8 @@ ValAction = {'Action',17,{'Button',4711,false}}.
['Window',
[{status,
[{buttonList,parts},
- {actions,undecoded}]}]]}]}}.
- </pre>
- <p>The MEDIA-GATEWAY-CONTROL configuration was:</p>
+ {actions,undecoded}]}]]}]}}.</pre>
+ <p>The <c>MEDIA-GATEWAY-CONTROL</c> configuration was as follows:</p>
<pre>
{exclusive_decode,
{'MEDIA-GATEWAY-CONTROL',
@@ -538,9 +556,8 @@ ValAction = {'Action',17,{'Button',4711,false}}.
{selective_decode,
{'MEDIA-GATEWAY-CONTROL',
[{decode_MegacoMessage_selective,
- ['MegacoMessage',mess,version]}]}}.
- </pre>
- <p>The corresponding values were:</p>
+ ['MegacoMessage',mess,version]}]}}.</pre>
+ <p>The corresponding values were as follows:</p>
<pre>
{'Window',{status,{'Status',12,
[{'Button',13,true},
@@ -649,177 +666,178 @@ ValAction = {'Action',17,{'Button',4711,false}}.
{'StatisticsParameter',[0,11,0,3],[[52,53,49,48,48]]},
{'StatisticsParameter',[0,12,0,6],[[48,46,50]]},
{'StatisticsParameter',[0,12,0,7],[[50,48]]},
- {'StatisticsParameter',[0,12,0,8],[[52,48]]}]}]}}}]}]}}}]}}}
- </pre>
- <p>The size of the encoded values was 458 bytes for GUI and 464
- bytes for MEDIA-GATEWAY-CONTROL.
+ {'StatisticsParameter',[0,12,0,8],[[52,48]]}]}]}}}]}]}}}]}}}</pre>
+ <p>The size of the encoded values was 458 bytes for <c>GUI</c> and 464
+ bytes for <c>MEDIA-GATEWAY-CONTROL</c>.
</p>
</section>
<section>
<title>Results</title>
- <p>The ASN.1 specs in the test are compiled with the options
- <c>ber_bin, optimize, driver</c> and <c>asn1config</c>. If the
- <c>driver</c> option had been omitted there should have been
+ <p>The ASN.1 specifications in the test were compiled with options
+ <c>ber_bin, optimize, driver</c> and <c>asn1config</c>. Omitting
+ option <c>driver</c> gives
higher values for <c>decode</c> and <c>decode_part</c>. These tests have
- not been re-run using nifs, but are expected to perform about 5% better
+ not been rerun using NIFs, but are expected to perform about 5% better
than the linked-in driver.
</p>
<p>The test program runs 10000 decodes on the value, resulting
- in a printout with the elapsed time in microseconds for the
+ in an output with the elapsed time in microseconds for the
total number of decodes.
</p>
<table>
<row>
<cell align="left" valign="top"><em>Function</em></cell>
- <cell align="left" valign="top"><em>Time</em>(microseconds)</cell>
- <cell align="left" valign="top"><em>Kind of Decode</em></cell>
- <cell align="left" valign="top"><em>ASN.1 spec</em></cell>
- <cell align="left" valign="top"><em>% of time vs. complete decode</em></cell>
+ <cell align="left" valign="top"><em>Time</em> (microseconds)</cell>
+ <cell align="left" valign="top"><em>Decode Type</em></cell>
+ <cell align="left" valign="top"><em>ASN.1 Specification</em></cell>
+ <cell align="left" valign="top"><em>% of Time versus Complete Decode</em></cell>
</row>
<row>
<cell align="left" valign="middle"><c>decode_MegacoMessage_selective/1</c></cell>
<cell align="left" valign="middle"><c>374045</c></cell>
- <cell align="left" valign="middle"><c>selective</c></cell>
+ <cell align="left" valign="middle"><c>Selective</c></cell>
<cell align="left" valign="middle"><c>MEDIA-GATEWAY-CONTROL</c></cell>
<cell align="left" valign="middle"><em>8.3</em></cell>
</row>
<row>
<cell align="left" valign="middle"><c>decode_MegacoMessage_exclusive/1</c></cell>
<cell align="left" valign="middle"><c>621107</c></cell>
- <cell align="left" valign="middle"><c>exclusive</c></cell>
+ <cell align="left" valign="middle"><c>Exclusive</c></cell>
<cell align="left" valign="middle"><c>MEDIA-GATEWAY-CONTROL</c></cell>
<cell align="left" valign="middle"><em>13.8</em></cell>
</row>
<row>
<cell align="left" valign="middle"><c>decode/2</c></cell>
<cell align="left" valign="middle"><c>4507457</c></cell>
- <cell align="left" valign="middle"><c>complete</c></cell>
+ <cell align="left" valign="middle"><c>Complete</c></cell>
<cell align="left" valign="middle"><c>MEDIA-GATEWAY-CONTROL</c></cell>
<cell align="left" valign="middle"><em>100</em></cell>
</row>
<row>
<cell align="left" valign="middle"><c>selected_decode_Window1/1</c></cell>
<cell align="left" valign="middle"><c>449585</c></cell>
- <cell align="left" valign="middle"><c>selective</c></cell>
+ <cell align="left" valign="middle"><c>Selective</c></cell>
<cell align="left" valign="middle"><c>GUI</c></cell>
<cell align="left" valign="middle"><em>7.6</em></cell>
</row>
<row>
<cell align="left" valign="middle"><c>selected_decode_Window2/1</c></cell>
<cell align="left" valign="middle"><c>890666</c></cell>
- <cell align="left" valign="middle"><c>selective</c></cell>
+ <cell align="left" valign="middle"><c>Selective</c></cell>
<cell align="left" valign="middle"><c>GUI</c></cell>
<cell align="left" valign="middle"><em>15.1</em></cell>
</row>
<row>
<cell align="left" valign="middle"><c>decode_Window_status_exclusive/1</c></cell>
<cell align="left" valign="middle"><c>1251878</c></cell>
- <cell align="left" valign="middle"><c>exclusive</c></cell>
+ <cell align="left" valign="middle"><c>Exclusive</c></cell>
<cell align="left" valign="middle"><c>GUI</c></cell>
<cell align="left" valign="middle"><em>21.3</em></cell>
</row>
<row>
<cell align="left" valign="middle"><c>decode/2</c></cell>
<cell align="left" valign="middle"><c>5889197</c></cell>
- <cell align="left" valign="middle"><c>complete</c></cell>
+ <cell align="left" valign="middle"><c>Complete</c></cell>
<cell align="left" valign="middle"><c>GUI</c></cell>
<cell align="left" valign="middle"><em>100</em></cell>
</row>
- <tcaption>Results of complete, exclusive and selective decode</tcaption>
+ <tcaption>Results of Complete, Exclusive, and Selective Decode</tcaption>
</table>
- <p>Another interesting question is what the relation is between
+      <p>It is also of interest to know the relation between
a complete decode, an exclusive decode followed by
- <c>decode_part</c> of the excluded parts and a selective decode
- followed by a complete decode. Some situations may be compared to
- this simulation, e.g. inspect a sub-value and later on look at
+ <c>decode_part</c> of the excluded parts, and a selective decode
+ followed by a complete decode. Some situations can be compared to
+      this simulation, for example, inspecting a subvalue and later inspecting
the entire value. The following table shows figures from this
- test. The number of loops and time unit is the same as in the
+ test. The number of loops and the time unit are the same as in the
previous test.
</p>
<table>
<row>
<cell align="left" valign="top"><em>Actions</em></cell>
<cell align="left" valign="top"><em>Function</em>&nbsp;&nbsp;&nbsp;&nbsp;</cell>
- <cell align="left" valign="top"><em>Time</em>(microseconds)</cell>
- <cell align="left" valign="top"><em>ASN.1 spec</em></cell>
- <cell align="left" valign="top"><em>% of time vs. complete decode</em></cell>
+ <cell align="left" valign="top"><em>Time</em> (microseconds)</cell>
+ <cell align="left" valign="top"><em>ASN.1 Specification</em></cell>
+ <cell align="left" valign="top"><em>% of Time vs. Complete Decode</em></cell>
</row>
<row>
- <cell align="left" valign="middle"><c>complete</c></cell>
+ <cell align="left" valign="middle"><c>Complete</c></cell>
<cell align="left" valign="middle"><c>decode/2</c></cell>
<cell align="left" valign="middle"><c>4507457</c></cell>
<cell align="left" valign="middle"><c>MEDIA-GATEWAY-CONTROL</c></cell>
<cell align="left" valign="middle"><em>100</em></cell>
</row>
<row>
- <cell align="left" valign="middle"><c>selective and complete</c></cell>
+ <cell align="left" valign="middle"><c>Selective and Complete</c></cell>
<cell align="left" valign="middle"><c>decode_&shy;MegacoMessage_&shy;selective/1</c></cell>
<cell align="left" valign="middle"><c>4881502</c></cell>
<cell align="left" valign="middle"><c>MEDIA-GATEWAY-CONTROL</c></cell>
<cell align="left" valign="middle"><em>108.3</em></cell>
</row>
<row>
- <cell align="left" valign="middle"><c>exclusive and decode_part</c></cell>
+ <cell align="left" valign="middle"><c>Exclusive and decode_part</c></cell>
<cell align="left" valign="middle"><c>decode_&shy;MegacoMessage_&shy;exclusive/1</c></cell>
<cell align="left" valign="middle"><c>5481034</c></cell>
<cell align="left" valign="middle"><c>MEDIA-GATEWAY-CONTROL</c></cell>
<cell align="left" valign="middle"><em>112.3</em></cell>
</row>
<row>
- <cell align="left" valign="middle"><c>complete</c></cell>
+ <cell align="left" valign="middle"><c>Complete</c></cell>
<cell align="left" valign="middle"><c>decode/2</c></cell>
<cell align="left" valign="middle"><c>5889197</c></cell>
<cell align="left" valign="middle"><c>GUI</c></cell>
<cell align="left" valign="middle"><em>100</em></cell>
</row>
<row>
- <cell align="left" valign="middle"><c>selective and complete</c></cell>
+ <cell align="left" valign="middle"><c>Selective and Complete</c></cell>
<cell align="left" valign="middle"><c>selected_&shy;decode_&shy;Window1/1</c></cell>
<cell align="left" valign="middle"><c>6337636</c></cell>
<cell align="left" valign="middle"><c>GUI</c></cell>
<cell align="left" valign="middle"><em>107.6</em></cell>
</row>
<row>
- <cell align="left" valign="middle"><c>selective and complete</c></cell>
+ <cell align="left" valign="middle"><c>Selective and Complete</c></cell>
<cell align="left" valign="middle"><c>selected_&shy;decode_&shy;Window2/1</c></cell>
<cell align="left" valign="middle"><c>6795319</c></cell>
<cell align="left" valign="middle"><c>GUI</c></cell>
<cell align="left" valign="middle"><em>115.4</em></cell>
</row>
<row>
- <cell align="left" valign="middle"><c>exclusive and decode_part</c></cell>
+ <cell align="left" valign="middle"><c>Exclusive and decode_part</c></cell>
<cell align="left" valign="middle"><c>decode_&shy;Window_&shy;status_&shy;exclusive/1</c></cell>
<cell align="left" valign="middle"><c>6249200</c></cell>
<cell align="left" valign="middle"><c>GUI</c></cell>
<cell align="left" valign="middle"><em>106.1</em></cell>
</row>
- <tcaption>Results of complete, exclusive + decode_part and selective + complete decodes</tcaption>
+        <tcaption>Results of Complete, Exclusive + decode_part, and Selective + Complete Decodes</tcaption>
</table>
<p>Other ASN.1 types and values can differ much from these
- figures. Therefore it is important that you, in every case where
+ figures. It is therefore important that you, in every case where
you intend to use either of these decodes, perform some tests
- that shows if you will benefit your purpose.
+      that show whether they benefit your purpose.
</p>
</section>
<section>
- <title>Comments</title>
- <p>Generally speaking the gain of selective and exclusive decode
- in advance of complete decode is greater the bigger value and the
- less deep in the structure you have to decode. One should also
- prefer selective decode instead of exclusive decode if you are
- interested in just one single sub-value.</p>
- <p>Another observation is that the exclusive decode followed by
- decode_part decodes is very attractive if the parts will be sent
- to different servers for decoding or if one in some cases not is
- interested in all parts.</p>
- <p>The fastest selective decode are when the decoded type is a
+ <title>Final Remarks</title>
+ <list type="bulleted">
+ <item>The gain of using selective and exclusive decode instead of a
+ complete decode is greater the bigger the value and the
+ less deep in the structure you have to decode.</item>
+ <item>Use selective decode instead of exclusive decode if you are
+ interested in only a single subvalue.</item>
+ <item>Exclusive decode followed by
+        <c>decode_part</c> is attractive if the parts are sent
+ to different servers for decoding, or if you in some cases are not
+ interested in all parts.</item>
+ <item>The fastest selective decode is when the decoded type is a
primitive type and not so deep in the structure of the top
- type. The <c>selected_decode_Window2</c> decodes a big constructed
- value, which explains why this operation is relatively slow.</p>
- <p>It may vary from case to case which combination of
- selective/complete decode or exclusive/part decode is the fastest.</p>
+        type. <c>selected_decode_Window2</c> decodes a large constructed
+ value, which explains why this operation is relatively slow.</item>
+ <item>It can vary from case to case which combination of
+ selective/complete decode or exclusive/part decode is the fastest.</item>
+ </list>
</section>
</section>
</chapter>
diff --git a/lib/asn1/doc/src/asn1_ug.xml b/lib/asn1/doc/src/asn1_ug.xml
deleted file mode 100644
index 8b33497dd3..0000000000
--- a/lib/asn1/doc/src/asn1_ug.xml
+++ /dev/null
@@ -1,1417 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE chapter SYSTEM "chapter.dtd">
-
-<chapter>
- <header>
- <copyright>
- <year>1997</year><year>2013</year>
- <holder>Ericsson AB. All Rights Reserved.</holder>
- </copyright>
- <legalnotice>
- The contents of this file are subject to the Erlang Public License,
- Version 1.1, (the "License"); you may not use this file except in
- compliance with the License. You should have received a copy of the
- Erlang Public License along with this software. If not, it can be
- retrieved online at http://www.erlang.org/.
-
- Software distributed under the License is distributed on an "AS IS"
- basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
- the License for the specific language governing rights and limitations
- under the License.
-
- </legalnotice>
-
- <title>Asn1</title>
- <prepared>Kenneth Lundin</prepared>
- <docno></docno>
- <date>1999-03-25</date>
- <rev>D</rev>
- <file>asn1_ug.xml</file>
- </header>
-
- <section>
- <title>Introduction</title>
-
- <section>
- <title>Features</title>
- <p>The Asn1 application provides:</p>
- <list type="bulleted">
- <item>An ASN.1 compiler for Erlang, which generates encode and
- decode functions to be used by Erlang programs sending and
- receiving ASN.1 specified data.</item>
- <item>Run-time functions used by the generated code.</item>
- <item>Support for the following encoding rules:
- <list>
- <item>
- Basic Encoding Rules (<em>BER</em>)
- </item>
- <item>
- Distinguished Encoding Rules (<em>DER</em>), a specialized
- form of BER that is used in security-conscious
- applications.
- </item>
- <item>
- Packed Encoding Rules (<em>PER</em>); both the aligned and
- unaligned variant.
- </item>
- </list>
- </item>
- </list>
- </section>
-
- <section>
- <title>Overview</title>
- <p>ASN.1 (Abstract Syntax Notation One) is a formal language for
- describing data structures to be exchanged between distributed
- computer systems. The purpose of ASN.1 is to have a platform
- and programming language independent notation to express types
- using a standardized set of rules for the transformation of
- values of a defined type into a stream of bytes. This stream of
- bytes can then be sent on any type of communication
- channel. This way, two applications written in different
- programming languages running on different computers with
- different internal representation of data can exchange instances
- of structured data types.</p>
- </section>
-
- <section>
- <title>Prerequisites</title>
- <p>It is assumed that the reader is familiar with the ASN.1
- notation as documented in the standard definition [<cite
- id="X.680"></cite>] which is the primary text. It may also be
- helpful, but not necessary, to read the standard definitions
- [<cite id="X.681"></cite>] [<cite id="X.682"></cite>] [<cite
- id="X.683"></cite>] [<cite id="X.690"></cite>] [<cite
- id="X.691"></cite>]. </p>
- <p>A good book explaining those reference texts is
- [<cite id="DUBUISSON"></cite>], which is free to download at
- <url href="http://www.oss.com/asn1/dubuisson.html">http://www.oss.com/asn1/dubuisson.html</url>.
- </p>
- </section>
-
- <section>
- <title>Capabilities</title>
- <p>This application covers all features of ASN.1 up to the 1997
- edition of the specification. In the 2002 edition of ASN.1 a
- number of new features were introduced. The following features
- of the 2002 edition are fully or partly supported as shown
- below:</p>
- <list type="bulleted">
- <item>
- <p>Decimal notation (e.g., "1.5e3") for REAL values. The
- NR1, NR2 and NR3 formats as explained in ISO6093 are
- supported.</p>
- </item>
- <item>
- <p>The RELATIVE-OID type for relative object identifiers is
- fully supported.</p>
- </item>
- <item>
- <p>The subtype constraint (CONTAINING/ENCODED BY) to
- constrain the content of an octet string or a bit string is
- parsed when compiling, but no further action is taken. This
- constraint is not a PER-visible constraint.</p>
- </item>
- <item>
- <p>The subtype constraint by regular expressions (PATTERN)
- for character string types is parsed when compiling, but no
- further action is taken. This constraint is not a
- PER-visible constraint.</p>
- </item>
- <item>
- <p>Multiple-line comments as in C, <c>/* ... */</c>, are
- supported.</p>
- </item>
- </list>
- </section>
-
- </section>
-
- <section>
- <title>Getting Started with Asn1</title>
-
- <section>
- <title>A First Example</title>
- <p>The following example demonstrates the basic functionality used to run
- the Erlang ASN.1 compiler.</p>
- <p>Create a file called <c>People.asn</c> containing the following:</p>
- <pre>
-People DEFINITIONS AUTOMATIC TAGS ::=
-BEGIN
- Person ::= SEQUENCE {
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER OPTIONAL
- }
-END </pre>
- <p>This file (<c>People.asn</c>) must be compiled before it can be
- used.
- The ASN.1 compiler checks that the syntax is correct and that the
- text represents proper ASN.1 code before generating an abstract
- syntax tree. The code-generator then uses the abstract syntax
- tree in order to generate code.
- </p>
- <p>The generated Erlang files will be placed in the current directory or
- in the directory specified with the <c>{outdir,Dir}</c> option.
- The following shows how the compiler
- can be called from the Erlang shell:</p>
- <pre>
-1><input> asn1ct:compile("People", [ber]).</input>
-ok
-2> </pre>
-
- <p>The <c>verbose</c> option can be given to have information
- about the generated files printed:</p>
- <pre>
-2><input> asn1ct:compile("People", [ber,verbose]).</input>
-Erlang ASN.1 compiling "People.asn"
---{generated,"People.asn1db"}--
---{generated,"People.hrl"}--
---{generated,"People.erl"}--
-ok
-3> </pre>
-
- <p>The ASN.1 module <c>People</c> is now accepted and the
- abstract syntax tree is saved in the <c>People.asn1db</c> file;
- the generated Erlang code is compiled using the Erlang compiler
- and loaded into the Erlang run-time system. Now there is an API
- for <c>encode/2</c> and <c>decode/2</c> in the module
- <c>People</c>, which is invoked by: <br></br>
- <c><![CDATA['People':encode(<Type name>, <Value>)]]></c>
- <br></br>
- or <br></br>
-<c><![CDATA['People':decode(<Type name>, <Value>)]]></c></p>
-
-    <p>Assume there is a network
-      application which receives instances of the ASN.1 defined
-      type Person, modifies them, and sends them back again:</p>
- <code type="none">
-receive
- {Port,{data,Bytes}} ->
- case 'People':decode('Person',Bytes) of
- {ok,P} ->
- {ok,Answer} = 'People':encode('Person',mk_answer(P)),
- Port ! {self(),{command,Answer}};
- {error,Reason} ->
- exit({error,Reason})
- end
- end, </code>
- <p>In the example above, a series of bytes is received from an
- external source and the bytes are then decoded into a valid
- Erlang term. This was achieved with the call
- <c>'People':decode('Person',Bytes)</c> which returned
- an Erlang value of the ASN.1 type <c>Person</c>. Then an answer was
- constructed and encoded using
- <c>'People':encode('Person',Answer)</c> which takes an
- instance of a defined ASN.1 type and transforms it to a
- binary according to the BER or PER encoding rules.
- <br></br>
-The encoder and the decoder can also be run from
- the shell.</p>
- <pre>
-2> <input>Rockstar = {'Person',"Some Name",roving,50}.</input>
-{'Person',"Some Name",roving,50}
-3> <input>{ok,Bin} = 'People':encode('Person',Rockstar).</input>
-{ok,&lt;&lt;243,17,19,9,83,111,109,101,32,78,97,109,101,2,1,2,
- 2,1,50&gt;&gt;}
-4> <input>{ok,Person} = 'People':decode('Person',Bin).</input>
-{ok,{'Person',"Some Name",roving,50}}
-5> </pre>
- </section>
-
- <section>
- <title>Module dependencies</title>
- <p>It is common that ASN.1 modules import defined types, values and
- other entities from another ASN.1 module.</p>
- <p>Earlier versions of the ASN.1 compiler required that modules that
- were imported from had to be compiled before the module that
- imported. This caused problems when ASN.1 modules had circular
- dependencies.</p>
-    <p>Referenced modules are now parsed when the compiler finds an
-      entity that is imported. No code is generated for
-      the referenced module. However, the compiled module relies on
-      the referenced modules also being compiled.</p>
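-    <p>For example, assuming two hypothetical specs <c>Importing.asn</c>
-      and <c>Exported.asn</c>, where the former imports from the latter,
-      the importing module can be compiled first, but both must be
-      compiled before the generated code is used at run-time:</p>
-    <pre>
-1> <input>asn1ct:compile("Importing", [ber]).</input>
-ok
-2> <input>asn1ct:compile("Exported", [ber]).</input>
-ok </pre>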
- </section>
- </section>
-
- <section>
- <title>The Asn1 Application User Interface</title>
- <p>The Asn1 application provides two separate user interfaces:</p>
- <list type="bulleted">
- <item>
- <p>The module <c>asn1ct</c> which provides the compile-time functions
- (including the compiler).</p>
- </item>
- <item>
- <p>The module <c>asn1rt_nif</c> which provides the run-time functions
- for the ASN.1 decoder for the BER back-end.</p>
- </item>
- </list>
- <p>The reason for the division of the interface into compile-time
- and run-time
- is that only run-time modules (<c>asn1rt*</c>) need to be loaded in
- an embedded system.
- </p>
-
- <section>
- <title>Compile-time Functions</title>
- <p>The ASN.1 compiler can be invoked directly from the command-line
- by means of the <c>erlc</c> program. This is convenient when compiling
- many ASN.1 files from the command-line or when using Makefiles.
- Here are some examples of how the <c>erlc</c> command can be used to invoke the
- ASN.1 compiler:</p>
- <pre>
-erlc Person.asn
-erlc -bper Person.asn
-erlc -bber ../Example.asn
-erlc -o ../asnfiles -I ../asnfiles -I /usr/local/standards/asn1 Person.asn </pre>
- <p>The useful options for the ASN.1 compiler are:</p>
- <taglist>
- <tag><c>-b[ber | per | uper]</c></tag>
- <item>
-          <p>Choice of encoding rules; if omitted, <c>ber</c> is the
-            default.</p>
- </item>
- <tag><c>-o OutDirectory</c></tag>
- <item>
- <p>Where to put the generated files, default is the current
- directory.</p>
- </item>
- <tag><c>-I IncludeDir</c></tag>
- <item>
- <p>Where to search for <c>.asn1db</c> files and ASN.1
- source specs in order to resolve references to other
- modules. This option can be repeated many times if there
- are several places to search in. The compiler will always
- search the current directory first.</p>
- </item>
- <tag><c>+der</c></tag>
- <item>
- <p>DER encoding rule. Only when using <c>-ber</c> option.</p>
- </item>
- <tag><c>+asn1config</c></tag>
- <item>
- <p>This functionality works together with the
- <c>ber</c> option. It enables the
- specialized decodes, see the <seealso marker="asn1_spec">Specialized Decode</seealso> chapter.
- </p>
- </item>
- <tag><c>+undec_rest</c></tag>
- <item>
- <p>A buffer that holds a message being decoded may also have
- trailing bytes. If those trailing bytes are important they
- can be returned along with the decoded value by compiling
- the ASN.1 specification with the <c>+undec_rest</c> option.
- The return value from the decoder will be
- <c>{ok,Value,Rest}</c> where <c>Rest</c> is a binary
- containing the trailing bytes.</p>
- </item>
- <tag><c>+'Any Erlc Option'</c></tag>
- <item>
- <p>You may add any option to the Erlang compiler when
- compiling the generated Erlang files. Any option
- unrecognized by the ASN.1 compiler will be passed to the
- Erlang compiler.</p>
- </item>
- </taglist>
- <p>For a complete description of <c>erlc</c> see Erts Reference Manual.</p>
- <p>The compiler and other compile-time functions can also be invoked from
- the Erlang shell. Below follows a brief
- description of the primary functions, for a
- complete description of each function see
- <seealso marker="asn1ct">the Asn1 Reference Manual</seealso>, the
- <c>asn1ct</c> module.</p>
- <p>The compiler is invoked by using <c>asn1ct:compile/1</c> with
- default options, or <c>asn1ct:compile/2</c> if explicit options
- are given.
- Example:</p>
- <pre>
-asn1ct:compile("H323-MESSAGES.asn1"). </pre>
- <p>which equals:</p>
- <pre>
-asn1ct:compile("H323-MESSAGES.asn1",[ber]). </pre>
- <p>If one wants PER encoding:</p>
- <pre>
-asn1ct:compile("H323-MESSAGES.asn1",[per]). </pre>
- <p>The generic encode and decode functions can be invoked like this:</p>
- <pre>
-'H323-MESSAGES':encode('SomeChoiceType',{call,"octetstring"}).
-'H323-MESSAGES':decode('SomeChoiceType',Bytes). </pre>
- </section>
-
- <section>
- <title>Run-time Functions</title>
-    <p>When an ASN.1 specification is compiled with the <c>ber</c>
-      option, the <c>asn1rt_nif</c> module and the NIF library in
-      <c>asn1/priv_dir</c> will be needed at run-time.</p>
- <p>By invoking the function <c>info/0</c> in a generated module, one
- gets information about which compiler options were used.</p>
- </section>
-
- <section>
- <title>Errors</title>
- <p>Errors detected at
- compile time appear on the screen together with
- a line number indicating where in the source file the error
- was detected. If no errors are found, an Erlang ASN.1 module will
- be created.</p>
-    <p>The run-time encoders and decoders execute within a catch and
-      return <c>{ok, Data}</c> or
- <c>{error, {asn1, Description}}</c> where
- <c>Description</c> is
- an Erlang term describing the error. </p>
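-    <p>An application can therefore match on both outcomes, as in the
-      sketch below (the handling functions are placeholders):</p>
-    <code type="none">
-case 'People':decode('Person', Bytes) of
-    {ok,Person} ->
-        handle_person(Person);
-    {error,{asn1,Description}} ->
-        log_decode_error(Description)
-end </code>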
- </section>
- </section>
-
- <section>
- <marker id="inlineExamples"></marker>
- <title>Multi-file Compilation</title>
- <p>There are various reasons for using multi-file compilation:</p>
- <list type="bulleted">
- <item>You want to choose the name for the generated module,
- perhaps because you need to compile the same specs for
- different encoding rules.</item>
- <item>You want only one resulting module.</item>
- </list>
- <p>You need to specify which ASN.1 specs you will
- compile in a module that must have the extension
-      <c>.set.asn</c>. You choose the name of the module and provide the
- names of the ASN.1 specs. For instance, if you have the specs
- <c>File1.asn</c>, <c>File2.asn</c> and <c>File3.asn</c> your
- module <c>MyModule.set.asn</c> will look like:</p>
- <pre>
-File1.asn
-File2.asn
-File3.asn </pre>
- <p>If you compile with:</p>
- <code type="none">
-~> erlc MyModule.set.asn </code>
- <p>the result will be one merged module <c>MyModule.erl</c> with
- the generated code from the three ASN.1 specs.
- </p>
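-    <p>The same multi-file compilation can also be started from the
-      Erlang shell; a minimal sketch (the encoding-rule option is assumed
-      here):</p>
-    <pre>
-1> <input>asn1ct:compile("MyModule.set.asn", [ber]).</input>
-ok </pre>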
- </section>
-
- <section>
- <title>A quick note about tags</title>
-
- <p>Tags used to be important for all users of ASN.1, because it
- was necessary to manually add tags to certain constructs in order
- for the ASN.1 specification to be valid. Here is an example of
- an old-style specification:</p>
-
- <pre>
-Tags DEFINITIONS ::=
-BEGIN
- Afters ::= CHOICE { cheese [0] IA5String,
- dessert [1] IA5String }
-END </pre>
-
- <p>Without the tags (the numbers in square brackets) the ASN.1
- compiler would refuse to compile the file.</p>
-
- <p>In 1994 the global tagging mode AUTOMATIC TAGS was introduced.
- By putting AUTOMATIC TAGS in the module header, the ASN.1 compiler
- will automatically add tags when needed. Here is the same
- specification in AUTOMATIC TAGS mode:</p>
-
- <pre>
-Tags DEFINITIONS AUTOMATIC TAGS ::=
-BEGIN
- Afters ::= CHOICE { cheese IA5String,
- dessert IA5String }
-END
-</pre>
-
- <p>Tags will not be mentioned any more in this manual.</p>
- </section>
-
- <section>
- <marker id="ASN1Types"></marker>
- <title>The ASN.1 Types</title>
- <p>This section describes the ASN.1 types including their
- functionality, purpose and how values are assigned in Erlang.
- </p>
- <p>ASN.1 has both primitive and constructed types:</p>
- <p></p>
- <table>
- <row>
- <cell align="left" valign="middle"><em>Primitive types</em></cell>
- <cell align="left" valign="middle"><em>Constructed types</em></cell>
- </row>
- <row>
- <cell align="left" valign="middle"><seealso marker="#BOOLEAN">BOOLEAN</seealso></cell>
- <cell align="left" valign="middle"><seealso marker="#SEQUENCE">SEQUENCE</seealso></cell>
- </row>
- <row>
- <cell align="left" valign="middle"><seealso marker="#INTEGER">INTEGER</seealso></cell>
- <cell align="left" valign="middle"><seealso marker="#SET">SET</seealso></cell>
- </row>
- <row>
- <cell align="left" valign="middle"><seealso marker="#REAL">REAL</seealso></cell>
- <cell align="left" valign="middle"><seealso marker="#CHOICE">CHOICE</seealso></cell>
- </row>
- <row>
- <cell align="left" valign="middle"><seealso marker="#NULL">NULL</seealso></cell>
- <cell align="left" valign="middle"><seealso marker="#SOF">SET OF and SEQUENCE OF</seealso></cell>
- </row>
- <row>
- <cell align="left" valign="middle"><seealso marker="#ENUMERATED">ENUMERATED</seealso></cell>
- <cell align="left" valign="middle"><seealso marker="#ANY">ANY</seealso></cell>
- </row>
- <row>
- <cell align="left" valign="middle"><seealso marker="#BIT STRING">BIT STRING</seealso></cell>
- <cell align="left" valign="middle"><seealso marker="#ANY">ANY DEFINED BY</seealso></cell>
- </row>
- <row>
- <cell align="left" valign="middle"><seealso marker="#OCTET STRING">OCTET STRING</seealso></cell>
- <cell align="left" valign="middle"><seealso marker="#NegotiationTypes">EXTERNAL</seealso></cell>
- </row>
- <row>
- <cell align="left" valign="middle"><seealso marker="#Character Strings">Character Strings</seealso></cell>
- <cell align="left" valign="middle"><seealso marker="#NegotiationTypes">EMBEDDED PDV</seealso></cell>
- </row>
- <row>
- <cell align="left" valign="middle"><seealso marker="#OBJECT IDENTIFIER">OBJECT IDENTIFIER</seealso></cell>
- <cell align="left" valign="middle"><seealso marker="#NegotiationTypes">CHARACTER STRING</seealso></cell>
- </row>
- <row>
- <cell align="left" valign="middle"><seealso marker="#Object Descriptor">Object Descriptor</seealso></cell>
- <cell align="left" valign="middle"></cell>
- </row>
- <row>
- <cell align="left" valign="middle"><seealso marker="#The TIME types">The TIME types</seealso></cell>
- <cell align="left" valign="middle"></cell>
- </row>
- <tcaption>The supported ASN.1 types</tcaption>
- </table>
- <marker id="TypeNameValue"></marker>
- <note>
-      <p>Values of each ASN.1 type have their own representation in Erlang,
-      described in the following subsections. Users must provide
-      these values for encoding according to the representation, as
- in the example below.</p>
- </note>
- <pre>
-Operational ::= BOOLEAN --ASN.1 definition </pre>
- <p>In Erlang code it may look like:</p>
- <pre>
-Val = true,
-{ok,Bytes} = MyModule:encode('Operational', Val), </pre>
- <p>Below follows a description of how
- values of each type can be represented in Erlang.
- </p>
-
- <section>
- <marker id="BOOLEAN"></marker>
- <title>BOOLEAN</title>
- <p>Booleans in ASN.1 express values that can be either
- TRUE or FALSE.
-      The meanings assigned to TRUE or FALSE are beyond the scope
- of this text. <br></br>
-
- In ASN.1 it is possible to have:</p>
- <pre>
-Operational ::= BOOLEAN
- </pre>
- <p>Assigning a value to the type Operational in Erlang is possible by
- using the following Erlang code:</p>
- <code type="erl">
-Myvar1 = true,
- </code>
- <p>Thus, in Erlang the atoms <c>true</c> and <c>false</c> are used
- to encode a boolean value.</p>
- </section>
-
- <section>
- <marker id="INTEGER"></marker>
- <title>INTEGER</title>
-      <p>ASN.1 itself specifies indefinitely large integers. Erlang
-        systems of version 4.3 and higher support very large
-        integers, in practice indefinitely large integers.</p>
- <p>The concept of sub-typing can be applied to integers as well
- as to other ASN.1 types. The details of sub-typing are not
- explained here, for further info see [<cite id="X.680"></cite>]. A variety
- of syntaxes are allowed when defining a type as an integer:</p>
- <pre>
-T1 ::= INTEGER
-T2 ::= INTEGER (-2..7)
-T3 ::= INTEGER (0..MAX)
-T4 ::= INTEGER (0&lt;..MAX)
-T5 ::= INTEGER (MIN&lt;..-99)
-T6 ::= INTEGER {red(0),blue(1),white(2)}
- </pre>
- <p>The Erlang representation of an ASN.1 INTEGER is an integer or
- an atom if a so called <c>Named Number List</c> (see T6 above)
- is specified.</p>
- <p>Below is an example of Erlang code which assigns values for the
- above types: </p>
- <pre>
-T1value = 0,
-T2value = 6,
-T6value1 = blue,
-T6value2 = 0,
-T6value3 = white
- </pre>
- <p>The Erlang variables above are now bound to valid instances of
- ASN.1 defined types. This style of value can be passed directly
- to the encoder for transformation into a series of bytes.</p>
- <p>The decoder will return an atom if the value corresponds to a
- symbol in the Named Number List.</p>
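-      <p>As a minimal sketch (assuming the types above are placed in a
-        hypothetical ASN.1 module named <c>Types</c> and compiled with the
-        <c>ber</c> option), a named number round-trips like this:</p>
-      <pre>
-{ok,Bin} = 'Types':encode('T6', blue),
-{ok,blue} = 'Types':decode('T6', Bin). </pre>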
- </section>
-
- <section>
- <marker id="REAL"></marker>
- <title>REAL</title>
- <p>The following ASN.1 type is used for real numbers:</p>
- <pre>
-R1 ::= REAL
- </pre>
- <p>It can be assigned a value in Erlang as:</p>
- <pre>
-R1value1 = "2.14",
-R1value2 = {256,10,-2},
- </pre>
- <p>In the last line note that the tuple {256,10,-2} is the real number
- 2.56 in a special notation, which will encode faster than simply
- stating the number as <c>"2.56"</c>. The arity three tuple is
- <c>{Mantissa,Base,Exponent}</c> i.e. Mantissa * Base^Exponent.</p>
- </section>
-
- <section>
- <marker id="NULL"></marker>
- <title>NULL</title>
- <p>Null is suitable in cases where supply and recognition of a value
- is important but the actual value is not.</p>
- <pre>
-Notype ::= NULL
- </pre>
- <p>The NULL type can be assigned in Erlang:</p>
- <pre>
-N1 = 'NULL',
- </pre>
- <p>The actual value is the quoted atom 'NULL'.</p>
- </section>
-
- <section>
- <marker id="ENUMERATED"></marker>
- <title>ENUMERATED</title>
-      <p>The enumerated type can be used when the value we wish to
-        describe may only take one of a set of predefined values.</p>
- <pre>
-DaysOfTheWeek ::= ENUMERATED {
- sunday(1),monday(2),tuesday(3),
- wednesday(4),thursday(5),friday(6),saturday(7) }
- </pre>
- <p>For example to assign a weekday value in Erlang use the same atom
- as in the <c>Enumerations</c> of the type definition:</p>
- <pre>
-Day1 = saturday,
- </pre>
- <p>The enumerated type is very similar to an integer type, when
- defined with a set of predefined values. An enumerated type
- differs from an integer in that it may only have specified
- values, whereas an integer can also have any other value.</p>
- </section>
-
- <section>
- <marker id="BIT STRING"></marker>
- <title>BIT STRING</title>
- <p>The BIT STRING type can be used to model information which
- is made up of arbitrary length series of bits. It is intended
- to be used for a selection of flags, not for binary files. <br></br>
-
- In ASN.1 BIT STRING definitions may look like:
- </p>
- <pre>
-Bits1 ::= BIT STRING
-Bits2 ::= BIT STRING {foo(0),bar(1),gnu(2),gnome(3),punk(14)}
- </pre>
- <p>There are two notations available for representation of
- BIT STRING values in Erlang and as input to the encode functions.</p>
- <list type="ordered">
- <item>A bitstring. By default, a BIT STRING with no
- symbolic names will be decoded to an Erlang bitstring.</item>
- <item>A list of atoms corresponding to atoms in the <c>NamedBitList</c>
- in the BIT STRING definition. A BIT STRING with symbolic
- names will always be decoded to this format.</item>
- </list>
- <p>Example:</p>
- <pre>
-Bits1Val1 = &lt;&lt;0:1,1:1,0:1,1:1,1:1&gt;&gt;,
-Bits2Val1 = [gnu,punk],
-Bits2Val2 = &lt;&lt;2#1110:4&gt;&gt;,
-Bits2Val3 = [bar,gnu,gnome],
- </pre>
- <p><c>Bits2Val2</c> and <c>Bits2Val3</c> above denote the same value.</p>
- <p><c>Bits2Val1</c> is assigned symbolic values. The assignment means
-        that the bits corresponding to <c>gnu</c> and <c>punk</c>, that is, bits
-        2 and 14, are set to 1 and the rest are set to 0. The symbolic values
-        appear as a list of values. If a named value that is not specified
-        in the type definition appears, a run-time error will occur.</p>
- <p>BIT STRINGS may also be sub-typed with, for example, a SIZE
- specification:</p>
- <pre>
-Bits3 ::= BIT STRING (SIZE(0..31)) </pre>
- <p>This means that no bit higher than 31 can ever be set.</p>
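-      <p>Assuming the definitions above are compiled in a hypothetical
-        module <c>Types</c>, encoding and decoding a BIT STRING with
-        symbolic values could look like this sketch:</p>
-      <pre>
-{ok,Bin} = 'Types':encode('Bits2', [gnu,punk]),
-{ok,[gnu,punk]} = 'Types':decode('Bits2', Bin). </pre>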
-
- <section>
- <title>Deprecated representations for BIT STRING</title>
- <p>In addition to the representations described above, the
- following deprecated representations are available if the
- specification has been compiled with the
- <c>legacy_erlang_types</c> option:</p>
- <list type="ordered">
- <item>A list of binary digits (0 or 1). This format is
- accepted as input to the encode functions, and a BIT STRING
- will be decoded to this format if the
- <em>legacy_bit_string</em> option has been given.
- </item>
-        <item>As <c>{Unused,Binary}</c> where <c>Unused</c> denotes
-        how many (0 to 7) trailing bits in the least significant byte
-        of <c>Binary</c> that are unused. This format is
- accepted as input to the encode functions, and a <c>BIT
- STRING</c> will be decoded to this format if
- <em>compact_bit_string</em> has been given.
- </item>
- <item>A hexadecimal number (or an integer). This format
- should be avoided, since it is easy to misinterpret a BIT
- STRING value in this format.
- </item>
- </list>
- </section>
- </section>
-
- <section>
- <marker id="OCTET STRING"></marker>
- <title>OCTET STRING</title>
-      <p>The OCTET STRING is the simplest of all ASN.1 types. It is used
-        to move or transfer, for example, binary files or other
-        unstructured information that complies with two rules: the
-        data consists of octets, and no further encoding is
-        required.</p>
- <p>It is possible to have the following ASN.1 type definitions:</p>
- <pre>
-O1 ::= OCTET STRING
-O2 ::= OCTET STRING (SIZE(28)) </pre>
- <p>With the following example assignments in Erlang:</p>
- <pre>
-O1Val = &lt;&lt;17,13,19,20,0,0,255,254&gt;&gt;,
-O2Val = &lt;&lt;"must be exactly 28 chars...."&gt;&gt;,</pre>
- <p>By default, an OCTET STRING is always represented as
- an Erlang binary. If the specification has been compiled with
- the <c>legacy_erlang_types</c> option, the encode functions
- will accept both lists and binaries, and the decode functions
- will decode an OCTET STRING to a list.</p>
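-      <p>A minimal sketch, assuming the definitions above are compiled in
-        a hypothetical module <c>Types</c>:</p>
-      <pre>
-{ok,Bin} = 'Types':encode('O1', &lt;&lt;17,13,19,20,0,0,255,254>>),
-{ok,&lt;&lt;17,13,19,20,0,0,255,254>>} = 'Types':decode('O1', Bin). </pre>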
- </section>
-
- <section>
- <marker id="Character Strings"></marker>
- <title>Character Strings</title>
- <p>ASN.1 supports a wide variety of character sets. The main difference
- between OCTET STRINGS and the Character strings is that OCTET
- STRINGS have no imposed semantics on the bytes delivered.</p>
- <p>However, when using for instance the IA5String (which closely
- resembles ASCII) the byte 65 (in decimal
- notation) <em>means</em> the character 'A'.
- </p>
- <p>For example, if a defined type is to be a VideotexString and
- an octet is received with the unsigned integer value X, then
-        the octet should be interpreted as specified in the standards
-        ITU-T T.100 and T.101.
- </p>
-      <p>The ASN.1-to-Erlang compiler
-        does not determine the correct interpretation of each BER
-        (Basic Encoding Rules) string octet value for the different
-        character string types. Interpretation of octets is the
-        responsibility of the application. Therefore, from the BER
-        point of view, octet strings are very similar to
-        character strings and are compiled in the same way.
-      </p>
- <p>It should be noted that when PER (Packed Encoding Rules) is
- used, there is a significant difference in the encoding scheme
- between OCTET STRINGS and other strings. The constraints
- specified for a type are especially important for PER, where
- they affect the encoding.
- </p>
- <p>Here are some examples:</p>
- <pre>
-Digs ::= NumericString (SIZE(1..3))
-TextFile ::= IA5String (SIZE(0..64000)) </pre>
- <p>with corresponding Erlang assignments:</p>
- <pre>
-DigsVal1 = "456",
-DigsVal2 = "123",
-TextFileVal1 = "abc...xyz...",
-TextFileVal2 = [88,76,55,44,99,121 .......... a lot of characters here ....] </pre>
- <p>The Erlang representation for "BMPString" and
- "UniversalString" is either a list of ASCII values or a list
- of quadruples. The quadruple representation associates to the
- Unicode standard representation of characters. The ASCII
- characters are all represented by quadruples beginning with
- three zeros like {0,0,0,65} for the 'A' character. When
- decoding a value for these strings the result is a list of
- quadruples, or integers when the value is an ASCII character.</p>
-
- <p>The following example shows how it works. We have the following
- specification in the file <c>PrimStrings.asn1</c>.</p>
- <pre>
-PrimStrings DEFINITIONS AUTOMATIC TAGS ::=
-BEGIN
- BMP ::= BMPString
-END
- </pre>
-
- <p>Encoding and decoding some strings:</p>
-
- <pre>
-1> <input>asn1ct:compile('PrimStrings', [ber]).</input>
-ok
-2> <input>{ok,Bytes1} = 'PrimStrings':encode('BMP', [{0,0,53,53},{0,0,45,56}]).</input>
-{ok,&lt;&lt;30,4,53,54,45,56>>}
-3> <input>'PrimStrings':decode('BMP', Bytes1).</input>
-{ok,[{0,0,53,53},{0,0,45,56}]}
-4> <input>{ok,Bytes2} = 'PrimStrings':encode('BMP', [{0,0,53,53},{0,0,0,65}]).</input>
-{ok,&lt;&lt;30,4,53,53,0,65>>}
-5> <input>'PrimStrings':decode('BMP', Bytes2).</input>
-{ok,[{0,0,53,53},65]}
-6> <input>{ok,Bytes3} = 'PrimStrings':encode('BMP', "BMP string").</input>
-{ok,&lt;&lt;30,20,0,66,0,77,0,80,0,32,0,115,0,116,0,114,0,105,0,110,0,103>>}
-7> <input>'PrimStrings':decode('BMP', Bytes3).</input>
-{ok,"BMP string"} </pre>
-
- <p>The UTF8String type is represented as a UTF-8 encoded binary in
- Erlang. Such binaries can be created directly using the binary syntax
- or by converting from a list of Unicode code points using the
- <c>unicode:characters_to_binary/1</c> function.</p>
-
- <p>Here are some examples showing how UTF-8 encoded binaries can
- be created and manipulated:</p>
-
- <pre>
-1> <input>Gs = "Мой маленький Гном".</input>
-[1052,1086,1081,32,1084,1072,1083,1077,1085,1100,1082,1080,
- 1081,32,1043,1085,1086,1084]
-2> <input>Gbin = unicode:characters_to_binary(Gs).</input>
-&lt;&lt;208,156,208,190,208,185,32,208,188,208,176,208,187,208,
- 181,208,189,209,140,208,186,208,184,208,185,32,208,147,
- 208,...>>
-3> <input>Gbin = &lt;&lt;"Мой маленький Гном"/utf8>>.</input>
-&lt;&lt;208,156,208,190,208,185,32,208,188,208,176,208,187,208,
- 181,208,189,209,140,208,186,208,184,208,185,32,208,147,
- 208,...>>
-4> <input>Gs = unicode:characters_to_list(Gbin).</input>
-[1052,1086,1081,32,1084,1072,1083,1077,1085,1100,1082,1080,
- 1081,32,1043,1085,1086,1084]
- </pre>
-
- <p>See the <seealso marker="stdlib:unicode">unicode</seealso> module
- for more details.</p>
-
- <p>In the following example we will use this ASN.1 specification:</p>
- <pre>
-UTF DEFINITIONS AUTOMATIC TAGS ::=
-BEGIN
- UTF ::= UTF8String
-END
- </pre>
-
- <p>Encoding and decoding a string with Unicode characters:</p>
-
- <pre>
-5> <input>asn1ct:compile('UTF', [ber]).</input>
-ok
-6> <input>{ok,Bytes1} = 'UTF':encode('UTF', &lt;&lt;"Гном"/utf8>>).</input>
-{ok,&lt;&lt;12,8,208,147,208,189,208,190,208,188>>}
-7> <input>{ok,Bin1} = 'UTF':decode('UTF', Bytes1).</input>
-{ok,&lt;&lt;208,147,208,189,208,190,208,188>>}
-8> <input>io:format("~ts\n", [Bin1]).</input>
-Гном
-ok
-9> <input>unicode:characters_to_list(Bin1).</input>
-[1043,1085,1086,1084]
- </pre>
- </section>
-
- <section>
- <marker id="OBJECT IDENTIFIER"></marker>
- <title>OBJECT IDENTIFIER</title>
- <p>The OBJECT IDENTIFIER is used whenever a unique identity is required.
- An ASN.1 module, a transfer syntax, etc. is identified with an
- OBJECT IDENTIFIER. Assume the example below:</p>
- <pre>
-Oid ::= OBJECT IDENTIFIER
- </pre>
-      <p>The example below is then a valid Erlang instance of the
-        type 'Oid':</p>
- <pre>
-OidVal1 = {1,2,55},
- </pre>
- <p>The OBJECT IDENTIFIER value is simply a tuple with the
- consecutive values which must be integers.
- </p>
- <p>The first value is limited to the values 0, 1 or 2 and the
- second value must be in the range 0..39 when the first value
- is 0 or 1.
- </p>
- <p>The OBJECT IDENTIFIER is a very important type and it is
- widely used within different standards to uniquely identify
- various objects. In [<cite id="DUBUISSON"></cite>], there is an
- easy-to-understand description of the usage of
- OBJECT IDENTIFIER.</p>
- <p></p>
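-      <p>A minimal sketch, assuming the type 'Oid' above is compiled in a
-        hypothetical module <c>Types</c>:</p>
-      <pre>
-{ok,Bin} = 'Types':encode('Oid', {1,2,55}),
-{ok,{1,2,55}} = 'Types':decode('Oid', Bin). </pre>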
- </section>
-
- <section>
- <marker id="Object Descriptor"></marker>
- <title>Object Descriptor</title>
-      <p>A value of this type is assigned as an ordinary string,
-        like this:</p>
-
- <pre>
- "This is the value of an Object descriptor"</pre>
- </section>
-
- <section>
- <marker id="The TIME types"></marker>
- <title>The TIME Types</title>
-      <p>Two different time types are defined within ASN.1: Generalized
-        Time and UTC (Universal Time Coordinated). Both are assigned a
-        value as an ordinary string within double quotes, for example
-        "19820102070533.8".</p>
-      <p>In case of DER encoding, the compiler does not check the validity
-        of the time values. The DER requirements on those strings are
-        regarded as a matter for the application to fulfill.</p>
- </section>
-
- <section>
- <marker id="SEQUENCE"></marker>
- <title>SEQUENCE</title>
- <p>The structured types of ASN.1 are constructed from other types
- in a manner similar to the concepts of array and struct in C.
- <br></br>
- A SEQUENCE in ASN.1 is
- comparable with a struct in C and a record in Erlang.
- A SEQUENCE may be defined as:</p>
- <pre>
-Pdu ::= SEQUENCE {
- a INTEGER,
- b REAL,
- c OBJECT IDENTIFIER,
- d NULL } </pre>
- <p>This is a 4-component structure called 'Pdu'. The major format
- for representation of SEQUENCE in Erlang is the record format.
- For each SEQUENCE and <c>SET</c> in an ASN.1 module an Erlang
- record declaration is generated. For <c>Pdu</c> above, a record
- like this is defined:</p>
- <pre>
--record('Pdu',{a, b, c, d}). </pre>
- <p>The record declarations for a module <c>M</c> are placed in a
- separate <c>M.hrl</c> file.</p>
- <p>Values can be assigned in Erlang as shown below:</p>
- <pre>
-MyPdu = #'Pdu'{a=22,b=77.99,c={0,1,2,3,4},d='NULL'}. </pre>
- <p>The decode functions will return a record as result when decoding
- a <c>SEQUENCE</c> or a <c>SET</c>.</p>
-
- <p>A <c>SEQUENCE</c> and a <c>SET</c> may contain a component
- with a <c>DEFAULT</c> key word followed by the actual value that
- is the default value. The <c>DEFAULT</c> keyword means that the
- application doing the encoding can omit encoding of the value,
- thus resulting in fewer bytes to send to the receiving
- application.</p>
-
- <p>An application can use the atom <c>asn1_DEFAULT</c> to indicate
- that the encoding should be omitted for that position in
- the SEQUENCE.</p>
-
- <p>Depending on the encoding rules, the encoder may also compare
- the given value to the default value and automatically omit the
-      encoding if they are equal. How much effort the encoder makes
-      to compare the values depends on the encoding rules. The DER
-      encoding rules forbid encoding a value equal to the default value,
-      so the DER encoder has a more thorough and time-consuming comparison
-      than the encoders for the other encoding rules.</p>
-
- <p>In the following example we will use this ASN.1 specification:</p>
- <pre>
-File DEFINITIONS AUTOMATIC TAGS ::=
-BEGIN
-Seq1 ::= SEQUENCE {
- a INTEGER DEFAULT 1,
- b Seq2 DEFAULT {aa TRUE, bb 15}
-}
-
-Seq2 ::= SEQUENCE {
- aa BOOLEAN,
- bb INTEGER
-}
-
-Seq3 ::= SEQUENCE {
- bs BIT STRING {a(0), b(1), c(2)} DEFAULT {a, c}
-}
-END </pre>
- <p>Here is an example where the BER encoder is able to omit encoding
- of the default values:</p>
- <pre>
-1> <input>asn1ct:compile('File', [ber]).</input>
-ok
-2> <input>'File':encode('Seq1', {'Seq1',asn1_DEFAULT,asn1_DEFAULT}).</input>
-{ok,&lt;&lt;48,0>>}
-3> <input>'File':encode('Seq1', {'Seq1',1,{'Seq2',true,15}}).</input>
-{ok,&lt;&lt;48,0>>} </pre>
-
- <p>And here is an example with a named BIT STRING where the BER
- encoder will not omit the encoding:</p>
- <pre>
-4> <input>'File':encode('Seq3', {'Seq3',asn1_DEFAULT}).</input>
-{ok,&lt;&lt;48,0>>}
-5> <input>'File':encode('Seq3', {'Seq3',&lt;&lt;2#101:3>>}).</input>
-{ok,&lt;&lt;48,4,128,2,5,160>>} </pre>
-
- <p>The DER encoder will omit the encoding for the same BIT STRING:</p>
- <pre>
-6> <input>asn1ct:compile('File', [ber,der]).</input>
-ok
-7> <input>'File':encode('Seq3', {'Seq3',asn1_DEFAULT}).</input>
-{ok,&lt;&lt;48,0>>}
-8> <input>'File':encode('Seq3', {'Seq3',&lt;&lt;2#101:3>>}).</input>
-{ok,&lt;&lt;48,0>>} </pre>
- </section>
-
- <section>
- <marker id="SET"></marker>
- <title>SET</title>
- <p>In Erlang, the SET type is used exactly as SEQUENCE. Note
- that if the BER or DER encoding rules are used, decoding a
- SET is slower than decoding a SEQUENCE because the components
- must be sorted.</p>
- </section>
-
- <section>
- <title>Notes about extensibility for SEQUENCE and SET</title>
- <p>When a SEQUENCE or SET contains an extension marker and
- extension components like this:</p>
- <pre>
-SExt ::= SEQUENCE {
- a INTEGER,
- ...,
- b BOOLEAN }
- </pre>
- <p>It means that the type may get more components in newer
- versions of the ASN.1 spec. In this case it has got a new
- component <c>b</c>. Thus, incoming messages that will be decoded
-      may have more or fewer components than this one.
- </p>
- <p>The component <c>b</c> will be treated as
- an original component when encoding a message. In this case, as
- it is not an optional element, it must be encoded.
- </p>
- <p>During decoding the <c>b</c> field of the record will get the decoded
- value of the <c>b</c>
- component if present and otherwise the value <c>asn1_NOVALUE</c>.</p>
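-      <p>As a sketch (assuming <c>SExt</c> is compiled in a hypothetical
-        module <c>Types</c>), encoding always includes <c>b</c>, while a
-        message from an older peer that lacks <c>b</c> decodes with
-        <c>asn1_NOVALUE</c> in that field:</p>
-      <pre>
-{ok,Bin} = 'Types':encode('SExt', {'SExt',1,true}),
-{ok,{'SExt',1,true}} = 'Types':decode('SExt', Bin). </pre>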
- </section>
-
- <section>
- <marker id="CHOICE"></marker>
- <title>CHOICE</title>
- <p>The CHOICE type is a space saver and is similar to the concept of a
- 'union' in the C language.</p>
- <p>Assume:</p>
- <pre>
-SomeModuleName DEFINITIONS AUTOMATIC TAGS ::=
-BEGIN
-T ::= CHOICE {
- x REAL,
- y INTEGER,
- z OBJECT IDENTIFIER }
-END </pre>
- <p>It is then possible to assign values:</p>
- <pre>
-TVal1 = {y,17},
-TVal2 = {z,{0,1,2}},
- </pre>
- <p>A CHOICE value is always represented as the tuple
- <c>{ChoiceAlternative, Val}</c> where <c>ChoiceAlternative</c>
- is an atom denoting the selected choice alternative.
- </p>
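-      <p>A minimal sketch, assuming the module above has been compiled
-        with the <c>ber</c> option:</p>
-      <pre>
-{ok,Bin} = 'SomeModuleName':encode('T', {y,17}),
-{ok,{y,17}} = 'SomeModuleName':decode('T', Bin). </pre>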
-
- <section>
- <title>Extensible CHOICE</title>
- <p>When a CHOICE contains an extension marker and the decoder detects
- an unknown alternative of the CHOICE the value is represented as:</p>
- <pre>
-{asn1_ExtAlt, BytesForOpenType}
- </pre>
-        <p>where <c>BytesForOpenType</c> is a list of bytes constituting the
-          encoding of the "unknown" CHOICE alternative. </p>
- </section>
- </section>
-
- <section>
- <marker id="SOF"></marker>
- <title>SET OF and SEQUENCE OF</title>
- <p>The SET OF and SEQUENCE OF types correspond to the concept of an array
- found in several programming languages. The Erlang syntax for
-        both of these types is straightforward. For example:</p>
- <pre>
-Arr1 ::= SET SIZE (5) OF INTEGER (4..9)
-Arr2 ::= SEQUENCE OF OCTET STRING </pre>
- <p>We may have the following in Erlang:</p>
- <pre>
-Arr1Val = [4,5,6,7,8],
-Arr2Val = ["abc",[14,34,54],"Octets"], </pre>
- <p>Please note that the definition of the SET OF type implies that
- the order of the components is undefined, but in practice there is
- no difference between SET OF and SEQUENCE OF. The ASN.1 compiler
- for Erlang does not randomize the order of the SET OF components
- before encoding.</p>
- <p>However, in case of a value of the type <c>SET OF</c>, the DER
- encoding format requires the elements to be sent in ascending
-        order of their encoding, which implies an expensive sorting
-        procedure at run-time. Therefore, it is strongly recommended to
-        use <c>SEQUENCE OF</c> instead of <c>SET OF</c> where possible.</p>
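-      <p>A minimal sketch, assuming the definitions above are compiled in
-        a hypothetical module <c>Types</c>:</p>
-      <pre>
-{ok,Bin} = 'Types':encode('Arr1', [4,5,6,7,8]),
-{ok,[4,5,6,7,8]} = 'Types':decode('Arr1', Bin). </pre>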
- </section>
-
- <section>
- <marker id="ANY"></marker>
- <title>ANY and ANY DEFINED BY</title>
-      <p>The types <c>ANY</c> and <c>ANY DEFINED BY</c> were removed
-        from the standard in 1994. It is recommended not to use
-        these types any more. They may, however, exist in some old ASN.1
-        modules.
-        The idea with these types was to leave a "hole" in a definition where
-        one could put unspecified data of any kind, even non-ASN.1 data.</p>
- <p>A value of this type is encoded as an <c>open type</c>.</p>
- <p>Instead of <c>ANY</c>/<c>ANY DEFINED BY</c> one should use
- <c>information object class</c>, <c>table constraints</c> and
- <c>parameterization</c>. In particular the construct
-        <c>parameterization</c>. In particular, the construct
-        <c>TYPE-IDENTIFIER.@Type</c> accomplishes the same as the
- <p>See also <seealso marker="#Information Object">Information object</seealso></p>
- </section>
-
- <section>
- <marker id="NegotiationTypes"></marker>
- <title>EXTERNAL, EMBEDDED PDV and CHARACTER STRING</title>
- <p>These types are used in presentation layer negotiation. They are
- encoded according to their associated type, see [<cite id="X.680"></cite>].</p>
-      <p>The <c>EXTERNAL</c> type had a slightly different associated type
-        before 1994. [<cite id="X.691"></cite>] states that encoding shall follow
-        the older associated type. Therefore, the generated encode/decode
-        functions convert values of the newer format to the older format
-        before encoding. This implies that <c>EXTERNAL</c> type values of
-        either format can be used for encoding. Decoded
-        values are always returned in the newer format.</p>
- </section>
-
- <section>
- <title>Embedded Named Types</title>
- <p>The structured types previously described may very well have other named types
- as their components. The general syntax to assign a value to the component C
-        of a named ASN.1 type T in Erlang is the record syntax
-        <c>#'T'{'C'=Value}</c>,
-        where <c>Value</c> may be a value of yet another type T2.</p>
- <p>For example:</p>
- <pre>
-EmbeddedExample DEFINITIONS AUTOMATIC TAGS ::=
-BEGIN
-B ::= SEQUENCE {
- a Arr1,
- b T }
-
-Arr1 ::= SET SIZE (5) OF INTEGER (4..9)
-
-T ::= CHOICE {
- x REAL,
- y INTEGER,
- z OBJECT IDENTIFIER }
- END </pre>
- <p>The SEQUENCE b can be encoded like this in Erlang:</p>
- <pre>
-1> 'EmbeddedExample':encode('B', {'B',[4,5,6,7,8],{x,"7.77"}}).
-{ok,&lt;&lt;5,56,0,8,3,55,55,55,46,69,45,50>>} </pre>
- </section>
- </section>
-
- <section>
- <title>Naming of Records in .hrl Files</title>
- <p>When an ASN.1 specification is compiled all defined types of
- type SET or SEQUENCE will result in a corresponding record in the
- generated hrl file. This is because the values for SET/SEQUENCE
- as mentioned in sections above are represented as records.</p>
-    <p>There are, however, some special cases of this functionality,
-      presented below.</p>
-
- <section>
- <title>Embedded Structured Types</title>
- <p>It is also possible in ASN.1 to have components that are themselves
- structured types.
- For example, it is possible to have:</p>
- <pre>
-Emb ::= SEQUENCE {
- a SEQUENCE OF OCTET STRING,
- b SET {
- a INTEGER,
- b INTEGER DEFAULT 66},
- c CHOICE {
- a INTEGER,
- b FooType } }
-
-FooType ::= [3] VisibleString </pre>
- <p>The following records are generated because of the type <c>Emb</c>:</p>
- <pre>
--record('Emb',{a, b, c}).
--record('Emb_b',{a, b = asn1_DEFAULT}). % the embedded SET type
- </pre>
- <p>Values of the <c>Emb</c> type can be assigned like this:</p>
- <code type="none">
-V = #'Emb'{a=["qqqq",[1,2,255]],
- b = #'Emb_b'{a=99},
- c ={b,"Can you see this"}}.
- </code>
-      <p>For an embedded type of type SEQUENCE/SET in a SEQUENCE/SET,
-        the record name is extended with an underscore and the component
-        name. If the embedded structure is deeper, with SEQUENCE, SET, or
-        CHOICE types in the line, each component/alternative name will
-        be added to the record name.</p>
- <p>For example:</p>
- <pre>
-Seq ::= SEQUENCE{
- a CHOICE{
- b SEQUENCE {
- c INTEGER
- }
- }
-} </pre>
- <p>will result in the following record:</p>
- <pre>
--record('Seq_a_b',{c}). </pre>
-      <p>If the structured type has a component with an embedded
-        SEQUENCE OF/SET OF whose embedded type in turn is a
-        SEQUENCE/SET, a record with a SEQOF/SETOF
-        addition is generated, as in the following example:</p>
- <pre>
-Seq ::= SEQUENCE {
-    a SEQUENCE OF SEQUENCE {
-          b BOOLEAN
-     },
-     c SET OF SEQUENCE {
-          d INTEGER
-     }
-} </pre>
- <p>This results in the records:</p>
- <pre>
--record('Seq_a_SEQOF',{b}).
--record('Seq_c_SETOF',{d}). </pre>
- <p>A parameterized type should be considered as an embedded
-      type. Each time such a type is referenced, an instance of it is
- defined. Thus in the following example a record with name
- <c>'Seq_b'</c> is generated in the .hrl file and used to hold
- values.</p>
- <pre>
-Seq ::= SEQUENCE {
- b PType{INTEGER}
-}
-
-PType{T} ::= SEQUENCE{
- id T
-} </pre>
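-      <p>A value can then be assigned using the generated record, as in
-        this sketch (the module name <c>Types</c> is a placeholder, and
-        the generated <c>.hrl</c> file is assumed to be included):</p>
-      <pre>
-Val = #'Seq'{b = #'Seq_b'{id = 12}},
-{ok,Bin} = 'Types':encode('Seq', Val). </pre>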
- </section>
-
- <section>
- <title>Recursive Types</title>
- <p>Types may refer to themselves. Suppose:</p>
- <pre>
-Rec ::= CHOICE {
- nothing NULL,
- something SEQUENCE {
- a INTEGER,
- b OCTET STRING,
- c Rec }} </pre>
- <p>This type is recursive; that is, it refers to itself. This is allowed
- in ASN.1 and the ASN.1-to-Erlang compiler supports this recursive
- type. A value for this type is assigned in Erlang as shown below:</p>
- <pre>
-V = {something,#'Rec_something'{a = 77,
- b = "some octets here",
- c = {nothing,'NULL'}}}. </pre>
- </section>
- </section>
-
- <section>
- <title>ASN.1 Values</title>
-    <p>Values can be assigned to an ASN.1 type within the ASN.1 code
-      itself, as opposed to the actions taken in the previous chapter where
-      a value was assigned to an ASN.1 type in Erlang. The full value
-      syntax of ASN.1 is supported and [<cite id="X.680"></cite>] describes in detail how
-      to assign values in ASN.1. Below is a short example:</p>
- <pre>
-TT ::= SEQUENCE {
- a INTEGER,
- b SET OF OCTET STRING }
-
-tt TT ::= {a 77,b {"kalle","kula"}} </pre>
- <p>The value defined here could be used in several ways.
- Firstly, it could be used as the value in some DEFAULT component:</p>
- <pre>
-SS ::= SET {
- s OBJECT IDENTIFIER,
- val TT DEFAULT tt } </pre>
- <p>It could also be used from inside an Erlang program. If the above ASN.1
- code was defined in ASN.1 module <c>Values</c>, then the ASN.1 value
- <c>tt</c> can be reached from Erlang as
- a function call to <c>'Values':tt()</c> as in the example below.</p>
- <pre>
-1> <input>Val = 'Values':tt().</input>
-{'TT',77,["kalle","kula"]}
-2> <input>{ok,Bytes} = 'Values':encode('TT',Val).</input>
-{ok,&lt;&lt;48,18,128,1,77,161,13,4,5,107,97,108,108,101,4,4,
- 107,117,108,97&gt;&gt;}
-3> <input>'Values':decode('TT',Bytes).</input>
-{ok,{'TT',77,["kalle","kula"]}}
-4>
- </pre>
- <p>The above example shows that a function is generated by the compiler
- that returns a valid Erlang representation of the value, even though
- the value is of a complex type.</p>
- <p>Furthermore, there is a macro generated for each value in the .hrl
- file. So, the defined value <c>tt</c> can also be extracted by
- <c>?tt</c> in application code.</p>
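-    <p>A minimal sketch of using the generated macro in application code
-      (the include path and the function name are assumptions):</p>
-    <pre>
--include("Values.hrl").
-
-default_tt_bytes() ->
-    Val = ?tt,                        %% same term as 'Values':tt()
-    {ok,Bytes} = 'Values':encode('TT', Val),
-    Bytes. </pre>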
- </section>
-
- <section>
- <title>Macros</title>
-    <p>MACRO is not supported as the type is no longer part of the
-      ASN.1 standard.</p>
- </section>
-
- <section>
- <marker id="Information Object"></marker>
- <title>ASN.1 Information Objects (X.681)</title>
- <p>Information Object Classes, Information Objects and Information
- Object Sets (in the following called classes, objects and
- object sets respectively) are defined in the standard
- definition [<cite id="X.681"></cite>]. In the following only a brief
- explanation is given. </p>
-    <p>These constructs make it possible to define open types,
-      that is, values of such a type can be of any ASN.1 type. It is also
-      possible to define relationships between different types and
-      values, since classes can hold types, values, objects, object
-      sets and other classes in their fields.
- An Information Object Class may be defined in ASN.1 as:</p>
- <pre>
-GENERAL-PROCEDURE ::= CLASS {
- &amp;Message,
- &amp;Reply OPTIONAL,
- &amp;Error OPTIONAL,
- &amp;id PrintableString UNIQUE
-}
-WITH SYNTAX {
- NEW MESSAGE &amp;Message
- [REPLY &amp;Reply]
- [ERROR &amp;Error]
- ADDRESS &amp;id
-} </pre>
-    <p>An object is an instance of a class and an object set is a set
-      containing objects of one specified class. A definition may look like
-      the one below.</p>
- <p>The object <c>object1</c> is an instance of the CLASS
- GENERAL-PROCEDURE and has one type field and one fixed type value
- field. The object <c>object2</c> also has an OPTIONAL field ERROR,
- which is a type field.</p>
- <pre>
-object1 GENERAL-PROCEDURE ::= {
- NEW MESSAGE PrintableString
- ADDRESS "home"
-}
-
-object2 GENERAL-PROCEDURE ::= {
- NEW MESSAGE INTEGER
- ERROR INTEGER
- ADDRESS "remote"
-} </pre>
- <p>The field ADDRESS is a UNIQUE field. Objects in an object set must
- have unique values in their UNIQUE field, as in GENERAL-PROCEDURES: </p>
- <pre>
-GENERAL-PROCEDURES GENERAL-PROCEDURE ::= {
- object1 | object2} </pre>
-    <p>A class, object, or object set cannot be encoded; it can only be
-      referred to when defining other ASN.1 entities. Typically one refers to a
-      class and to object sets by table constraints and component
-      relation constraints [<cite id="X.682"></cite>] in ASN.1 types, as in: </p>
- <pre>
-StartMessage ::= SEQUENCE {
- msgId GENERAL-PROCEDURE.&amp;id ({GENERAL-PROCEDURES}),
-    content GENERAL-PROCEDURE.&amp;Message ({GENERAL-PROCEDURES}{@msgId})
-    } </pre>
- <p>In the type <c>StartMessage</c> the constraint following the
- <c>content</c> field tells that in a value of type
- <c>StartMessage</c> the value in the <c>content</c> field must
- come from the same object that is chosen by the <c>msgId</c>
- field.</p>
-    <p>So, the value <c>#'StartMessage'{msgId="home",content="Any Printable String"}</c> is legal to encode as a StartMessage
-      value. The value <c>#'StartMessage'{msgId="remote", content="Some String"}</c>, however, is illegal, since the constraint
-      in StartMessage says that when you have chosen a value from a
-      specific object in the object set GENERAL-PROCEDURES in the
-      msgId field, you must choose a value from that same object in
-      the content field too. In this second case the content should have been
-      an INTEGER value.</p>
- <p><c>StartMessage</c> can in the <c>content</c> field be
- encoded with a value of any type that an object in the
- <c>GENERAL-PROCEDURES</c> object set has in its <c>NEW MESSAGE</c> field. This field refers to a type field
- <c>&amp;Message</c> in the class. The <c>msgId</c> field is always
- encoded as a PrintableString, since the field refers to a fixed type
- in the class.</p>
-    <p>In practice, object sets are usually declared to be extensible
-      so that more objects can be added to the set later. Extensibility is
-      indicated like this:</p>
- <pre>
-GENERAL-PROCEDURES GENERAL-PROCEDURE ::= {
- object1 | object2, ...} </pre>
- <p>When decoding a type that uses an extensible set constraint,
- there is always the possibility that the value in the UNIQUE
- field is unknown (i.e. the type has been encoded with a later
- version of the ASN.1 specification). When that happens, the
- unencoded data will be returned wrapped in a tuple like this:</p>
-
- <pre>
-{asn1_OPENTYPE,Binary}</pre>
- <p>where <c>Binary</c> is an Erlang binary that contains the encoded
- data. (If the option <c>legacy_erlang_types</c> has been given,
- just the binary will be returned.)</p>
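-    <p>A decoding application can match on the wrapper, as in this sketch
-      (module and function names are placeholders):</p>
-    <code type="none">
-case 'MyProtocol':decode('StartMessage', Bin) of
-    {ok,#'StartMessage'{content = {asn1_OPENTYPE,Encoded}}} ->
-        %% Value from an unknown object; Encoded is the raw encoding.
-        handle_unknown(Encoded);
-    {ok,#'StartMessage'{content = Content}} ->
-        handle(Content)
-end </code>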
- </section>
-
- <section>
- <title>Parameterization (X.683)</title>
- <p>Parameterization, which is defined in the standard [<cite id="X.683"></cite>], can be used when defining types, values, value
- sets, information object classes, information objects or
- information object sets.
-      A part of a definition can be supplied as a parameter. For
-      instance, if a Type is used in a definition with a certain
-      purpose, one wants the type name to express the intention. This
-      can be done with parameterization.</p>
-    <p>When many types (or other ASN.1 entities) differ only in some
-      minor aspects, but the structure of the types is similar, a single
-      general type can be defined and the differences supplied
-      through parameters. </p>
- <p>One example of use of parameterization is:</p>
- <pre>
-General{Type} ::= SEQUENCE
-{
- number INTEGER,
- string Type
-}
-
-T1 ::= General{PrintableString}
-
-T2 ::= General{BIT STRING}
- </pre>
-    <p>An example of a value that can be encoded as type T1 is {'T1',12,"hello"}.</p>
- <p>Note that the compiler does not generate encode/decode functions for
- parameterized types, but only for the instances of the parameterized
- types. Therefore, if a file contains the types General{}, T1 and T2 above,
- encode/decode functions will only be generated for T1 and T2.
- </p>
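-    <p>A minimal sketch, assuming the definitions above are compiled in a
-      hypothetical module <c>Types</c>:</p>
-    <pre>
-{ok,Bin} = 'Types':encode('T1', {'T1',12,"hello"}),
-{ok,{'T1',12,"hello"}} = 'Types':decode('T1', Bin). </pre>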
- </section>
-</chapter>
-
diff --git a/lib/asn1/doc/src/asn1ct.xml b/lib/asn1/doc/src/asn1ct.xml
index 32ff2d52cf..30808a5ead 100644
--- a/lib/asn1/doc/src/asn1ct.xml
+++ b/lib/asn1/doc/src/asn1ct.xml
@@ -35,43 +35,45 @@
<modulesummary>ASN.1 compiler and compile-time support functions</modulesummary>
<description>
<p>The ASN.1 compiler takes an ASN.1 module as input and generates a
- corresponding Erlang module which can encode and decode the data-types
- specified. Alternatively the compiler takes a specification module
- (se below) specifying all input modules and generates one module with
- encode/decode functions. There are also some generic functions which
- can be used in during development of applications which handles ASN.1
- data (encoded as BER or PER).</p>
+ corresponding Erlang module, which can encode and decode the specified
+ data types. Alternatively, the compiler takes a specification module
+ specifying all input modules, and generates a module with
+ encode/decode functions. In addition, some generic functions
+ can be used during development of applications that handles ASN.1
+ data (encoded as <c>BER</c> or <c>PER</c>).</p>
+
<note>
- <p>By default in OTP 17, the representation of the BIT STRING
- and OCTET STRING types as Erlang terms have changed. BIT
- STRING values are now Erlang bitstrings and OCTET STRING values
- are binaries. Also, an undecoded open type will now be wrapped in
- a <c>asn1_OPENTYPE</c> tuple. For details see <seealso
- marker="asn1_ug#BIT STRING">BIT STRING</seealso>, <seealso
- marker="asn1_ug#OCTET STRING">OCTET STRING</seealso>, and
- <seealso marker="asn1_ug#Information%20Object">ASN.1 Information Objects</seealso> in User's Guide.</p>
- <p>To revert to the old representation of the types, use the
- <c>legacy_erlang_types</c> option.</p>
+ <p>By default in OTP 17, the representation of the <c>BIT STRING</c>
+ and <c>OCTET STRING</c> types as Erlang terms were changed. <c>BIT
+ STRING</c> values are now Erlang bit strings and <c>OCTET STRING</c>
+ values are binaries. Also, an undecoded open type is now wrapped in
+ an <c>asn1_OPENTYPE</c> tuple. For details, see <seealso
+ marker="asn1_getting_started#BIT STRING">BIT STRING</seealso>, <seealso
+ marker="asn1_getting_started#OCTET STRING">OCTET STRING</seealso>, and
+ <seealso marker="asn1_getting_started#Information Object">ASN.1 Information Objects</seealso> in the User's Guide.</p>
+ <p>To revert to the old representation of the types, use option
+ <c>legacy_erlang_types</c>.</p>
</note>
+
<note>
- <p>In R16, the options have been simplified. The back-end is chosen
+ <p>In OTP R16, the options were simplified. The back end is chosen
using one of the options <c>ber</c>, <c>per</c>, or <c>uper</c>.
- The options <c>optimize</c>, <c>nif</c>, and <c>driver</c> options
- are no longer necessary (and the ASN.1 compiler will print a
- warning if they are used). The options <c>ber_bin</c>, <c>per_bin</c>,
- and <c>uper_bin</c> options will still work, but will print a warning.
+      The options <c>optimize</c>, <c>nif</c>, and <c>driver</c>
+      are no longer necessary (and the ASN.1 compiler generates a
+      warning if they are used). The options <c>ber_bin</c>, <c>per_bin</c>,
+      and <c>uper_bin</c> still work, but generate a warning.
</p>
- <p>Another change in R16 is that the generated <c>encode/2</c>
- function always returns a binary.
- The <c>encode/2</c> function for the BER back-end used to return
- an iolist.</p>
+ <p>Another change in OTP R16 is that the generated function
+ <c>encode/2</c> always returns a binary. Function <c>encode/2</c>
+ for the <c>BER</c> back end used to return an iolist.</p>
</note>
</description>
+
<funcs>
<func>
<name>compile(Asn1module) -> ok | {error, Reason}</name>
<name>compile(Asn1module, Options) -> ok | {error, Reason}</name>
- <fsummary>Compile an ASN.1 module and generate encode/decode functions according to the encoding rules BER or PER.</fsummary>
+ <fsummary>Compiles an ASN.1 module and generates encode/decode functions according to encoding rules BER or PER.</fsummary>
<type>
<v>Asn1module = atom() | string()</v>
<v>Options = [Option| OldOption]</v>
@@ -85,79 +87,82 @@
<v>Prefix = string()</v>
</type>
<desc>
- <p>Compiles the ASN.1 module <c>Asn1module</c> and generates an
- Erlang module <c>Asn1module.erl</c> with encode and decode
+ <p>Compiles the <c>ASN.1</c> module <c>Asn1module</c> and generates
+ an Erlang module <c>Asn1module.erl</c> with encode and decode
functions for the types defined in <c>Asn1module</c>. For each
- ASN.1 value defined in the module an Erlang function which
+ ASN.1 value defined in the module, an Erlang function that
returns the value in Erlang representation is generated.</p>
- <p>If <c>Asn1module</c> is a filename without extension first
- <c>".asn1"</c> is assumed, then <c>".asn"</c> and finally
+ <p>If <c>Asn1module</c> is a filename without extension, first
+ <c>".asn1"</c> is assumed, then <c>".asn"</c>, and finally
<c>".py"</c> (to be compatible with the old ASN.1 compiler).
- Of course <c>Asn1module</c> can be a full pathname (relative or
+ <c>Asn1module</c> can be a full pathname (relative or
absolute) including filename with (or without) extension.
<marker id="asn1set"></marker>
</p>
- <p>If one wishes to compile a set of Asn1 modules into one
- Erlang file with encode/decode functions one has to list all
+        <p>If a set of <c>ASN.1</c> modules is to be compiled into one
+        Erlang file with encode/decode functions, list all
+        involved files in a configuration file. This configuration
- file must have a double extension ".set.asn", (".asn" can
- alternatively be ".asn1" or ".py"). The input files' names
- must be listed, within quotation marks (""), one at each row
+ file must have a double extension <c>".set.asn"</c>
+ (<c>".asn"</c> can alternatively be <c>".asn1"</c> or <c>".py"</c>).
+ List the input file names
+ within quotation marks (""), one at each row
in the file. If the input files are <c>File1.asn</c>,
- <c>File2.asn</c> and <c>File3.asn</c> the configuration file
- shall look like:</p>
+ <c>File2.asn</c>, and <c>File3.asn</c>, the configuration file
+ must look as follows:</p>
<pre>
File1.asn
File2.asn
-File3.asn </pre>
- <p>The output files will in this case get their names from the
- configuration file. If the configuration file has the name
- <c>SetOfFiles.set.asn</c> the name of the output files will be
- <c>SetOfFiles.hrl, SetOfFiles.erl and SetOfFiles.asn1db</c>.</p>
- <p>Sometimes in a system of ASN.1 modules there are different
- default tag modes, e.g. AUTOMATIC, IMPLICIT or EXPLICIT. The
- multi file compilation resolves the default tagging as if
+File3.asn</pre>
+ <p>The output files in this case get their names from the
+ configuration file. If the configuration file is named
+ <c>SetOfFiles.set.asn</c>, the names of the output files are
+ <c>SetOfFiles.hrl, SetOfFiles.erl, and SetOfFiles.asn1db</c>.</p>
+        <p>Sometimes in a system of <c>ASN.1</c> modules, different
+        default tag modes are used, for example, <c>AUTOMATIC</c>, <c>IMPLICIT</c>,
+        or <c>EXPLICIT</c>. The
+ multi-file compilation resolves the default tagging as if
the modules were compiled separately.</p>
- <p>Another unwanted effect that may occur in multi file compilation
- is name collisions. The compiler solves this problem in two
- ways: If the definitions are identical then the output module
- keeps only one definition with the original name. But if
- definitions only have same name and differs in the definition,
- then they will be renamed. The new names will be the definition
- name and the original module name concatenated.</p>
- <p>If any name collision have occurred the compiler reports a
- "NOTICE: ..." message that tells if a definition was renamed,
+        <p>Name collision is another unwanted effect that can occur in
+        multi-file compilation. The compiler solves this problem in one
+ of two ways:</p>
+ <list type="bulleted">
+ <item>If the definitions are identical, the output module
+ keeps only one definition with the original name.</item>
+          <item>If the definitions have the same name and differ in the
+ definition, they are renamed. The new names are the definition
+ name and the original module name concatenated.</item>
+ </list>
+ <p>If a name collision occurs, the compiler reports a
+ <c>"NOTICE: ..."</c> message that tells if a definition was renamed,
and the new name that must be used to encode/decode data.</p>
-
- <p>
- <c>Options</c> is a list with options specific for the asn1
+ <p><c>Options</c> is a list with options specific for the <c>ASN.1</c>
compiler and options that are applied to the Erlang compiler.
- The latter are those that not is recognized as asn1 specific.
- Available options are:
+        The latter are the options that are not recognized as <c>ASN.1</c> specific. The
+ available options are as follows:
</p>
<taglist>
<tag><c>ber | per | uper</c></tag>
<item>
<p>
The encoding rule to be used. The supported encoding rules
- are BER (Basic Encoding Rules),
- PER aligned (Packed Encoding Rules) and PER unaligned.
- If the encoding rule option is omitted <c>ber</c>
+ are Basic Encoding Rules (BER),
+ Packed Encoding Rules (PER) aligned, and PER unaligned.
+ If the encoding rule option is omitted, <c>ber</c>
is the default.
</p>
<p>
The generated Erlang module always gets the same name
- as the ASN.1 module and as a consequence of this only one
- encoding rule per ASN.1 module can be used at runtime.
+ as the <c>ASN.1</c> module. Therefore, only one
+ encoding rule per <c>ASN.1</c> module can be used at runtime.
</p>
</item>
<tag><c>der</c></tag>
<item>
<p>
- By this option the Distinguished Encoding Rules (DER) is chosen.
+ With this option, the Distinguished Encoding Rules (DER) are chosen.
DER is regarded as a specialized variant of the BER encoding
- rule, therefore the <c>der</c> option only makes sense together
- with the <c>ber</c> option.
+ rule. Therefore, this option only makes sense together
+ with option <c>ber</c>.
This option
sometimes adds sorting and value checks when encoding, which
implies a slower encoding. The decoding routines are the same
@@ -167,118 +172,123 @@ File3.asn </pre>
<tag><c>compact_bit_string</c></tag>
<item>
<p>
- The BIT STRING type will be decoded to the "compact notation".
+ The <c>BIT STRING</c> type is decoded to "compact notation".
<em>This option is not recommended for new code.</em>
</p>
- <p>For details see
- <seealso marker="asn1_ug#BIT STRING">
- BIT STRING type section in the Users Guide
- </seealso>.
+ <p>For details, see Section
+ <seealso marker="asn1_getting_started#BIT STRING">
+ BIT STRING</seealso> in the User's Guide.
</p>
- <p>This option implies the <c>legacy_erlang_types</c> option.</p>
+ <p>This option implies option <c>legacy_erlang_types</c>.</p>
</item>
<tag><c>legacy_bit_string</c></tag>
<item>
<p>
- The BIT STRING type will be decoded to the legacy
- format, i.e. a list of zeroes and ones.
+ The <c>BIT STRING</c> type is decoded to the legacy
+ format, that is, a list of zeroes and ones.
<em>This option is not recommended for new code.</em>
</p>
- <p>For details see
- <seealso marker="asn1_ug#BIT STRING">
- BIT STRING type section in the Users Guide
- </seealso>.
- <p>This option implies the <c>legacy_erlang_types</c> option.</p>
- </p>
+ <p>For details, see Section
+ <seealso marker="asn1_getting_started#BIT STRING">BIT STRING</seealso>
+ in the User's Guide.</p>
+ <p>This option implies option <c>legacy_erlang_types</c>.</p>
</item>
<tag><c>legacy_erlang_types</c></tag>
<item>
- <p>Use the same Erlang types to represent BIT STRING and
- OCTET STRING as in R16. For details see <seealso
- marker="asn1_ug#BIT STRING">BIT STRING</seealso> and
- <seealso marker="asn1_ug#OCTET STRING">OCTET
- STRING</seealso> in User's Guide.</p>
- <p><em>This option is not recommended for
- new code.</em></p>
+ <p>Use the same Erlang types to represent <c>BIT STRING</c> and
+ <c>OCTET STRING</c> as in OTP R16.</p>
+ <p>For details, see Section <seealso
+ marker="asn1_getting_started#BIT STRING">BIT STRING</seealso> and Section
+ <seealso marker="asn1_getting_started#OCTET STRING">OCTET
+ STRING</seealso> in the User's Guide.</p>
+ <p><em>This option is not recommended for new code.</em></p>
</item>
<tag><c>{n2n, EnumTypeName}</c></tag>
<item>
<p>
- Tells the compiler to generate functions for conversion between
- names (as atoms) and numbers and vice versa for the EnumTypeName specified. There can be multiple occurrences of this option in order to specify several type names. The type names must be declared as ENUMERATIONS in the ASN.1 spec.
- If the EnumTypeName does not exist in the ASN.1 spec the
- compilation will stop with an error code.
- The generated conversion functions are named
+ Tells the compiler to generate functions for conversion
+ between names (as atoms) and numbers, and vice versa, for
+ the specified <c>EnumTypeName</c>. There can be multiple
+ occurrences of this option to specify several type names.
+ The type names must be declared as <c>ENUMERATIONS</c> in
+ the ASN.1 specification.</p>
+ <p>
+ If <c>EnumTypeName</c> does not exist in the ASN.1 specification,
+ the compilation stops with an error code.</p>
+ <p>
+ The generated conversion functions are named
<c>name2num_EnumTypeName/1</c> and
<c>num2name_EnumTypeName/1</c>.
</p>
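+ <p>As a sketch, for a hypothetical enumerated type
+ <c>PriorityLevel</c> in a module <c>MyModule</c> (the
+ name-to-number mapping shown is equally hypothetical):</p>
+ <code type="none">
+asn1ct:compile('MyModule', [ber, {n2n, 'PriorityLevel'}]).
+%% The generated module would then export, for example:
+%%   'MyModule':name2num_PriorityLevel(low) -> 0
+%%   'MyModule':num2name_PriorityLevel(0)   -> low</code>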
</item>
<tag><c>noobj</c></tag>
<item>
- <p>Do not compile (i.e do not produce object code) the generated
- <c>.erl</c> file. If this option is omitted the generated Erlang module
- will be compiled.</p>
+ <p>Do not compile (that is, do not produce object code) the
+ generated <c>.erl</c> file. If this option is omitted, the
+ generated Erlang module is compiled.</p>
</item>
<tag><c>{i, IncludeDir}</c></tag>
<item>
<p>Adds <c>IncludeDir</c> to the search-path for
- <c>.asn1db</c> and asn1 source files. The compiler tries
- to open a <c>.asn1db</c> file when a module imports
- definitions from another ASN.1 module. If no
- <c>.asn1db</c> file is found the asn1 source file is
- parsed. Several <c>{i, IncludeDir}</c> can be given.
+ <c>.asn1db</c> and <c>ASN.1</c> source files. The compiler
+ tries to open an <c>.asn1db</c> file when a module imports
+ definitions from another <c>ASN.1</c> module. If no
+ <c>.asn1db</c> file is found, the <c>ASN.1</c> source file is
+ parsed. Several <c>{i, IncludeDir}</c> can be given.
</p>
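+ <p>For example, with two hypothetical include directories:</p>
+ <code type="none">
+asn1ct:compile('MyModule', [ber, {i, "../asn1_specs"}, {i, "/usr/share/asn1"}]).</code>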
</item>
<tag><c>{outdir, Dir}</c></tag>
<item>
- <p>Specifies the directory <c>Dir</c> where all generated files
- shall be placed. If omitted the files are placed in the
- current directory.</p>
+ <p>Specifies directory <c>Dir</c> where all generated files
+ are to be placed. If this option is omitted, the files are
+ placed in the current directory.</p>
</item>
<tag><c>asn1config</c></tag>
<item>
- <p>When one of the specialized decodes, exclusive or
- selective decode, is wanted one has to give instructions in
- a configuration file. The option <c>asn1config</c> enables
- specialized decodes and takes the configuration file, which
- has the same name as the ASN.1 spec but with extension
- <c>.asn1config</c>, in concern.
+ <p>When using one of the specialized decodes, exclusive or
+ selective decode, instructions must be given in
+ a configuration file. Option <c>asn1config</c> enables
+ specialized decodes and takes the configuration file into
+ account. The configuration file has
+ the same name as the ASN.1 specification, but with extension
+ <c>.asn1config</c>.
</p>
- <p>The instructions for exclusive decode must follow the
- <seealso marker="asn1_spec#Exclusive Instruction">instruction and grammar in the User's Guide</seealso>.
+ <p>For instructions for exclusive decode, see Section
+ <seealso marker="asn1_spec#Exclusive Instruction">Exclusive
+ Decode</seealso> in the User's Guide.
</p>
- <p>You can also find the instructions for selective decode
- in the
- <seealso marker="asn1_spec#Selective Instruction">User's Guide</seealso>.
+ <p>For instructions for selective decode, see Section
+ <seealso marker="asn1_spec#Selective Instruction">Selective
+ Decode</seealso> in the User's Guide.
</p>
</item>
<tag><c>undec_rest</c></tag>
<item>
- <p>A buffer that holds a message, being decoded may
- also have some following bytes. Now it is possible to get
- those following bytes returned together with the decoded
- value. If an asn1 spec is compiled with this option a tuple
- <c>{ok, Value, Rest}</c> is returned. <c>Rest</c> may be a
+ <p>A buffer that holds a message being decoded can also
+ have some following bytes. Those following bytes can now
+ be returned together with the decoded value. If an
+ ASN.1 specification is compiled with this option, a tuple
+ <c>{ok, Value, Rest}</c> is returned. <c>Rest</c> can be a
list or a binary. Earlier versions of the compiler ignored
those following bytes.</p>
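+ <p>A rough sketch, with hypothetical module, type, and buffer
+ names:</p>
+ <code type="none">
+{ok, Value, Rest} = 'MyModule':decode('MyType', BytesWithTrailingData).</code>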
</item>
<tag><c>no_ok_wrapper</c></tag>
<item>
- <p>If this option is given, the generated <c>encode/2</c>
- and <c>decode/2</c> functions will not wrap a successful
+ <p>With this option, the generated <c>encode/2</c>
+ and <c>decode/2</c> functions do not wrap a successful
return value in an <c>{ok,...}</c> tuple. If any error
- occurs, there will be an exception.</p>
+ occurs, an exception will be raised.</p>
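+ <p>With this option, a successful round trip could look like
+ this (module and type names are hypothetical):</p>
+ <code type="none">
+Bytes = 'MyModule':encode('MyType', Value),
+DecodedValue = 'MyModule':decode('MyType', Bytes).</code>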
</item>
<tag><c>{macro_name_prefix, Prefix}</c></tag>
<item>
<p>All macro names generated by the compiler are prefixed with
- <c>Prefix</c>. This is useful when multiple protocols that contains
+ <c>Prefix</c>. This is useful when multiple protocols that contain
macros with identical names are included in a single module.</p>
</item>
<tag><c>{record_name_prefix, Prefix}</c></tag>
<item>
<p>All record names generated by the compiler are prefixed with
- <c>Prefix</c>. This is useful when multiple protocols that contains
+ <c>Prefix</c>. This is useful when multiple protocols that contain
records with identical names are included in a single module.</p>
</item>
<tag><c>verbose</c></tag>
@@ -291,27 +301,27 @@ File3.asn </pre>
<p>Causes warnings to be treated as errors.</p>
</item>
</taglist>
- <p>Any additional option that is applied will be passed to
- the final step when the generated .erl file is compiled.
+ <p>Any additional option that is applied is passed to
+ the final step when the generated <c>.erl</c> file is compiled.
</p>
<p>The compiler generates the following files:</p>
<list type="bulleted">
- <item>
- <p><c>Asn1module.hrl</c> (if any SET or SEQUENCE is defined)</p>
+ <item><c>Asn1module.hrl</c> (if any <c>SET</c> or <c>SEQUENCE</c>
+ is defined)
</item>
- <item>
- <p><c>Asn1module.erl</c> the Erlang module with encode, decode and value functions.</p>
+ <item><c>Asn1module.erl</c> - Erlang module with encode, decode,
+ and value functions
</item>
- <item>
- <p><c>Asn1module.asn1db</c> intermediate format used by the compiler when modules IMPORTS
- definitions from each other.</p>
+ <item><c>Asn1module.asn1db</c> - Intermediate format used by the
+ compiler when modules <c>IMPORT</c> definitions from each other.
</item>
</list>
</desc>
</func>
+
<func>
<name>encode(Module, Type, Value)-> {ok, Bytes} | {error, Reason}</name>
- <fsummary>Encode an ASN.1 value.</fsummary>
+ <fsummary>Encodes an ASN.1 value.</fsummary>
<type>
<v>Module = Type = atom()</v>
<v>Value = term()</v>
@@ -319,11 +329,11 @@ File3.asn </pre>
<v>Reason = term()</v>
</type>
<desc>
- <p>Encodes <c>Value</c> of <c>Type</c> defined in the ASN.1 module
- <c>Module</c>. To get as fast execution as possible the
- encode function only performs rudimentary tests that the input
- <c>Value</c>
- is a correct instance of <c>Type</c>. The length of strings is for example
+ <p>Encodes <c>Value</c> of <c>Type</c> defined in the <c>ASN.1</c> module
+ <c>Module</c>. To get as fast execution as possible, the
+ encode function performs only rudimentary tests that the input
+ <c>Value</c> is a correct instance of <c>Type</c>. So, for example,
+ the length of strings is
not always checked. Returns <c>{ok, Bytes}</c> if successful or
<c>{error, Reason}</c> if an error occurred.
</p>
@@ -331,6 +341,7 @@ File3.asn </pre>
Use <c>Module:encode(Type, Value)</c> instead.</p>
</desc>
</func>
+
<func>
<name>decode(Module, Type, Bytes) -> {ok, Value} | {error, Reason}</name>
<fsummary>Decode from Bytes into an ASN.1 value.</fsummary>
@@ -346,26 +357,37 @@ File3.asn </pre>
Use <c>Module:decode(Type, Bytes)</c> instead.</p>
</desc>
</func>
+
<func>
<name>value(Module, Type) -> {ok, Value} | {error, Reason}</name>
- <fsummary>Create an ASN.1 value for test purposes.</fsummary>
+ <fsummary>Creates an ASN.1 value for test purposes.</fsummary>
<type>
<v>Module = Type = atom()</v>
<v>Value = term()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>Returns an Erlang term which is an example of a valid Erlang
- representation of a value of the ASN.1 type <c>Type</c>. The value
+ <p>Returns an Erlang term that is an example of a valid Erlang
+ representation of a value of the <c>ASN.1</c> type <c>Type</c>. The value
is a random value and subsequent calls to this function will for most
types return different values.</p>
+ <note>
+ <p>Currently, the <c>value</c> function has many limitations.
+ Essentially, it will mostly work for old specifications based
+ on the 1997 standard for ASN.1, but not for most modern-style
+ applications. Another limitation is that the <c>value</c> function
+ may not work if options that change code generation
+ strategies, such as <c>macro_name_prefix</c> and
+ <c>record_name_prefix</c>, have been used.</p>
+ </note>
</desc>
</func>
+
<func>
<name>test(Module) -> ok | {error, Reason}</name>
<name>test(Module, Type | Options) -> ok | {error, Reason}</name>
<name>test(Module, Type, Value | Options) -> ok | {error, Reason}</name>
- <fsummary>Perform a test of encode and decode for types in an ASN.1 module.</fsummary>
+ <fsummary>Performs a test of encode and decode for types in an ASN.1 module.</fsummary>
<type>
<v>Module = Type = atom()</v>
<v>Value = term()</v>
@@ -376,9 +398,17 @@ File3.asn </pre>
<p>Performs a test of encode and decode of types in <c>Module</c>.
The generated functions are called by this function.
This function is useful during test to secure that the generated
- encode and decode functions and the general runtime support work
- as expected.</p>
-
+ encode and decode functions as well as the general runtime support
+ work as expected.</p>
+ <note>
+ <p>Currently, the <c>test</c> functions have many limitations.
+ Essentially, they will mostly work for old specifications based
+ on the 1997 standard for ASN.1, but not for most modern-style
+ applications. Another limitation is that the <c>test</c> functions
+ may not work if options that change code generation
+ strategies, such as <c>macro_name_prefix</c> and
+ <c>record_name_prefix</c>, have been used.</p>
+ </note>
<list type="bulleted">
<item>
<p><c>test/1</c> iterates over all types in <c>Module</c>.</p>
@@ -390,14 +420,12 @@ File3.asn </pre>
<p><c>test/3</c> tests type <c>Type</c> with <c>Value</c>.</p>
</item>
</list>
-
- <p>Schematically the following happens for each type in the module:</p>
+ <p>Schematically, the following occurs for each type in the module:</p>
<code type="none">
{ok, Value} = asn1ct:value(Module, Type),
{ok, Bytes} = asn1ct:encode(Module, Type, Value),
{ok, Value} = asn1ct:decode(Module, Type, Bytes).</code>
-
- <p>The <c>test</c> functions utilizes the <c>*.asn1db</c> files
+ <p>The <c>test</c> functions use the <c>*.asn1db</c> files
for all included modules. If they are located in a different
directory than the current working directory, use the include
option to add paths. This is only needed when automatically
diff --git a/lib/asn1/doc/src/asn1rt.xml b/lib/asn1/doc/src/asn1rt.xml
index 3cf56b01ca..f5c334c2ac 100644
--- a/lib/asn1/doc/src/asn1rt.xml
+++ b/lib/asn1/doc/src/asn1rt.xml
@@ -46,7 +46,7 @@
<func>
<name>decode(Module,Type,Bytes) -> {ok,Value}|{error,Reason}</name>
- <fsummary>Decode from bytes into an ASN.1 value.</fsummary>
+ <fsummary>Decodes from Bytes into an ASN.1 value.</fsummary>
<type>
<v>Module = Type = atom()</v>
<v>Value = Reason = term()</v>
@@ -61,7 +61,7 @@
<func>
<name>encode(Module,Type,Value)-> {ok,Bytes} | {error,Reason}</name>
- <fsummary>Encode an ASN.1 value.</fsummary>
+ <fsummary>Encodes an ASN.1 value.</fsummary>
<type>
<v>Module = Type = atom()</v>
<v>Value = term()</v>
@@ -69,12 +69,12 @@
<v>Reason = term()</v>
</type>
<desc>
- <p>Encodes <c>Value</c> of <c>Type</c> defined in the ASN.1
+ <p>Encodes <c>Value</c> of <c>Type</c> defined in the <c>ASN.1</c>
module <c>Module</c>. Returns a binary if successful. To get
- as fast execution as possible the encode function only
- performs rudimentary tests that the input <c>Value</c> is a
- correct instance of <c>Type</c>. The length of strings is, for
- example, not always checked. </p>
+ as fast execution as possible, the encode function performs
+ only rudimentary tests that the input <c>Value</c> is a correct
+ instance of <c>Type</c>. For example, the length of strings is
+ not always checked.</p>
<p>Use <c>Module:encode(Type, Value)</c> instead of this function.</p>
</desc>
</func>
@@ -88,23 +88,23 @@
<v>Reason = term()</v>
</type>
<desc>
- <p><c>info/1</c> returns the version of the asn1 compiler that was
+ <p>Returns the version of the <c>ASN.1</c> compiler that was
used to compile the module. It also returns the compiler options
- that was used.</p>
+ that were used.</p>
<p>Use <c>Module:info()</c> instead of this function.</p>
</desc>
</func>
<func>
<name>utf8_binary_to_list(UTF8Binary) -> {ok,UnicodeList} | {error,Reason}</name>
- <fsummary>Transforms an utf8 encoded binary to a unicode list.</fsummary>
+ <fsummary>Transforms a UTF8 encoded binary to a unicode list.</fsummary>
<type>
<v>UTF8Binary = binary()</v>
<v>UnicodeList = [integer()]</v>
<v>Reason = term()</v>
</type>
<desc>
- <p><c>utf8_binary_to_list/1</c> Transforms a UTF8 encoded binary
+ <p>Transforms a UTF8 encoded binary
to a list of integers, where each integer represents one
character as its unicode value. The function fails if the binary
is not a properly encoded UTF8 string.</p>
@@ -114,14 +114,14 @@
<func>
<name>utf8_list_to_binary(UnicodeList) -> {ok,UTF8Binary} | {error,Reason}</name>
- <fsummary>Transforms an unicode list ot an utf8 binary.</fsummary>
+ <fsummary>Transforms a unicode list to a UTF8 binary.</fsummary>
<type>
<v>UnicodeList = [integer()]</v>
<v>UTF8Binary = binary()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p><c>utf8_list_to_binary/1</c> Transforms a list of integers,
+ <p>Transforms a list of integers,
where each integer represents one character as its unicode
value, to a UTF8 encoded binary.</p>
<p>Use <seealso marker="stdlib:unicode#characters_to_binary-1">unicode:characters_to_binary/1</seealso> instead of this function.</p>
diff --git a/lib/asn1/doc/src/notes.xml b/lib/asn1/doc/src/notes.xml
index cf87c01658..9feb673c04 100644
--- a/lib/asn1/doc/src/notes.xml
+++ b/lib/asn1/doc/src/notes.xml
@@ -31,6 +31,21 @@
<p>This document describes the changes made to the asn1 application.</p>
+<section><title>Asn1 3.0.4</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>The ASN.1 compiler would crash if a SEQUENCE ended
+ with a double set of ellipses (<c>...</c>).</p>
+ <p>
+ Own Id: OTP-12546 Aux Id: seq12815 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Asn1 3.0.3</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/asn1/doc/src/part.xml b/lib/asn1/doc/src/part.xml
index 735ec2e616..104aeb1f34 100644
--- a/lib/asn1/doc/src/part.xml
+++ b/lib/asn1/doc/src/part.xml
@@ -29,11 +29,14 @@
<file>part.sgml</file>
</header>
<description>
- <p>The <em>Asn1</em> application
- contains modules with compile-time and run-time support for ASN.1.
+ <p>The <c>ASN.1</c> application
+ contains modules with compile-time and runtime support for
+ Abstract Syntax Notation One (ASN.1).
</p>
</description>
- <xi:include href="asn1_ug.xml"/>
+ <xi:include href="asn1_introduction.xml"/>
+ <xi:include href="asn1_overview.xml"/>
+ <xi:include href="asn1_getting_started.xml"/>
<xi:include href="asn1_spec.xml"/>
</part>
diff --git a/lib/asn1/doc/src/ref_man.xml b/lib/asn1/doc/src/ref_man.xml
index 0a0ed5416a..e157f542f3 100644
--- a/lib/asn1/doc/src/ref_man.xml
+++ b/lib/asn1/doc/src/ref_man.xml
@@ -21,7 +21,7 @@
</legalnotice>
- <title>Asn1 Reference Manual</title>
+ <title>ASN.1 Reference Manual</title>
<prepared>OTP Team</prepared>
<docno></docno>
<date>1997-10-04</date>
@@ -29,8 +29,8 @@
<file>application.sgml</file>
</header>
<description>
- <p>The <em>Asn1</em> application
- contains modules with compile-time and run-time support for ASN.1.</p>
+ <p>The <c>ASN.1</c> application
+ contains modules with compile-time and runtime support for ASN.1.</p>
</description>
<xi:include href="asn1ct.xml"/>
<xi:include href="asn1rt.xml"/>
diff --git a/lib/asn1/src/Makefile b/lib/asn1/src/Makefile
index 6798da0072..40f440423d 100644
--- a/lib/asn1/src/Makefile
+++ b/lib/asn1/src/Makefile
@@ -206,6 +206,7 @@ $(EBIN)/asn1ct_constructed_per.beam: asn1ct_constructed_per.erl asn1_records.hrl
$(EBIN)/asn1ct_func.beam: asn1ct_func.erl
$(EBIN)/asn1ct_gen.beam: asn1ct_gen.erl asn1_records.hrl
$(EBIN)/asn1ct_gen_ber_bin_v2.beam: asn1ct_gen_ber_bin_v2.erl asn1_records.hrl
+$(EBIN)/asn1ct_gen_check.beam: asn1_records.hrl
$(EBIN)/asn1ct_gen_per.beam: asn1ct_gen_per.erl asn1_records.hrl
$(EBIN)/asn1ct_gen_per_rt2ct.beam: asn1ct_gen_per_rt2ct.erl asn1_records.hrl
$(EBIN)/asn1ct_imm.beam: asn1ct_imm.erl
diff --git a/lib/asn1/src/asn1.app.src b/lib/asn1/src/asn1.app.src
index 02cbba0f10..1f8805ff5e 100644
--- a/lib/asn1/src/asn1.app.src
+++ b/lib/asn1/src/asn1.app.src
@@ -11,5 +11,5 @@
]},
{env, []},
{applications, [kernel, stdlib]},
- {runtime_dependencies, ["stdlib-2.0","kernel-3.0","erts-6.0"]}
+ {runtime_dependencies, ["stdlib-2.0","kernel-3.0","erts-7.0"]}
]}.
diff --git a/lib/asn1/src/asn1_db.erl b/lib/asn1/src/asn1_db.erl
index 48d9dd16d7..5577969727 100644
--- a/lib/asn1/src/asn1_db.erl
+++ b/lib/asn1/src/asn1_db.erl
@@ -19,7 +19,8 @@
%%
-module(asn1_db).
--export([dbstart/1,dbnew/2,dbload/1,dbload/3,dbsave/2,dbput/3,dbget/2]).
+-export([dbstart/1,dbnew/2,dbload/1,dbload/3,dbsave/2,dbput/2,
+ dbput/3,dbget/2]).
-export([dbstop/0]).
-record(state, {parent, monitor, includes, table}).
@@ -44,6 +45,7 @@ dbload(Module) ->
dbnew(Module, Erule) -> req({new, Module, Erule}).
dbsave(OutFile, Module) -> cast({save, OutFile, Module}).
dbput(Module, K, V) -> cast({set, Module, K, V}).
+dbput(Module, Kvs) -> cast({set, Module, Kvs}).
dbget(Module, K) -> req({get, Module, K}).
dbstop() -> Resp = req(stop), erase(?MODULE), Resp.
@@ -82,6 +84,10 @@ loop(#state{parent = Parent, monitor = MRef, table = Table,
[{_, Modtab}] = ets:lookup(Table, Mod),
ets:insert(Modtab, {K2, V}),
loop(State);
+ {set, Mod, Kvs} ->
+ [{_, Modtab}] = ets:lookup(Table, Mod),
+ ets:insert(Modtab, Kvs),
+ loop(State);
{From, {get, Mod, K2}} ->
%% XXX If there is no information for Mod, get_table/3
%% will attempt to load information from an .asn1db
diff --git a/lib/asn1/src/asn1_records.hrl b/lib/asn1/src/asn1_records.hrl
index 6c1cf1b12a..84435b2d21 100644
--- a/lib/asn1/src/asn1_records.hrl
+++ b/lib/asn1/src/asn1_records.hrl
@@ -81,9 +81,19 @@
module :: atom(),
val :: atom()}).
--record(state,{module,mname,type,tname,value,vname,erule,parameters=[],
- inputmodules,abscomppath=[],recordtopname=[],options,
- sourcedir}).
+-record(state,
+ {module,
+ mname,
+ tname,
+ erule,
+ parameters=[],
+ inputmodules=[],
+ abscomppath=[],
+ recordtopname=[],
+ options,
+ sourcedir,
+ error_context %Top-level thingie (contains line numbers)
+ }).
%% state record used by back-end at partial decode
%% active is set to 'yes' when a partial decode function is generated.
diff --git a/lib/asn1/src/asn1ct.erl b/lib/asn1/src/asn1ct.erl
index df341e5aab..a26d63c97d 100644
--- a/lib/asn1/src/asn1ct.erl
+++ b/lib/asn1/src/asn1ct.erl
@@ -34,7 +34,8 @@
%% Application internal exports
-export([compile_asn/3,compile_asn1/3,compile_py/3,compile/3,
vsn/0,
- get_name_of_def/1,get_pos_of_def/1]).
+ get_name_of_def/1,get_pos_of_def/1,
+ unset_pos_mod/1]).
-export([read_config_data/1,get_gen_state_field/1,
partial_inc_dec_toptype/1,update_gen_state/2,
get_tobe_refed_func/1,reset_gen_state/0,is_function_generated/1,
@@ -166,46 +167,26 @@ set_scan_parse_pass(#st{files=Files}=St) ->
{error,St#st{error=Error}}
end.
-set_scan_parse_pass_1([F|Fs], St) ->
+set_scan_parse_pass_1([F|Fs], #st{file=File}=St) ->
case asn1ct_tok:file(F) of
{error,Error} ->
throw(Error);
Tokens when is_list(Tokens) ->
- case catch asn1ct_parser2:parse(Tokens) of
+ case asn1ct_parser2:parse(File, Tokens) of
{ok,M} ->
[M|set_scan_parse_pass_1(Fs, St)];
- {error,ErrorTerm} ->
- throw(handle_parse_error(ErrorTerm, St))
+ {error,Errors} ->
+ throw(Errors)
end
end;
set_scan_parse_pass_1([], _) -> [].
-parse_pass(#st{code=Tokens}=St) ->
- case catch asn1ct_parser2:parse(Tokens) of
+parse_pass(#st{file=File,code=Tokens}=St) ->
+ case asn1ct_parser2:parse(File, Tokens) of
{ok,M} ->
{ok,St#st{code=M}};
- {error,ErrorTerm} ->
- {error,St#st{error=handle_parse_error(ErrorTerm, St)}}
- end.
-
-handle_parse_error(ErrorTerm, #st{file=File,opts=Opts}) ->
- case ErrorTerm of
- {{Line,_Mod,Message},_TokTup} ->
- if
- is_integer(Line) ->
- BaseName = filename:basename(File),
- error("syntax error at line ~p in module ~s:~n",
- [Line,BaseName], Opts);
- true ->
- error("syntax error in module ~p:~n",
- [File], Opts)
- end,
- print_error_message(Message),
- Message;
- {Line,_Mod,[Message,Token]} ->
- error("syntax error: ~p ~p at line ~p~n",
- [Message,Token,Line], Opts),
- {Line,[Message,Token]}
+ {error,Errors} ->
+ {error,St#st{error=Errors}}
end.
merge_pass(#st{file=Base,code=Code}=St) ->
@@ -559,7 +540,10 @@ unset_pos_mod(Def) when is_record(Def,pvaluesetdef) ->
unset_pos_mod(Def) when is_record(Def,pobjectdef) ->
Def#pobjectdef{pos=undefined};
unset_pos_mod(Def) when is_record(Def,pobjectsetdef) ->
- Def#pobjectsetdef{pos=undefined}.
+ Def#pobjectsetdef{pos=undefined};
+unset_pos_mod(#'ComponentType'{} = Def) ->
+ Def#'ComponentType'{pos=undefined};
+unset_pos_mod(Def) -> Def.
get_pos_of_def(#typedef{pos=Pos}) ->
Pos;
@@ -1406,33 +1390,6 @@ prepare_bytes(Bytes) -> list_to_binary(Bytes).
vsn() ->
?vsn.
-
-
-print_error_message([got,H|T]) when is_list(H) ->
- io:format(" got:"),
- print_listing(H,"and"),
- print_error_message(T);
-print_error_message([expected,H|T]) when is_list(H) ->
- io:format(" expected one of:"),
- print_listing(H,"or"),
- print_error_message(T);
-print_error_message([H|T]) ->
- io:format(" ~p",[H]),
- print_error_message(T);
-print_error_message([]) ->
- io:format("~n").
-
-print_listing([H1,H2|[]],AndOr) ->
- io:format(" ~p ~s ~p",[H1,AndOr,H2]);
-print_listing([H1,H2|T],AndOr) ->
- io:format(" ~p,",[H1]),
- print_listing([H2|T],AndOr);
-print_listing([H],_AndOr) ->
- io:format(" ~p",[H]);
-print_listing([],_) ->
- ok.
-
-
specialized_decode_prepare(Erule,M,TsAndVs,Options) ->
case lists:member(asn1config,Options) of
true ->
diff --git a/lib/asn1/src/asn1ct_check.erl b/lib/asn1/src/asn1ct_check.erl
index 240f1cbb16..99392d6eaa 100644
--- a/lib/asn1/src/asn1ct_check.erl
+++ b/lib/asn1/src/asn1ct_check.erl
@@ -23,8 +23,6 @@
%% Main Module for ASN.1 compile time functions
%-compile(export_all).
-%% Avoid warning for local function error/1 clashing with autoimported BIF.
--compile({no_auto_import,[error/1]}).
-export([check/2,storeindb/2,format_error/1]).
%-define(debug,1).
-include("asn1_records.hrl").
@@ -60,17 +58,9 @@
-define(N_BMPString, 30).
-define(TAG_PRIMITIVE(Num),
- case S#state.erule of
- ber ->
- #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=0};
- _ -> []
- end).
+ #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=0}).
-define(TAG_CONSTRUCTED(Num),
- case S#state.erule of
- ber ->
- #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=32};
- _ -> []
- end).
+ #tag{class='UNIVERSAL',number=Num,type='IMPLICIT',form=32}).
-record(newt,{type=unchanged,tag=unchanged,constraint=unchanged,inlined=no}). % used in check_type to update type and tag
@@ -249,26 +239,18 @@ check_exports(S,Module = #module{}) ->
{exports,all} ->
[];
{exports,ExportList} when is_list(ExportList) ->
- IsNotDefined =
+ IsNotDefined =
fun(X) ->
- case catch get_referenced_type(S,X) of
- {error,{asn1,_}} ->
- true;
- _ -> false
+ try
+ _ = get_referenced_type(S,X),
+ false
+ catch {error,_} ->
+ true
end
end,
- case lists:filter(IsNotDefined,ExportList) of
- [] ->
- [];
- NoDefExp ->
- GetName =
- fun(T = #'Externaltypereference'{type=N})->
- %%{exported,undefined,entity,N}
- NewS=S#state{type=T,tname=N},
- error({export,"exported undefined entity",NewS})
- end,
- lists:map(GetName,NoDefExp)
- end
+ [return_asn1_error(S, Ext, {undefined_export, Undef}) ||
+ Ext = #'Externaltypereference'{type=Undef} <- ExportList,
+ IsNotDefined(Ext)]
end.
check_imports(S, #module{imports={imports,Imports}}) ->
@@ -276,53 +258,18 @@ check_imports(S, #module{imports={imports,Imports}}) ->
check_imports_1(_S, [], Acc) ->
Acc;
-check_imports_1(S, [#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs], Acc0) ->
+check_imports_1(S, [#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs], Acc) ->
Module = name_of_def(ModuleRef),
- Refs0 = [{catch get_referenced_type(S, Ref),Ref} || Ref <- Imports],
- Refs = [{M,R} || {{M,_},R} <- Refs0],
- {Illegal,Other} = lists:splitwith(fun({error,_}) -> true;
- (_) -> false
- end, Refs),
- ChainedRefs = [R || {M,R} <- Other, M =/= Module],
- IllegalRefs = [R || {error,R} <- Illegal] ++
- [R || {M,R} <- ChainedRefs,
- ok =/= chained_import(S, Module, M, name_of_def(R))],
- Acc = [return_asn1_error(S, Ref, {undefined_import,name_of_def(Ref),Module}) ||
- Ref <- IllegalRefs] ++ Acc0,
- check_imports_1(S, SFMs, Acc).
-
-chained_import(S,ImpMod,DefMod,Name) ->
- %% Name is a referenced structure that is not defined in ImpMod,
- %% but must be present in the Imports list of ImpMod. The chain of
- %% imports of Name must end in DefMod.
- GetImports =
- fun(_M_) ->
- case asn1_db:dbget(_M_,'MODULE') of
- #module{imports={imports,ImportList}} ->
- ImportList;
- _ -> []
- end
- end,
- FindNameInImports =
- fun([],N,_) -> {no_mod,N};
- ([#'SymbolsFromModule'{symbols=Imports,module=ModuleRef}|SFMs],N,F) ->
- case [name_of_def(X) || X <- Imports, name_of_def(X) =:= N] of
- [] -> F(SFMs,N,F);
- [N] -> {name_of_def(ModuleRef),N}
- end
- end,
- case GetImports(ImpMod) of
- [] ->
- error;
- Imps ->
- case FindNameInImports(Imps,Name,FindNameInImports) of
- {no_mod,_} ->
- error;
- {DefMod,_} -> ok;
- {OtherMod,_} ->
- chained_import(S,OtherMod,DefMod,Name)
- end
- end.
+ Refs = [{try get_referenced_type(S, Ref)
+ catch throw:Error -> Error end,
+ Ref}
+ || Ref <- Imports],
+ CreateError = fun(Ref) ->
+ Error = {undefined_import,name_of_def(Ref),Module},
+ return_asn1_error(S, Ref, Error)
+ end,
+ Errors = [CreateError(Ref) || {{error, _}, Ref} <- Refs],
+ check_imports_1(S, SFMs, Errors ++ Acc).
checkt(S0, Names) ->
Check = fun do_checkt/3,
@@ -335,7 +282,7 @@ checkt(S0, Names) ->
check_fold(S0, lists:reverse(CtxtSwitch), Check) ++ Types.
do_checkt(S, Name, #typedef{typespec=TypeSpec}=Type0) ->
- NewS = S#state{type=Type0,tname=Name},
+ NewS = S#state{tname=Name},
try check_type(NewS, Type0, TypeSpec) of
#type{}=Ts ->
case Type0#typedef.checked of
@@ -350,7 +297,7 @@ do_checkt(S, Name, #typedef{typespec=TypeSpec}=Type0) ->
end
catch
{error,Reason} ->
- error({type,Reason,NewS});
+ Reason;
{asn1_class,_ClassDef} ->
{asn1_class,Name};
pobjectsetdef ->
@@ -384,33 +331,32 @@ do_checkv(S, Name, Value)
is_record(Value, typedef); %Value set may be parsed as object set.
is_record(Value, pvaluedef);
is_record(Value, pvaluesetdef) ->
- NewS = S#state{value=Value},
- try check_value(NewS, Value) of
+ try check_value(S, Value) of
{valueset,VSet} ->
Pos = asn1ct:get_pos_of_def(Value),
CheckedVSDef = #typedef{checked=true,pos=Pos,
name=Name,typespec=VSet},
- asn1_db:dbput(NewS#state.mname, Name, CheckedVSDef),
+ asn1_db:dbput(S#state.mname, Name, CheckedVSDef),
{valueset,Name};
V ->
%% update the valuedef
- asn1_db:dbput(NewS#state.mname, Name, V),
+ asn1_db:dbput(S#state.mname, Name, V),
ok
catch
{error,Reason} ->
- error({value,Reason,NewS});
+ Reason;
{pobjectsetdef} ->
{pobjectsetdef,Name};
{objectsetdef} ->
{objectsetdef,Name};
- {objectdef} ->
+ {asn1_class, _} ->
%% this is an object, save as typedef
#valuedef{checked=C,pos=Pos,name=N,type=Type,
value=Def} = Value,
ClassName = Type#type.def,
NewSpec = #'Object'{classname=ClassName,def=Def},
NewDef = #typedef{checked=C,pos=Pos,name=N,typespec=NewSpec},
- asn1_db:dbput(NewS#state.mname, Name, NewDef),
+ asn1_db:dbput(S#state.mname, Name, NewDef),
{objectdef,Name}
end.
@@ -419,7 +365,7 @@ checkp(S, Names) ->
check_fold(S, Names, fun do_checkp/3).
do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) ->
- S = S0#state{type=Type0,tname=Name},
+ S = S0#state{tname=Name},
try check_ptype(S, Type0, TypeSpec) of
#type{}=Ts ->
Type = Type0#ptypedef{checked=true,typespec=Ts},
@@ -427,7 +373,7 @@ do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) ->
ok
catch
{error,Reason} ->
- error({type,Reason,S});
+ Reason;
{asn1_class,_ClassDef} ->
{asn1_class,Name};
{asn1_param_class,_} ->
@@ -438,100 +384,81 @@ do_checkp(S0, Name, #ptypedef{typespec=TypeSpec}=Type0) ->
checkc(S, Names) ->
check_fold(S, Names, fun do_checkc/3).
-do_checkc(S0, Name, Class0) ->
- {Class1,ClassSpec} =
- case Class0 of
- #classdef{} ->
- {Class0,Class0};
- #typedef{} ->
- {#classdef{name=Name},Class0#typedef.typespec}
- end,
- S = S0#state{type=Class0,tname=Name},
- try check_class(S, ClassSpec) of
- C ->
- Class = Class1#classdef{checked=true,typespec=C},
- asn1_db:dbput(S#state.mname, Name, Class),
- ok
- catch
- {error,Reason} ->
- error({class,Reason,S})
- end.
+do_checkc(S, Name, Class) ->
+ try
+ case is_classname(Name) of
+ false ->
+ asn1_error(S, {illegal_class_name,Name});
+ true ->
+ do_checkc_1(S, Name, Class)
+ end
+ catch {error,Reason} -> Reason
+ end.
+
+do_checkc_1(S, Name, #classdef{}=Class) ->
+ C = check_class(S, Class),
+ store_class(S, true, Class#classdef{typespec=C}, Name),
+ ok;
+do_checkc_1(S, Name, #typedef{typespec=#type{def=Def}=TS}) ->
+ C = check_class(S, TS),
+ {Mod,Pos} = case Def of
+ #'Externaltypereference'{module=M, pos=P} ->
+ {M,P};
+ {pt, #'Externaltypereference'{module=M, pos=P}, _} ->
+ {M,P}
+ end,
+ Class = #classdef{name=Name, typespec=C, pos=Pos, module=Mod},
+ store_class(S, true, Class, Name),
+ ok.
+
+%% is_classname(Atom) -> true|false.
+is_classname(Name) when is_atom(Name) ->
+ lists:all(fun($-) -> true;
+ (D) when $0 =< D, D =< $9 -> true;
+ (UC) when $A =< UC, UC =< $Z -> true;
+ (_) -> false
+ end, atom_to_list(Name)).
-checko(S,[Name|Os],Acc,ExclO,ExclOS) ->
- ?dbg("Checking object ~p~n",[Name]),
- Result =
- case asn1_db:dbget(S#state.mname,Name) of
- undefined ->
- error({type,{internal_error,'???'},S});
- Object when is_record(Object,typedef) ->
- NewS = S#state{type=Object,tname=Name},
- case catch(check_object(NewS,Object,Object#typedef.typespec)) of
- {error,Reason} ->
- error({type,Reason,NewS});
- {'EXIT',Reason} ->
- error({type,{internal_error,Reason},NewS});
- {asn1,Reason} ->
- error({type,Reason,NewS});
- O ->
- NewObj = Object#typedef{checked=true,typespec=O},
- asn1_db:dbput(NewS#state.mname,Name,NewObj),
- if
- is_record(O,'Object') ->
- case O#'Object'.gen of
- true ->
- {ok,ExclO,ExclOS};
- false ->
- {ok,[Name|ExclO],ExclOS}
- end;
- is_record(O,'ObjectSet') ->
- case O#'ObjectSet'.gen of
- true ->
- {ok,ExclO,ExclOS};
- false ->
- {ok,ExclO,[Name|ExclOS]}
- end
- end
- end;
- PObject when is_record(PObject,pobjectdef) ->
- NewS = S#state{type=PObject,tname=Name},
- case (catch check_pobject(NewS,PObject)) of
- {error,Reason} ->
- error({type,Reason,NewS});
- {'EXIT',Reason} ->
- error({type,{internal_error,Reason},NewS});
- {asn1,Reason} ->
- error({type,Reason,NewS});
- PO ->
- NewPObj = PObject#pobjectdef{def=PO},
- asn1_db:dbput(NewS#state.mname,Name,NewPObj),
- {ok,[Name|ExclO],ExclOS}
- end;
- PObjSet when is_record(PObjSet,pvaluesetdef) ->
- %% this is a parameterized object set. Might be a parameterized
- %% value set, couldn't it?
- NewS = S#state{type=PObjSet,tname=Name},
- case (catch check_pobjectset(NewS,PObjSet)) of
- {error,Reason} ->
- error({type,Reason,NewS});
- {'EXIT',Reason} ->
- error({type,{internal_error,Reason},NewS});
- {asn1,Reason} ->
- error({type,Reason,NewS});
- POS ->
- %%NewPObjSet = PObjSet#pvaluesetdef{valueset=POS},
- asn1_db:dbput(NewS#state.mname,Name,POS),
- {ok,ExclO,[Name|ExclOS]}
- end
- end,
- case Result of
- {ok,NewExclO,NewExclOS} ->
- checko(S,Os,Acc,NewExclO,NewExclOS);
- _ ->
- checko(S,Os,[Result|Acc],ExclO,ExclOS)
+checko(S0,[Name|Os],Acc,ExclO,ExclOS) ->
+ Item = asn1_db:dbget(S0#state.mname, Name),
+ S = S0#state{error_context=Item},
+ try checko_1(S, Item, Name, ExclO, ExclOS) of
+ {NewExclO,NewExclOS} ->
+ checko(S, Os, Acc, NewExclO, NewExclOS)
+ catch
+ throw:{error, Error} ->
+ checko(S, Os, [Error|Acc], ExclO, ExclOS)
end;
checko(_S,[],Acc,ExclO,ExclOS) ->
{lists:reverse(Acc),lists:reverse(ExclO),lists:reverse(ExclOS)}.
+checko_1(S, #typedef{typespec=TS}=Object, Name, ExclO, ExclOS) ->
+ NewS = S#state{tname=Name},
+ O = check_object(NewS, Object, TS),
+ NewObj = Object#typedef{checked=true,typespec=O},
+ asn1_db:dbput(NewS#state.mname, Name, NewObj),
+ case O of
+ #'Object'{gen=true} ->
+ {ExclO,ExclOS};
+ #'Object'{gen=false} ->
+ {[Name|ExclO],ExclOS};
+ #'ObjectSet'{gen=true} ->
+ {ExclO,ExclOS};
+ #'ObjectSet'{gen=false} ->
+ {ExclO,[Name|ExclOS]}
+ end;
+checko_1(S, #pobjectdef{}=PObject, Name, ExclO, ExclOS) ->
+ NewS = S#state{tname=Name},
+ PO = check_pobject(NewS, PObject),
+ NewPObj = PObject#pobjectdef{def=PO},
+ asn1_db:dbput(NewS#state.mname, Name, NewPObj),
+ {[Name|ExclO],ExclOS};
+checko_1(S, #pvaluesetdef{}=PObjSet, Name, ExclO, ExclOS) ->
+ NewS = S#state{tname=Name},
+ POS = check_pobjectset(NewS, PObjSet),
+ asn1_db:dbput(NewS#state.mname, Name, POS),
+ {ExclO,[Name|ExclOS]}.
+
check_class(S,CDef=#classdef{checked=Ch,name=Name,typespec=TS}) ->
case Ch of
true -> TS;
@@ -551,22 +478,16 @@ check_class(S = #state{mname=M,tname=T},ClassSpec)
Tref = #'Externaltypereference'{type=TName} ->
{MName,RefType} = get_referenced_type(S,Tref),
#classdef{} = CD = get_class_def(S, RefType),
- NewState = update_state(S#state{type=RefType,
- tname=TName}, MName),
+ NewState = update_state(S#state{tname=TName}, MName),
check_class(NewState, CD);
{pt,ClassRef,Params} ->
%% parameterized class
{_,PClassDef} = get_referenced_type(S,ClassRef),
- NewParaList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- Params],
+ NewParaList = match_parameters(S, Params),
instantiate_pclass(S,PClassDef,NewParaList)
end;
-check_class(S,C) when is_record(C,objectclass) ->
- NewFieldSpec = check_class_fields(S,C#objectclass.fields),
- C#objectclass{fields=NewFieldSpec};
-check_class(_S,{poc,_ObjSet,_Params}) ->
- 'fix this later';
+check_class(S, #objectclass{}=C) ->
+ check_objectclass(S, C);
check_class(S,ClassName) ->
{RefMod,Def} = get_referenced_type(S,ClassName),
case Def of
@@ -579,8 +500,7 @@ check_class(S,ClassName) ->
false ->
Name=ClassName#'Externaltypereference'.type,
store_class(S,idle,ClassDef,Name),
-% NewS = S#state{mname=RefMod,type=Def,tname=Name},
- NewS = update_state(S#state{type=Def,tname=Name},RefMod),
+ NewS = update_state(S#state{tname=Name}, RefMod),
CheckedTS = check_class(NewS,ClassDef#classdef.typespec),
store_class(S,true,ClassDef#classdef{typespec=CheckedTS},Name),
CheckedTS
@@ -594,11 +514,20 @@ check_class(S,ClassName) ->
end
end.
+check_objectclass(S, #objectclass{fields=Fs0,syntax=Syntax0}=C) ->
+ Fs = check_class_fields(S, Fs0),
+ case Syntax0 of
+ {'WITH SYNTAX',Syntax1} ->
+ Syntax = preprocess_syntax(S, Syntax1, Fs),
+ C#objectclass{fields=Fs,syntax={preprocessed_syntax,Syntax}};
+ _ ->
+ C#objectclass{fields=Fs}
+ end.
+
instantiate_pclass(S=#state{parameters=_OldArgs},PClassDef,Params) ->
#ptypedef{args=Args,typespec=Type} = PClassDef,
MatchedArgs = match_args(S,Args, Params, []),
-% NewS = S#state{type=Type,parameters=MatchedArgs++OldArgs,abscomppath=[]},
- NewS = S#state{type=Type,parameters=MatchedArgs,abscomppath=[]},
+ NewS = S#state{parameters=MatchedArgs,abscomppath=[]},
check_class(NewS,#classdef{name=S#state.tname,typespec=Type}).
store_class(S,Mode,ClassDef,ClassName) ->
@@ -613,6 +542,12 @@ check_class_fields(S,[F|Fields],Acc) ->
case element(1,F) of
fixedtypevaluefield ->
{_,Name,Type,Unique,OSpec} = F,
+ case {Unique,OSpec} of
+ {'UNIQUE',{'DEFAULT',_}} ->
+ asn1_error(S, {unique_and_default,Name});
+ {_,_} ->
+ ok
+ end,
RefType = check_type(S,#typedef{typespec=Type},Type),
{fixedtypevaluefield,Name,RefType,Unique,OSpec};
object_or_fixedtypevalue_field ->
@@ -621,7 +556,7 @@ check_class_fields(S,[F|Fields],Acc) ->
Cat =
case asn1ct_gen:type(asn1ct_gen:get_inner(Type2#type.def)) of
Def when is_record(Def,'Externaltypereference') ->
- {_,D} = get_referenced_type(S,Def),
+ {_,D} = get_referenced_type(S, Def, true),
D;
{undefined,user} ->
%% neither of {primitive,bif} or {constructed,bif}
@@ -644,18 +579,14 @@ check_class_fields(S,[F|Fields],Acc) ->
objectset_or_fixedtypevalueset_field ->
{_,Name,Type,OSpec} = F,
RefType =
- case (catch check_type(S,#typedef{typespec=Type},Type)) of
- {asn1_class,_ClassDef} ->
+ try check_type(S,#typedef{typespec=Type},Type) of
+ #type{} = CheckedType ->
+ CheckedType
+ catch {asn1_class,_ClassDef} ->
case if_current_checked_type(S,Type) of
- true ->
- Type#type.def;
- _ ->
- check_class(S,Type)
- end;
- CheckedType when is_record(CheckedType,type) ->
- CheckedType;
- _ ->
- error({class,"internal error, check_class_fields",S})
+ true -> Type#type.def;
+ _ -> check_class(S,Type)
+ end
end,
if
is_record(RefType,'Externaltypereference') ->
@@ -733,38 +664,34 @@ check_pobjectset(S,PObjSet) ->
PObjSet
end.
+-record(osi, %Object set information.
+ {st,
+ classref,
+ uniq,
+ ext
+ }).
+
check_object(_S,ObjDef,ObjSpec) when (ObjDef#typedef.checked == true) ->
ObjSpec;
check_object(S,_ObjDef,#'Object'{classname=ClassRef,def=ObjectDef}) ->
?dbg("check_object ~p~n",[ObjectDef]),
-%% io:format("check_object,object: ~p~n",[ObjectDef]),
-% {MName,_ClassDef} = get_referenced_type(S,ClassRef),
- NewClassRef = check_externaltypereference(S,ClassRef),
- ClassDef =
- case get_referenced_type(S,ClassRef) of
- {MName,ClDef=#classdef{checked=false}} ->
- NewState = update_state(S#state{type=ClDef,
- tname=ClassRef#'Externaltypereference'.type},MName),
- ObjClass=
- check_class(NewState,ClDef),
- #classdef{checked=true,
- typespec=ObjClass};
- {_,_ClDef} when is_record(_ClDef,classdef) ->
- _ClDef;
- {MName,_TDef=#typedef{checked=false,pos=Pos,
- name=_TName,typespec=TS}} ->
- ClDef = #classdef{pos=Pos,name=_TName,typespec=TS},
- NewState = update_state(S#state{type=_TDef,
- tname=ClassRef#'Externaltypereference'.type},MName),
- ObjClass =
- check_class(NewState,ClDef),
- ClDef#classdef{checked=true,typespec=ObjClass};
- {_,_ClDef} ->
- _ClDef
+ _ = check_externaltypereference(S,ClassRef),
+ {ClassDef, NewClassRef} =
+ case get_referenced_type(S, ClassRef, true) of
+ {MName,#classdef{checked=false, name=CLName}=ClDef} ->
+ Type = ClassRef#'Externaltypereference'.type,
+ NewState = update_state(S#state{tname=Type}, MName),
+ ObjClass = check_class(NewState, ClDef),
+ {ClDef#classdef{checked=true, typespec=ObjClass},
+ #'Externaltypereference'{module=MName, type=CLName}};
+ {MName,#classdef{name=CLName}=ClDef} ->
+ {ClDef, #'Externaltypereference'{module=MName, type=CLName}};
+ _ ->
+ asn1_error(S, illegal_object)
end,
NewObj =
case ObjectDef of
- Def when is_tuple(Def), (element(1,Def)==object) ->
+ {object,_,_}=Def ->
NewSettingList = check_objectdefn(S,Def,ClassDef),
#'Object'{def=NewSettingList};
{po,{object,DefObj},ArgsList} ->
@@ -778,425 +705,287 @@ check_object(S,_ObjDef,#'Object'{classname=ClassRef,def=ObjectDef}) ->
instantiate_po(S,ClassDef,Object,ArgList);
#'Externalvaluereference'{} ->
{_,Object} = get_referenced_type(S,ObjectDef),
- check_object(S,Object,Object#typedef.typespec);
+ check_object(S, Object, object_to_check(S, Object));
[] ->
- %% An object with no fields. All class fields must be
- %% optional or default. Check that all fields in
- %% class are 'OPTIONAL' or 'DEFAULT'
- class_fields_optional_check(S,ClassDef),
- #'Object'{def={object,defaultsyntax,[]}};
- _ ->
- exit({error,{no_object,ObjectDef},S})
+ %% An object with no fields (parsed as a value).
+ Def = {object,defaultsyntax,[]},
+ NewSettingList = check_objectdefn(S, Def, ClassDef),
+ #'Object'{def=NewSettingList};
+ _ ->
+ asn1_error(S, illegal_object)
end,
- Gen = gen_incl(S,NewObj#'Object'.def,
- (ClassDef#classdef.typespec)#objectclass.fields),
+ Fields = (ClassDef#classdef.typespec)#objectclass.fields,
+ Gen = gen_incl(S,NewObj#'Object'.def, Fields),
NewObj#'Object'{classname=NewClassRef,gen=Gen};
-
-
-check_object(S,
- _ObjSetDef,
- ObjSet=#'ObjectSet'{class=ClassRef}) ->
-%% io:format("check_object,SET: ~p~n",[ObjSet#'ObjectSet'.set]),
- ?dbg("check_object set: ~p~n",[ObjSet#'ObjectSet'.set]),
- {_,ClassDef} = get_referenced_type(S,ClassRef),
- NewClassRef = check_externaltypereference(S,ClassRef),
- {UniqueFieldName,UniqueInfo} =
- case (catch get_unique_fieldname(S,ClassDef)) of
- {error,'__undefined_',_} ->
- {{unique,undefined},{unique,undefined}};
- {asn1,Msg,_} -> error({class,Msg,S});
- {'EXIT',Msg} -> error({class,{internal_error,Msg},S});
+check_object(S, _, #'ObjectSet'{class=ClassRef0,set=Set0}=ObjSet0) ->
+ {_,ClassDef} = get_referenced_type(S, ClassRef0),
+ ClassRef = check_externaltypereference(S, ClassRef0),
+ {UniqueFieldName,UniqueInfo} =
+ case get_unique_fieldname(S, ClassDef) of
+ no_unique -> {{unique,undefined},{unique,undefined}};
Other -> {element(1,Other),Other}
end,
- NewObjSet=
- case prepare_objset(ObjSet#'ObjectSet'.set) of
- {set,SET,EXT} ->
- CheckedSet = check_object_list(S,NewClassRef,SET),
- NewSet = get_unique_valuelist(S,CheckedSet,UniqueInfo),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=extensionmark(NewSet,EXT)};
-
- {'SingleValue',ERef = #'Externalvaluereference'{}} ->
- {RefedMod,ObjDef} = get_referenced_type(S,ERef),
- #'Object'{def=CheckedObj} =
- check_object(S,ObjDef,ObjDef#typedef.typespec),
-
- NewSet = get_unique_valuelist(S,[{{RefedMod,get_datastr_name(ObjDef)},
- CheckedObj}],
- UniqueInfo),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet};
- ['EXTENSIONMARK'] ->
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=['EXTENSIONMARK']};
-
- OSref when is_record(OSref,'Externaltypereference') ->
- {_,OS=#typedef{typespec=OSdef}} = get_referenced_type(S,OSref),
- check_object(S,OS,OSdef);
-
- {Type,{'EXCEPT',Exclusion}} when is_record(Type,type) ->
- {_,TDef} = get_referenced_type(S,Type#type.def),
- OS = TDef#typedef.typespec,
- NewSet = reduce_objectset(OS#'ObjectSet'.set,Exclusion),
- NewOS = OS#'ObjectSet'{set=NewSet},
- check_object(S,TDef#typedef{typespec=NewOS},
- NewOS);
- #type{def={pt,DefinedObjSet,ParamList}} ->
- {_,PObjSetDef} = get_referenced_type(S,DefinedObjSet),
- NewParamList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- ParamList],
- instantiate_pos(S,ClassRef,PObjSetDef,NewParamList);
-
- %% actually this is an ObjectSetFromObjects construct, it
- %% is when the object set is retrieved from an object
- %% field.
- #type{def=#'ObjectClassFieldType'{classname=ObjName,
- fieldname=FieldName}} ->
- {RefedObjMod,TDef} = get_referenced_type(S,ObjName),
- OS=TDef#typedef.typespec,
- %% should get the right object set here. Get the field
- %% FieldName out of the object set OS of class
- %% OS#'ObjectSet'.class
- OS2=check_object(S,TDef,OS),
- NewSet=object_set_from_objects(S,RefedObjMod,FieldName,OS2),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet};
- {'ObjectSetFromObjects',{_,_,ObjName},FieldName} ->
- {RefedObjMod,TDef} = get_referenced_type(S,ObjName),
- OS=TDef#typedef.typespec,
- %% should get the right object set here. Get the field
- %% FieldName out of the object set OS of class
- %% OS#'ObjectSet'.class
- OS2=check_object(S,TDef,OS),
- NewSet=object_set_from_objects(S,RefedObjMod,FieldName,OS2),
- ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet};
- {'ObjectSetFromObjects',{_,ObjName},FieldName} ->
- %% This is a ObjectSetFromObjects, i.e.
- %% ObjectSetFromObjects ::= ReferencedObjects "." FieldName
- %% with a defined object as ReferencedObjects. And
- %% the FieldName of the Class (object) contains an object set.
- {RefedObjMod,TDef} = get_referenced_type(S,ObjName),
- O1 = TDef#typedef.typespec,
- O2 = check_object(S,TDef,O1),
- NewSet = object_set_from_objects(S,RefedObjMod,FieldName,O2),
- OS2=ObjSet#'ObjectSet'{uniquefname=UniqueFieldName,
- set=NewSet},
- %%io:format("ObjectSet: ~p~n",[OS2]),
- OS2;
- {pos,{objectset,_,DefinedObjSet},Params} ->
- {_,PObjSetDef} = get_referenced_type(S,DefinedObjSet),
- NewParamList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- Params],
- instantiate_pos(S,ClassRef,PObjSetDef,NewParamList);
- Unknown ->
- exit({error,{unknown_object_set,Unknown},S})
- end,
- NewSet2 = remove_duplicate_objects(NewObjSet#'ObjectSet'.set),
- NewObjSet2 = NewObjSet#'ObjectSet'{set=NewSet2},
- Gen = gen_incl_set(S,NewObjSet2#'ObjectSet'.set,
- ClassDef),
- ?dbg("check_object done~n",[]),
- NewObjSet2#'ObjectSet'{class=NewClassRef,gen=Gen}.
+ OSI0 = #osi{st=S,classref=ClassRef,uniq=UniqueInfo,ext=false},
+ {Set1,OSI1} = if
+ is_list(Set0) ->
+ check_object_set_list(Set0, OSI0);
+ true ->
+ check_object_set(Set0, OSI0)
+ end,
+ Ext = case Set1 of
+ [] ->
+ %% FIXME: X420 does not compile unless we force
+ %% empty sets to be extensible. There should be
+ %% a better way.
+ true;
+ [_|_] ->
+ OSI1#osi.ext
+ end,
+ Set2 = remove_duplicate_objects(S, Set1),
+ Set = case Ext of
+ false -> Set2;
+ true -> Set2 ++ ['EXTENSIONMARK']
+ end,
+ ObjSet = ObjSet0#'ObjectSet'{uniquefname=UniqueFieldName,set=Set},
+ Gen = gen_incl_set(S, Set, ClassDef),
+ ObjSet#'ObjectSet'{class=ClassRef,gen=Gen}.
+
+check_object_set({element_set,Root0,Ext0}, OSI0) ->
+ OSI = case Ext0 of
+ none -> OSI0;
+ _ -> OSI0#osi{ext=true}
+ end,
+ case {Root0,Ext0} of
+ {empty,empty} -> {[],OSI};
+ {empty,Ext} -> check_object_set(Ext, OSI);
+ {Root,none} -> check_object_set(Root, OSI);
+ {Root,empty} -> check_object_set(Root, OSI);
+ {Root,Ext} -> check_object_set_list([Root,Ext], OSI)
+ end;
+check_object_set(#'Externaltypereference'{}=Ref, #osi{st=S}=OSI) ->
+ {_,#typedef{typespec=OSdef}=OS} = get_referenced_type(S, Ref),
+ ObjectSet = check_object(S, OS, OSdef),
+ check_object_set_objset(ObjectSet, OSI);
+check_object_set(#'Externalvaluereference'{}=Ref, #osi{st=S}=OSI) ->
+ {RefedMod,ObjName,#'Object'{def=Def}} = check_referenced_object(S, Ref),
+ ObjList = check_object_set_mk(RefedMod, ObjName, Def, OSI),
+ {ObjList,OSI};
+check_object_set({'EXCEPT',Incl0,Excl0}, OSI) ->
+ {Incl1,_} = check_object_set(Incl0, OSI),
+ {Excl1,_} = check_object_set(Excl0, OSI),
+ Exclude = sofs:set([N || {N,_} <- Excl1], [name]),
+ Incl2 = [{Name,Obj} || {Name,_,_}=Obj <- Incl1],
+ Incl3 = sofs:relation(Incl2, [{name,object}]),
+ Incl4 = sofs:drestriction(Incl3, Exclude),
+ Incl5 = sofs:to_external(Incl4),
+ Incl = [Obj || {_,Obj} <- Incl5],
+ {Incl,OSI};
+check_object_set({object,_,_}=Obj0, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ #'Object'{def=Def} =
+ check_object(S, #typedef{typespec=Obj0},
+ #'Object'{classname=ClassRef,def=Obj0}),
+ ObjList = check_object_set_mk(Def, OSI),
+ {ObjList,OSI};
+check_object_set(#'ObjectClassFieldType'{classname=ObjName,
+ fieldname=FieldNames},
+ #osi{st=S}=OSI) ->
+ Set = check_ObjectSetFromObjects(S, ObjName, FieldNames),
+ check_object_set_objset_list(Set, OSI);
+check_object_set({'ObjectSetFromObjects',Obj,FieldNames}, #osi{st=S}=OSI) ->
+ ObjName = element(tuple_size(Obj), Obj),
+ Set = check_ObjectSetFromObjects(S, ObjName, FieldNames),
+ check_object_set_objset_list(Set, OSI);
+check_object_set({pt,DefinedObjSet,ParamList0}, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ {_,PObjSetDef} = get_referenced_type(S, DefinedObjSet),
+ ParamList = match_parameters(S, ParamList0),
+ ObjectSet = instantiate_pos(S, ClassRef, PObjSetDef, ParamList),
+ check_object_set_objset(ObjectSet, OSI);
+check_object_set({pos,{objectset,_,DefinedObjSet},Params0}, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ {_,PObjSetDef} = get_referenced_type(S, DefinedObjSet),
+ Params = match_parameters(S, Params0),
+ ObjectSet = instantiate_pos(S, ClassRef, PObjSetDef, Params),
+ check_object_set_objset(ObjectSet, OSI);
+check_object_set({pv,{simpledefinedvalue,DefinedObject},Params}=PV, OSI) ->
+ #osi{st=S,classref=ClassRef} = OSI,
+ Args = match_parameters(S, Params),
+ #'Object'{def=Def} =
+ check_object(S, PV,
+ #'Object'{classname=ClassRef ,
+ def={po,{object,DefinedObject},Args}}),
+ ObjList = check_object_set_mk(Def, OSI),
+ {ObjList,OSI};
+check_object_set({'SingleValue',Val}, OSI) ->
+ check_object_set(Val, OSI);
+check_object_set({'ValueFromObject',{object,Object},FieldNames}, OSI) ->
+ #osi{st=S} = OSI,
+ case extract_field(S, Object, FieldNames) of
+ #'Object'{def=Def} ->
+ ObjList = check_object_set_mk(Def, OSI),
+ {ObjList,OSI};
+ _ ->
+ asn1_error(S, illegal_object)
+ end;
+check_object_set(#type{def=Def}, OSI) ->
+ check_object_set(Def, OSI);
+check_object_set({union,A0,B0}, OSI0) ->
+ {A,OSI1} = check_object_set(A0, OSI0),
+ {B,OSI} = check_object_set(B0, OSI1),
+ {A++B,OSI}.
+
+check_object_set_list([H|T], OSI0) ->
+ {Set0,OSI1} = check_object_set(H, OSI0),
+ {Set1,OSI2} = check_object_set_list(T, OSI1),
+ {Set0++Set1,OSI2};
+check_object_set_list([], OSI) ->
+ {[],OSI}.
+
+check_object_set_objset(#'ObjectSet'{set=Set}, OSI) ->
+ check_object_set_objset_list(Set, OSI).
+
+check_object_set_objset_list(Set, OSI) ->
+ check_object_set_objset_list_1(Set, OSI, []).
+
+check_object_set_objset_list_1(['EXTENSIONMARK'|T], OSI, Acc) ->
+ check_object_set_objset_list_1(T, OSI#osi{ext=true}, Acc);
+check_object_set_objset_list_1([H|T], OSI, Acc) ->
+ check_object_set_objset_list_1(T, OSI, [H|Acc]);
+check_object_set_objset_list_1([], OSI, Acc) ->
+ {Acc,OSI}.
+
+check_object_set_mk(Fields, OSI) ->
+ check_object_set_mk(no_mod, no_name, Fields, OSI).
+
+check_object_set_mk(M, N, Def, #osi{uniq={unique,undefined}}) ->
+ {_,_,Fields} = Def,
+ [{{M,N},no_unique_value,Fields}];
+check_object_set_mk(M, N, Def, #osi{uniq={UniqField,_}}) ->
+ {_,_,Fields} = Def,
+ case lists:keyfind(UniqField, 1, Fields) of
+ {UniqField,#valuedef{value=Val}} ->
+ [{{M,N},Val,Fields}];
+ false ->
+ case Fields of
+ [{_,#typedef{typespec=#'ObjectSet'{set=['EXTENSIONMARK']}}}] ->
+ %% FIXME: If object is missing the unique field and
+ %% only contains a reference to an empty object set,
+ %% we will remove the entire object as a workaround
+ %% to get X420 to compile. There should be a better
+ %% way.
+ [];
+ _ ->
+ [{{M,N},no_unique_value,Fields}]
+ end
+ end.
%% remove_duplicate_objects/1 remove duplicates of objects.
%% For instance may Set contain objects of same class from
%% different object sets that in fact might be duplicates.
-remove_duplicate_objects(Set) when is_list(Set) ->
- Pred = fun({A,B,_},{A,C,_}) when B =< C -> true;
- ({A,_,_},{B,_,_}) when A < B -> true;
- ('EXTENSIONMARK','EXTENSIONMARK') -> true;
- (T,A) when is_tuple(T),is_atom(A) -> true;% EXTENSIONMARK last in list
- (_,_) -> false
- end,
- lists:usort(Pred,Set).
+remove_duplicate_objects(S, Set0) when is_list(Set0) ->
+ Set1 = [{Id,Orig} || {_,Id,_}=Orig <- Set0],
+ Set2 = sofs:relation(Set1),
+ Set3 = sofs:relation_to_family(Set2),
+ Set = sofs:to_external(Set3),
+ remove_duplicate_objects_1(S, Set).
+
+remove_duplicate_objects_1(S, [{no_unique_value,Objs}|T]) ->
+ Objs ++ remove_duplicate_objects_1(S, T);
+remove_duplicate_objects_1(S, [{_,[_]=Objs}|T]) ->
+ Objs ++ remove_duplicate_objects_1(S, T);
+remove_duplicate_objects_1(S, [{Id,[_|_]=Objs}|T]) ->
+ MakeSortable = fun(What) -> sortable_type(S, What) end,
+ Tagged = order_tag_set(Objs, MakeSortable),
+ case lists:ukeysort(1, Tagged) of
+ [{_,Obj}] ->
+ [Obj|remove_duplicate_objects_1(S, T)];
+ [_|_] ->
+ asn1_error(S, {non_unique_object,Id})
+ end;
+remove_duplicate_objects_1(_, []) ->
+ [].
-%%
-extensionmark(L,true) ->
- case lists:member('EXTENSIONMARK',L) of
- true -> L;
- _ -> L ++ ['EXTENSIONMARK']
+order_tag_set([{_, _, Fields}=Orig|Fs], Fun) ->
+ Pair = {[{FId, traverse(F, Fun)} || {FId, F} <- Fields], Orig},
+ [Pair|order_tag_set(Fs, Fun)];
+order_tag_set([], _) -> [].
+
+sortable_type(S, #'Externaltypereference'{}=ERef) ->
+ try get_referenced_type(S, ERef) of
+ {_,#typedef{}=OI} ->
+ OI#typedef{pos=undefined,name=undefined}
+ catch
+ _:_ ->
+ ERef
end;
-extensionmark(L,_) ->
- L.
+sortable_type(_, #typedef{}=TD) ->
+ asn1ct:unset_pos_mod(TD#typedef{name=undefined});
+sortable_type(_, Type) ->
+ asn1ct:unset_pos_mod(Type).
+
+traverse(Structure0, Fun) ->
+ Structure = Fun(Structure0),
+ traverse_1(Structure, Fun).
+
+traverse_1(#typedef{typespec=TS0} = TD, Fun) ->
+ TS = traverse(TS0, Fun),
+ TD#typedef{typespec=TS};
+traverse_1(#valuedef{type=TS0} = VD, Fun) ->
+ TS = traverse(TS0, Fun),
+ VD#valuedef{type=TS};
+traverse_1(#type{def=TS0} = TD, Fun) ->
+ TS = traverse(TS0, Fun),
+ TD#type{def=TS};
+traverse_1(#'SEQUENCE'{components=Cs0} = Seq, Fun) ->
+ Cs = traverse_seq_set(Cs0, Fun),
+ Seq#'SEQUENCE'{components=Cs};
+traverse_1({'SEQUENCE OF',Type0}, Fun) ->
+ Type = traverse(Type0, Fun),
+ {'SEQUENCE OF',Type};
+traverse_1({'SET OF',Type0}, Fun) ->
+ Type = traverse(Type0, Fun),
+ {'SET OF',Type};
+traverse_1(#'SET'{components=Cs0} = Set, Fun) ->
+ Cs = traverse_seq_set(Cs0, Fun),
+ Set#'SET'{components=Cs};
+traverse_1({'CHOICE', Cs0}, Fun) ->
+ Cs = traverse_seq_set(Cs0, Fun),
+ {'CHOICE', Cs};
+traverse_1(Leaf, _) ->
+ Leaf.
+
+traverse_seq_set(List, Fun) when is_list(List) ->
+ traverse_seq_set_1(List, Fun);
+traverse_seq_set({Set, Ext}, Fun) ->
+ {traverse_seq_set_1(Set, Fun), traverse_seq_set_1(Ext, Fun)};
+traverse_seq_set({Set1, Set2, Set3}, Fun) ->
+ {traverse_seq_set_1(Set1, Fun),
+ traverse_seq_set_1(Set2, Fun),
+ traverse_seq_set_1(Set3, Fun)}.
+
+traverse_seq_set_1([#'ComponentType'{} = CT0|Cs], Fun) ->
+ CT = #'ComponentType'{typespec=TS0} = Fun(CT0),
+ TS = traverse(TS0, Fun),
+ [CT#'ComponentType'{typespec=TS}|traverse_seq_set_1(Cs, Fun)];
+traverse_seq_set_1([{'COMPONENTS OF', _} = CO0|Cs], Fun) ->
+ {'COMPONENTS OF', TS0} = Fun(CO0),
+ TS = traverse(TS0, Fun),
+ [{'COMPONENTS OF', TS}|traverse_seq_set_1(Cs, Fun)];
+traverse_seq_set_1([], _) ->
+ [].
-object_to_check(#typedef{typespec=ObjDef}) ->
+object_to_check(_, #typedef{typespec=ObjDef}) ->
ObjDef;
-object_to_check(#valuedef{type=ClassName,value=ObjectRef}) ->
+object_to_check(S, #valuedef{type=Class,value=ObjectRef}) ->
 %% If the object definition is parsed as an object, the ClassName
- %% is parsed as a type
- #'Object'{classname=ClassName#type.def,def=ObjectRef}.
-
-prepare_objset({'SingleValue',Set}) when is_list(Set) ->
- {set,Set,false};
-prepare_objset(L=['EXTENSIONMARK']) ->
- L;
-prepare_objset(Set) when is_list(Set) ->
- {set,Set,false};
-prepare_objset({{'SingleValue',Set},Ext}) ->
- {set,merge_sets(Set,Ext),true};
-%%prepare_objset({Set,Ext}) when is_list(Set),is_list(Ext) ->
-%% {set,lists:append([Set,Ext]),true};
-prepare_objset({Set,Ext}) when is_list(Set) ->
- {set,merge_sets(Set,Ext),true};
-prepare_objset({{object,definedsyntax,_ObjFields}=Set,Ext}) ->
- {set,merge_sets(Set, Ext),true};
-prepare_objset(ObjDef={object,definedsyntax,_ObjFields}) ->
- {set,[ObjDef],false};
-prepare_objset({ObjDef=#type{},Ext}) when is_list(Ext) ->
- {set,[ObjDef|Ext],true};
-prepare_objset({#type{}=Type,#type{}=Ext}) ->
- {set,[Type,Ext],true};
-prepare_objset(Ret) ->
- Ret.
-
-class_fields_optional_check(S,#classdef{typespec=ClassSpec}) ->
- Fields = ClassSpec#objectclass.fields,
- class_fields_optional_check1(S,Fields).
-
-class_fields_optional_check1(_S,[]) ->
- ok;
-class_fields_optional_check1(S,[{typefield,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{fixedtypevaluefield,_,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{fixedtypevaluesetfield,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{objectfield,_,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest);
-class_fields_optional_check1(S,[{objectsetfield,_,_,'OPTIONAL'}|Rest]) ->
- class_fields_optional_check1(S,Rest).
-
-%% ObjectSetFromObjects functionality
-
-%% The fieldname is a list of field names.They may be objects or
-%% object sets. If ObjectSet is an object set the resulting object set
-%% is the union of object sets if the last field name is an object
-%% set. If the last field is an object the resulting object set is
-%% the set of objects in ObjectSet.
-object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet) ->
- object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet,[]).
-object_set_from_objects(S,RefedObjMod,FieldName,ObjectSet,InterSect)
- when is_record(ObjectSet,'ObjectSet') ->
- #'ObjectSet'{class=Cl,set=Set} = ObjectSet,
- {_,ClassDef} = get_referenced_type(S,Cl),
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Set,InterSect,[]);
-object_set_from_objects(S,RefedObjMod,FieldName,Object,InterSect)
- when is_record(Object,'Object') ->
- #'Object'{classname=Cl,def=Def}=Object,
- object_set_from_objects(S,RefedObjMod,Cl,FieldName,[Def],InterSect,[]).
-object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,['EXTENSIONMARK'|Os],
- InterSect,Acc) ->
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,%%Acc);
- ['EXTENSIONMARK'|Acc]);
-object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,[O|Os],InterSect,Acc) ->
- case object_set_from_objects2(S,mod_of_obj(RefedObjMod,element(1,O)),
- ClassDef,FieldName,element(3,O),InterSect) of
- ObjS when is_list(ObjS) ->
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,ObjS++Acc);
- Obj ->
- object_set_from_objects(S,RefedObjMod,ClassDef,FieldName,Os,InterSect,[Obj|Acc])
- end;
-object_set_from_objects(_S,_RefedObjMod,_ClassDef,_FieldName,[],InterSect,Acc) ->
- %% For instance may Acc contain objects of same class from
- %% different object sets that in fact might be duplicates.
- remove_duplicate_objects(osfo_intersection(InterSect,Acc)).
-%% Acc.
-object_set_from_objects2(S,RefedObjMod,ClassDef,[{valuefieldreference,OName}],
- Fields,_InterSect) ->
- %% this is an object
- case lists:keysearch(OName,1,Fields) of
- {value,{_,TDef}} ->
- mk_object_set_from_object(S,RefedObjMod,TDef,ClassDef);
- _ ->
- [] % it may be an absent optional field
- end;
-object_set_from_objects2(S,RefedObjMod,ClassDef,[{typefieldreference,OSName}],
- Fields,_InterSect) ->
- %% this is an object set
- case lists:keysearch(OSName,1,Fields) of
- {value,{_,TDef}} ->
- case TDef#typedef.typespec of
- #'ObjectSet'{class=_NextClName,set=NextSet} ->%% = TDef#typedef.typespec,
- NextSet;
- #'Object'{def=_ObjDef} ->
- mk_object_set_from_object(S,RefedObjMod,TDef,ClassDef)
-%% ObjDef
- %% error({error,{internal,unexpected_object,TDef}})
- end;
- _ ->
- [] % it may be an absent optional field
- end;
-object_set_from_objects2(S,RefedObjMod,_ClassDef,[{valuefieldreference,OName}|Rest],
- Fields,InterSect) ->
- %% this is an object
- case lists:keysearch(OName,1,Fields) of
- {value,{_,TDef}} ->
- #'Object'{classname=NextClName,def=ODef}=TDef#typedef.typespec,
- {_,_,NextFields}=ODef,
- {_,NextClass} = get_referenced_type(S,NextClName),
- object_set_from_objects2(S,RefedObjMod,NextClass,Rest,NextFields,InterSect);
- _ ->
- []
- end;
-object_set_from_objects2(S,RefedObjMod,_ClassDef,[{typefieldreference,OSName}|Rest],
- Fields,InterSect) ->
- %% this is an object set
- Next = {NextClName,NextSet} =
- case lists:keysearch(OSName,1,Fields) of
- {value,{_,TDef}} when is_record(TDef,'ObjectSet') ->
- #'ObjectSet'{class=NextClN,set=NextS} = TDef,
- {NextClN,NextS};
- {value,{_,#typedef{typespec=OS}}} ->
- %% objectsets in defined syntax will come here as typedef{}
- %% #'ObjectSet'{class=NextClN,set=NextS} = OS,
- case OS of
- #'ObjectSet'{class=NextClN,set=NextS} ->
- {NextClN,NextS};
- #'Object'{classname=NextClN,def=NextDef} ->
- {NextClN,[NextDef]}
- end;
+ %% is parsed as a type.
+ case Class of
+ #type{def=#'Externaltypereference'{}=Def} ->
+ #'Object'{classname=Def,def=ObjectRef};
_ ->
- {[],[]}
- end,
- case Next of
- {[],[]} ->
- [];
- _ ->
- {_,NextClass} = get_referenced_type(S,NextClName),
- object_set_from_objects(S,RefedObjMod,NextClass,Rest,NextSet,InterSect,[])
- end.
-
-mk_object_set_from_object(S,RefedObjMod,TDef,Class) ->
- #'Object'{classname=_NextClName,def=ODef} = TDef#typedef.typespec,
- {_,_,NextFields}=ODef,
-
- UniqueFieldName =
- case (catch get_unique_fieldname(S,Class)) of
- {error,'__undefined_',_} -> {unique,undefined};
- {asn1,Msg,_} -> error({class,Msg,S});
- {'EXIT',Msg} -> error({class,{internal_error,Msg},S});
- {Other,_} -> Other
- end,
- VDef = get_unique_value(S,NextFields,UniqueFieldName),
- %% XXXXXXXXXXX
- case VDef of
- [] ->
- ['EXTENSIONMARK'];
- _ ->
- {{RefedObjMod,get_datastr_name(TDef)},VDef,NextFields}
+ asn1_error(S, illegal_object)
end.
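+%% For illustration (hypothetical ASN.1): a definition such as
+%%     obj MY-CLASS ::= otherObject
+%% is parsed as a #valuedef{} whose type is an external reference to
+%% MY-CLASS, so it is repackaged here as
+%%     #'Object'{classname=ClassReference,def=otherObject}
+%% before checking; any other shape in the type position is rejected
+%% with illegal_object.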
-
-
-mod_of_obj(_RefedObjMod,{NewMod,ObjName})
- when is_atom(NewMod),is_atom(ObjName) ->
- NewMod;
-mod_of_obj(RefedObjMod,_) ->
- RefedObjMod.
-
-
-merge_sets(Root,{'SingleValue',Ext}) ->
- merge_sets(Root,Ext);
-merge_sets(Root,Ext) when is_list(Root),is_list(Ext) ->
- Root ++ Ext;
-merge_sets(Root,Ext) when is_list(Ext) ->
- [Root|Ext];
-merge_sets(Root,Ext) when is_list(Root) ->
- Root++[Ext];
-merge_sets(Root,Ext) ->
- [Root]++[Ext].
-
-reduce_objectset(ObjectSet,Exclusion) ->
- case Exclusion of
- {'SingleValue',#'Externalvaluereference'{value=Name}} ->
- case lists:keysearch(Name,1,ObjectSet) of
- {value,El} ->
- lists:subtract(ObjectSet,[El]);
- _ ->
- ObjectSet
- end
- end.
-
-%% Checks a list of objects or object sets and returns a list of selected
-%% information for the code generation.
-check_object_list(S,ClassRef,ObjectList) ->
- check_object_list(S,ClassRef,ObjectList,[]).
-
-check_object_list(S,ClassRef,[ObjOrSet|Objs],Acc) ->
- ?dbg("check_object_list: ~p~n",[ObjOrSet]),
- case ObjOrSet of
- ObjDef when is_tuple(ObjDef),(element(1,ObjDef)==object) ->
- Def =
- check_object(S,#typedef{typespec=ObjDef},
-% #'Object'{classname={objectclassname,ClassRef},
- #'Object'{classname=ClassRef,
- def=ObjDef}),
- check_object_list(S,ClassRef,Objs,[{{no_mod,no_name},Def#'Object'.def}|Acc]);
- {'SingleValue',Ref = #'Externalvaluereference'{}} ->
- ?dbg("{SingleValue,Externalvaluereference}~n",[]),
- {RefedMod,ObjName,
- #'Object'{def=Def}} = check_referenced_object(S,Ref),
- check_object_list(S,ClassRef,Objs,[{{RefedMod,ObjName},Def}|Acc]);
- ObjRef when is_record(ObjRef,'Externalvaluereference') ->
- ?dbg("Externalvaluereference~n",[]),
- {RefedMod,ObjName,
- #'Object'{def=Def}} = check_referenced_object(S,ObjRef),
- check_object_list(S,ClassRef,Objs,[{{RefedMod,ObjName},Def}|Acc]);
- {'ValueFromObject',{_,Object},FieldName} ->
- {_,Def} = get_referenced_type(S,Object),
- TypeDef = get_fieldname_element(S,Def,FieldName),
- (TypeDef#typedef.typespec)#'ObjectSet'.set;
- ObjSet when is_record(ObjSet,type) ->
- ObjSetDef =
- case ObjSet#type.def of
- Ref when is_record(Ref,'Externaltypereference') ->
- {_,D} = get_referenced_type(S,ObjSet#type.def),
- D;
- Other ->
- throw({asn1_error,{'unknown objecset',Other,S}})
- end,
- #'ObjectSet'{set=ObjectsInSet} =
- check_object(S,ObjSetDef,ObjSetDef#typedef.typespec),
- AccList = transform_set_to_object_list(ObjectsInSet,[]),
- check_object_list(S,ClassRef,Objs,AccList++Acc);
- union ->
- check_object_list(S,ClassRef,Objs,Acc);
- {pos,{objectset,_,DefinedObjectSet},Params} ->
- OSDef = #type{def={pt,DefinedObjectSet,Params}},
- #'ObjectSet'{set=Set} =
- check_object(S,ObjOrSet,#'ObjectSet'{class=ClassRef,
- set=OSDef}),
- check_object_list(S,ClassRef,Objs,Set ++ Acc);
- {pv,{simpledefinedvalue,DefinedObject},Params} ->
- Args = [match_parameters(S,Param,S#state.parameters)||
- Param<-Params],
- #'Object'{def=Def} =
- check_object(S,ObjOrSet,
- #'Object'{classname=ClassRef ,
- def={po,{object,DefinedObject},
- Args}}),
- check_object_list(S,ClassRef,Objs,[{{no_mod,no_name},Def}|Acc]);
- {'ObjectSetFromObjects',Os,FieldName} when is_tuple(Os) ->
- NewSet =
- check_ObjectSetFromObjects(S, element(tuple_size(Os), Os),
- FieldName,[]),
- check_object_list(S,ClassRef,Objs,NewSet++Acc);
- {{'ObjectSetFromObjects',Os,FieldName},InterSection}
- when is_tuple(Os) ->
- NewSet =
- check_ObjectSetFromObjects(S, element(tuple_size(Os), Os),
- FieldName,InterSection),
- check_object_list(S,ClassRef,Objs,NewSet++Acc);
- Other ->
- exit({error,{'unknown object',Other},S})
- end;
-%% Finally reverse the accumulated list and if there are any extension
-%% marks in the object set put one indicator of that in the end of the
-%% list.
-check_object_list(_,_,[],Acc) ->
- lists:reverse(Acc).
check_referenced_object(S,ObjRef)
when is_record(ObjRef,'Externalvaluereference')->
@@ -1213,195 +1002,134 @@ check_referenced_object(S,ObjRef)
check_object(update_state(S,RefedMod),ObjectDef,ObjectDef#typedef.typespec)}
end.
-check_ObjectSetFromObjects(S,ObjName,FieldName,InterSection) ->
- {RefedMod,TDef} = get_referenced_type(S,ObjName),
- ObjOrSet = check_object(update_state(S,RefedMod),TDef,TDef#typedef.typespec),
- InterSec = prepare_intersection(S,InterSection),
- _NewSet = object_set_from_objects(S,RefedMod,FieldName,ObjOrSet,InterSec).
+check_ObjectSetFromObjects(S, ObjName, Fields) ->
+ {_,Obj0} = get_referenced_type(S, ObjName),
+ case check_object(S, Obj0, Obj0#typedef.typespec) of
+ #'ObjectSet'{}=Obj1 ->
+ get_fieldname_set(S, Obj1, Fields);
+ #'Object'{classname=Class,
+ def={object,_,ObjFs}} ->
+ ObjSet = #'ObjectSet'{class=Class,
+ set=[{'_','_',ObjFs}]},
+ get_fieldname_set(S, ObjSet, Fields)
+ end.
-prepare_intersection(_S,[]) ->
- [];
-prepare_intersection(S,{'EXCEPT',ObjRef}) ->
- except_names(S,ObjRef);
-prepare_intersection(_S,T) ->
- exit({error,{internal_error,not_implemented,object_set_from_objects,T}}).
-except_names(_S,{'SingleValue',#'Externalvaluereference'{value=ObjName}}) ->
- [{except,ObjName}];
-except_names(_,T) ->
- exit({error,{internal_error,not_implemented,object_set_from_objects,T}}).
-
-osfo_intersection(InterSect,ObjList) ->
- Res = [X|| X = {{_,N},_,_} <- ObjList,
- lists:member({except,N},InterSect) == false],
- case lists:member('EXTENSIONMARK',ObjList) of
- true ->
- Res ++ ['EXTENSIONMARK'];
+%% get_type_from_object(State, ObjectOrObjectSet, [{RefType,FieldName}]) ->
+%% Type
+get_type_from_object(S, Object, FieldNames)
+ when is_record(Object, 'Externaltypereference');
+ is_record(Object, 'Externalvaluereference') ->
+ extract_field(S, Object, FieldNames).
+
+%% get_value_from_object(State, ObjectOrObjectSet, [{RefType,FieldName}]) ->
+%% UntaggedValue
+get_value_from_object(S, Def, FieldNames) ->
+ case extract_field(S, Def, FieldNames) of
+ #valuedef{value=Val} ->
+ Val;
+ {valueset,_}=Val ->
+ Val;
_ ->
- Res
+ asn1_error(S, illegal_value)
end.
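+
+%% Sketch of the intended use (names are made up): for a reference such as
+%%     maxSize INTEGER ::= someObject.&maxCount
+%% the parser yields FieldNames = [{valuefieldreference,maxCount}] and
+%% get_value_from_object/3 returns the value stored in the &maxCount field
+%% of someObject; a type reference like SomeObject.&TypeField goes through
+%% get_type_from_object/3 with a typefieldreference tuple instead.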
-%% get_fieldname_element/3
-%% gets the type/value/object/... of the referenced element in FieldName
-%% FieldName is a list and may have more than one element.
-%% Each element in FieldName can be either {typefieldreference,AnyFieldName}
-%% or {valuefieldreference,AnyFieldName}
-%% Def is the def of the first object referenced by FieldName
-get_fieldname_element(S,Def,[{_RefType,FieldName}]) when is_record(Def,typedef) ->
- {_,_,ObjComps} = (Def#typedef.typespec)#'Object'.def,
- check_fieldname_element(S,lists:keysearch(FieldName,1,ObjComps));
-get_fieldname_element(S,Def,[{_RefType,FieldName}|Rest])
- when is_record(Def,typedef) ->
- %% As FieldName is followd by other FieldNames it has to be an
- %% object or objectset.
- {_,_,ObjComps} = (Def#typedef.typespec)#'Object'.def,
- NewDef = check_fieldname_element(S,lists:keysearch(FieldName,1,ObjComps)),
- ObjDef = fun(#'Object'{def=D}) -> D;
- (#'ObjectSet'{set=Set}) -> Set
- end
- (NewDef),
- case ObjDef of
+%% extract_field(State, ObjectOrObjectSet, [{RefType,FieldName}])
+%% RefType = typefieldreference | valuefieldreference
+%%
+%% Get the type, value, object, object set, or value set from the
+%% referenced object or object set. The list of field name tuples
+%% may have more than one element. All field names but the last
+%% refer to either an object or an object set.
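+%%
+%% As an illustration (field names invented), a chain such as
+%%     [{typefieldreference,'ObjSetField'},{valuefieldreference,id}]
+%% first selects the object set stored in &ObjSetField and then, through
+%% get_fieldname_set/3 below, collects the &id value of every object in
+%% that set, returning them as {valueset,Values}.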
+
+extract_field(S, Def0, FieldNames) ->
+ {_,Def1} = get_referenced_type(S, Def0),
+ Def2 = check_object(S, Def1, Def1#typedef.typespec),
+ Def = Def1#typedef{typespec=Def2},
+ get_fieldname_element(S, Def, FieldNames).
+
+%% get_fieldname_element(State, Element, [{RefType,FieldName}])
+%% RefType = typefieldreference | valuefieldreference
+%%
+%% Get the type, value, object, object set, or value set from the referenced
+%% element. The list of field name tuples may have more than one element.
+%% All field names but the last refer to either an object or an object set.
+
+get_fieldname_element(S, Object0, [{_RefType,FieldName}|Fields]) ->
+ Object = case Object0 of
+ #typedef{typespec=#'Object'{def=Obj}} -> Obj;
+ {_,_,_}=Obj -> Obj
+ end,
+ case check_fieldname_element(S, FieldName, Object) of
+ #'Object'{def=D} when Fields =/= [] ->
+ get_fieldname_element(S, D, Fields);
+ #'ObjectSet'{}=Set ->
+ get_fieldname_set(S, Set, Fields);
+ Result when Fields =:= [] ->
+ Result
+ end;
+get_fieldname_element(_S, Def, []) ->
+ Def.
+
+get_fieldname_set(S, #'ObjectSet'{set=Set0}, T) ->
+ get_fieldname_set_1(S, Set0, T, []).
+
+get_fieldname_set_1(S, ['EXTENSIONMARK'=Ext|T], Fields, Acc) ->
+ get_fieldname_set_1(S, T, Fields, [Ext|Acc]);
+get_fieldname_set_1(S, [H|T], Fields, Acc) ->
+ try get_fieldname_element(S, H, Fields) of
L when is_list(L) ->
- [get_fieldname_element(S,X,Rest) || X <- L];
- _ ->
- get_fieldname_element(S,ObjDef,Rest)
+ get_fieldname_set_1(S, T, Fields, L++Acc);
+ {valueset,L} ->
+ get_fieldname_set_1(S, T, Fields, L++Acc);
+ Other ->
+ get_fieldname_set_1(S, T, Fields, [Other|Acc])
+ catch
+ throw:{error,_} ->
+ get_fieldname_set_1(S, T, Fields, Acc)
end;
-get_fieldname_element(S,{object,_,Fields},[{_RefType,FieldName}|Rest]) ->
- NewDef = check_fieldname_element(S,lists:keysearch(FieldName,1,Fields)),
- get_fieldname_element(S,NewDef,Rest);
-get_fieldname_element(_S,Def,[]) ->
- Def;
-get_fieldname_element(_S,Def,[{_RefType,_FieldName}|_RestFName])
- when is_record(Def,typedef) ->
- ok.
+get_fieldname_set_1(_, [], _Fields, Acc) ->
+ case Acc of
+ [#valuedef{}|_] ->
+ {valueset,Acc};
+ _ ->
+ Acc
+ end.
-check_fieldname_element(S,{value,{_,Def}}) ->
- check_fieldname_element(S,Def);
-check_fieldname_element(S, #typedef{typespec=Ts}=TDef) ->
+check_fieldname_element(S, Name, {_,_,Fields}) ->
+ case lists:keyfind(Name, 1, Fields) of
+ {Name,Def} ->
+ check_fieldname_element_1(S, Def);
+ false ->
+ asn1_error(S, {undefined_field,Name})
+ end.
+
+check_fieldname_element_1(S, #typedef{typespec=Ts}=TDef) ->
case Ts of
#'Object'{} ->
check_object(S, TDef, Ts);
_ ->
check_type(S, TDef, Ts)
end;
-check_fieldname_element(S, #valuedef{}=VDef) ->
+check_fieldname_element_1(S, #valuedef{}=VDef) ->
try
check_value(S, VDef)
catch
- throw:{objectdef} ->
+ throw:{asn1_class, _} ->
#valuedef{checked=C,pos=Pos,name=N,type=Type,
value=Def} = VDef,
ClassName = Type#type.def,
NewSpec = #'Object'{classname=ClassName,def=Def},
NewDef = #typedef{checked=C,pos=Pos,name=N,typespec=NewSpec},
- check_fieldname_element(S, NewDef)
+ check_fieldname_element_1(S, NewDef)
end;
-check_fieldname_element(S,Eref)
- when is_record(Eref,'Externaltypereference');
- is_record(Eref,'Externalvaluereference') ->
- {_,TDef}=get_referenced_type(S,Eref),
- check_fieldname_element(S,TDef);
-check_fieldname_element(S,Other) ->
- throw({error,{assigned_object_error,"not_assigned_object",Other,S}}).
+check_fieldname_element_1(_S, {value_tag,Val}) ->
+ #valuedef{value=Val};
+check_fieldname_element_1(S, Eref)
+ when is_record(Eref, 'Externaltypereference');
+ is_record(Eref, 'Externalvaluereference') ->
+ {_,TDef} = get_referenced_type(S, Eref),
+ check_fieldname_element_1(S, TDef).
-transform_set_to_object_list([{Name,_UVal,Fields}|Objs],Acc) ->
- transform_set_to_object_list(Objs,[{Name,{object,generatesyntax,Fields}}|Acc]);
-transform_set_to_object_list(['EXTENSIONMARK'|Objs],Acc) ->
-%% transform_set_to_object_list(Objs,['EXTENSIONMARK'|Acc]);
- transform_set_to_object_list(Objs,Acc);
-transform_set_to_object_list([],Acc) ->
- Acc.
-
-get_unique_valuelist(_S,ObjSet,{unique,undefined}) -> % no unique field in object
- lists:map(fun({N,{_,_,F}})->{N,no_unique_value,F};
- (V={_,_,_}) ->V;
- ({A,B}) -> {A,no_unique_value,B}
- end, ObjSet);
-get_unique_valuelist(S,ObjSet,{UFN,Opt}) ->
- get_unique_vlist(S,ObjSet,UFN,Opt,[]).
-
-
-get_unique_vlist(_S,[],_,_,[]) ->
- ['EXTENSIONMARK'];
-get_unique_vlist(S,[],_,Opt,Acc) ->
- case catch check_uniqueness(remove_duplicate_objects(Acc)) of
- {asn1_error,_} when Opt =/= 'OPTIONAL' ->
- error({'ObjectSet',"not unique objects in object set",S});
- {asn1_error,_} ->
- lists:reverse(Acc);
- _ ->
- lists:reverse(Acc)
- end;
-get_unique_vlist(S,['EXTENSIONMARK'|Rest],UniqueFieldName,Opt,Acc) ->
- get_unique_vlist(S,Rest,UniqueFieldName,Opt,Acc);
-get_unique_vlist(S,[{ObjName,Obj}|Rest],UniqueFieldName,Opt,Acc) ->
- {_,_,Fields} = Obj,
- NewObjInf =
- case get_unique_value(S,Fields,UniqueFieldName) of
- #valuedef{value=V} -> [{ObjName,V,Fields}];
- [] -> []; % maybe the object only was a reference to an
- % empty object set.
- no_unique_value -> [{ObjName,no_unique_value,Fields}]
- end,
- get_unique_vlist(S,Rest,UniqueFieldName,Opt,NewObjInf++Acc);
-
-get_unique_vlist(S,[V={_,_,_}|Rest],UniqueFieldName,Opt,Acc) ->
- get_unique_vlist(S,Rest,UniqueFieldName,Opt,[V|Acc]).
-
-get_unique_value(S,Fields,UniqueFieldName) ->
- Module = S#state.mname,
- case lists:keysearch(UniqueFieldName,1,Fields) of
- {value,Field} ->
- case element(2,Field) of
- VDef when is_record(VDef,valuedef) ->
- VDef;
- {'ValueFromObject',Object,Name} ->
- case Object of
- {object,Ext} when is_record(Ext,'Externaltypereference') ->
- OtherModule = Ext#'Externaltypereference'.module,
- ExtObjName = Ext#'Externaltypereference'.type,
- ObjDef = asn1_db:dbget(OtherModule,ExtObjName),
- ObjSpec = ObjDef#typedef.typespec,
- get_unique_value(OtherModule,element(3,ObjSpec),Name);
- {object,{_,_,ObjName}} ->
- ObjDef = asn1_db:dbget(Module,ObjName),
- ObjSpec = ObjDef#typedef.typespec,
- get_unique_value(Module,element(3,ObjSpec),Name);
- {po,Object,_Params} ->
- exit({error,{'parameterized object not implemented yet',
- Object},S})
- end;
- Value when is_atom(Value);is_number(Value) ->
- #valuedef{value=Value,module=Module};
- {'CHOICE',{C,Value}} when is_atom(C) ->
- %% #valuedef{value=normalize_value(S,element(3,Field),VDef,[])}
- case Value of
- Scalar when is_atom(Scalar);is_number(Scalar) ->
- #valuedef{value=Value,module=Module};
- Eref = #'Externalvaluereference'{} ->
- element(2,get_referenced_type(S,Eref))
- end
- end;
- false ->
- case Fields of
- [{_,#typedef{typespec=#'ObjectSet'{set=['EXTENSIONMARK']}}}] ->
- [];
- _ ->
- no_unique_value
- end
- end.
-
-check_uniqueness(NameValueList) ->
- check_uniqueness1(lists:keysort(2,NameValueList)).
-
-check_uniqueness1([]) ->
- true;
-check_uniqueness1([_]) ->
- true;
-check_uniqueness1([{_,N,_},{_,N,_}|_Rest]) ->
- throw({asn1_error,{'objects in set must have unique values in UNIQUE fields',N}});
-check_uniqueness1([_|Rest]) ->
- check_uniqueness1(Rest).
-
%% instantiate_po/4
%% ClassDef is the class of Object,
%% Object is the Parameterized object, which is referenced,
@@ -1410,8 +1138,7 @@ check_uniqueness1([_|Rest]) ->
instantiate_po(S=#state{parameters=_OldArgs},_ClassDef,Object,ArgsList) when is_record(Object,pobjectdef) ->
FormalParams = get_pt_args(Object),
MatchedArgs = match_args(S,FormalParams,ArgsList,[]),
-% NewS = S#state{type=Object,parameters=MatchedArgs++OldArgs},
- NewS = S#state{type=Object,parameters=MatchedArgs},
+ NewS = S#state{parameters=MatchedArgs},
check_object(NewS,Object,#'Object'{classname=Object#pobjectdef.class,
def=Object#pobjectdef.def}).
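+
+%% For illustration (hypothetical definitions): given
+%%     obj{INTEGER:id} MY-CLASS ::= { CODE id }
+%% a reference obj{42} reaches instantiate_po/4 with ArgsList = [42]; the
+%% actual argument is matched against the formal parameter id and the
+%% object body is then checked as an ordinary object with that binding
+%% available in S#state.parameters.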
@@ -1421,20 +1148,14 @@ instantiate_po(S=#state{parameters=_OldArgs},_ClassDef,Object,ArgsList) when is_
%% on the right side of the assignment,
%% ArgsList is the list of actual parameters, i.e. real objects
instantiate_pos(S=#state{parameters=_OldArgs},ClassRef,ObjectSetDef,ArgsList) ->
-% ClassName = ClassDef#classdef.name,
FormalParams = get_pt_args(ObjectSetDef),
OSet = case get_pt_spec(ObjectSetDef) of
- {valueset,Set} ->
-% #'ObjectSet'{class=name2Extref(S#state.mname,
-% ClassName),set=Set};
- #'ObjectSet'{class=ClassRef,set=Set};
- Set when is_record(Set,'ObjectSet') -> Set;
- _ ->
- error({type,"parameterized object set failure",S})
+ {valueset,Set} -> #'ObjectSet'{class=ClassRef,set=Set};
+ Set when is_record(Set,'ObjectSet') -> Set;
+ _ -> asn1_error(S, invalid_objectset)
end,
MatchedArgs = match_args(S,FormalParams,ArgsList,[]),
-% NewS = S#state{type=ObjectSetDef,parameters=MatchedArgs++OldArgs},
- NewS = S#state{type=ObjectSetDef,parameters=MatchedArgs},
+ NewS = S#state{parameters=MatchedArgs},
check_object(NewS,ObjectSetDef,OSet).
@@ -1468,7 +1189,7 @@ gen_incl1(S,Fields,[C|CFields]) ->
check_object(S,TDef,TDef#typedef.typespec);
ERef ->
{_,T} = get_referenced_type(S,ERef),
- check_object(S,T,object_to_check(T))
+ check_object(S, T, object_to_check(S, T))
end,
case gen_incl(S,ObjDef#'Object'.def,
ClassFields) of
@@ -1485,7 +1206,7 @@ gen_incl1(S,Fields,[C|CFields]) ->
end.
get_objclass_fields(S,Eref=#'Externaltypereference'{}) ->
- {_,ClassDef} = get_referenced_type(S,Eref),
+ {_,ClassDef} = get_referenced_type(S,Eref, true),
get_objclass_fields(S,ClassDef);
get_objclass_fields(S,CD=#classdef{typespec=#'Externaltypereference'{}}) ->
get_objclass_fields(S,CD#classdef.typespec);
@@ -1501,10 +1222,10 @@ gen_incl_set(S,Fields,#typedef{typespec=#type{def=Eref}})
{_,CDef} = get_referenced_type(S,Eref),
gen_incl_set(S,Fields,CDef);
gen_incl_set(S,Fields,ClassDef) ->
- case catch get_unique_fieldname(S,ClassDef) of
- Tuple when tuple_size(Tuple) =:= 3 ->
+ case get_unique_fieldname(S, ClassDef) of
+ no_unique ->
false;
- _ ->
+ {_, _} ->
gen_incl_set1(S,Fields,
(ClassDef#classdef.typespec)#objectclass.fields)
end.
@@ -1529,475 +1250,390 @@ gen_incl_set1(S,[Object|Rest],CFields)->
gen_incl_set1(S,Rest,CFields)
end.
-check_objectdefn(S,Def,CDef) when is_record(CDef,classdef) ->
- WithSyntax = (CDef#classdef.typespec)#objectclass.syntax,
- ClassFields = (CDef#classdef.typespec)#objectclass.fields,
+
+%%%
+%%% Check an object definition.
+%%%
+
+check_objectdefn(S, Def, #classdef{typespec=ObjClass}) ->
+ #objectclass{syntax=Syntax0,fields=ClassFields} = ObjClass,
case Def of
{object,defaultsyntax,Fields} ->
- check_defaultfields(S,Fields,ClassFields);
+ check_defaultfields(S, Fields, ClassFields);
{object,definedsyntax,Fields} ->
- {_,WSSpec} = WithSyntax,
- NewFields =
- case catch( convert_definedsyntax(S,Fields,WSSpec,
- ClassFields,[])) of
- {asn1,{_ErrorType,ObjToken,ClassToken}} ->
- throw({asn1,{'match error in object',ObjToken,
- 'found in object',ClassToken,'found in class'}});
- Err={asn1,_} -> throw(Err);
- Err={'EXIT',_} -> throw(Err);
- DefaultFields when is_list(DefaultFields) ->
- DefaultFields
- end,
- {object,defaultsyntax,NewFields};
- {object,_ObjectId} -> % This is a DefinedObject
- fixa;
- Other ->
- exit({error,{objectdefn,Other}})
+ Syntax = get_syntax(S, Syntax0, ClassFields),
+ case match_syntax(S, Syntax, Fields, []) of
+ {match,NewFields,[]} ->
+ {object,defaultsyntax,NewFields};
+ {match,_,[What|_]} ->
+ syntax_match_error(S, What);
+ {nomatch,[What|_]} ->
+ syntax_match_error(S, What);
+ {nomatch,[]} ->
+ syntax_match_error(S)
+ end
+ end.
+
+
+%%%
+%%% Pre-process the simplified syntax so that it can be more
+%%% easily matched.
+%%%
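+%%% As a rough illustration (hypothetical class), a clause such as
+%%%
+%%%     WITH SYNTAX { CODE &code [ARGUMENT &Argument] }
+%%%
+%%% is pre-processed into something along the lines of
+%%%
+%%%     [{token,'CODE'},{field,FixedTypeValueFieldDef},
+%%%      {optional,[{token,'ARGUMENT'},{field,TypeFieldDef}]}]
+%%%
+%%% where each {field,_} tuple carries the class field definition it
+%%% resolved to. Duplicated field names, missing mandatory fields, and
+%%% mandatory fields placed inside an optional group are all rejected here.
+%%%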
+
+get_syntax(_, {preprocessed_syntax,Syntax}, _) ->
+ Syntax;
+get_syntax(S, {'WITH SYNTAX',Syntax}, ClassFields) ->
+ preprocess_syntax(S, Syntax, ClassFields).
+
+preprocess_syntax(S, Syntax0, Cs) ->
+ Syntax = preprocess_syntax_1(S, Syntax0, Cs, true),
+ Present0 = preprocess_get_fields(Syntax, []),
+ Present1 = lists:sort(Present0),
+ Present = ordsets:from_list(Present1),
+ case Present =:= Present1 of
+ false ->
+ Dupl = Present1 -- Present,
+ asn1_error(S, {syntax_duplicated_fields,Dupl});
+ true ->
+ ok
+ end,
+ Mandatory0 = get_mandatory_class_fields(Cs),
+ Mandatory = ordsets:from_list(Mandatory0),
+ case ordsets:subtract(Mandatory, Present) of
+ [] ->
+ Syntax;
+ [_|_]=Missing ->
+ asn1_error(S, {syntax_missing_mandatory_fields,Missing})
end.
+preprocess_syntax_1(S, [H|T], Cs, Mandatory) when is_list(H) ->
+ [{optional,preprocess_syntax_1(S, H, Cs, false)}|
+ preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S, [{valuefieldreference,Name}|T], Cs, Mandatory) ->
+ F = preprocess_check_field(S, Name, Cs, Mandatory),
+ [F|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S, [{typefieldreference,Name}|T], Cs, Mandatory) ->
+ F = preprocess_check_field(S, Name, Cs, Mandatory),
+ [F|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S,[{Token,_}|T], Cs, Mandatory) when is_atom(Token) ->
+ [{token,Token}|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(S, [Token|T], Cs, Mandatory) when is_atom(Token) ->
+ [{token,Token}|preprocess_syntax_1(S, T, Cs, Mandatory)];
+preprocess_syntax_1(_, [], _, _) -> [].
+
+preprocess_check_field(S, Name, Cs, Mandatory) ->
+ case lists:keyfind(Name, 2, Cs) of
+ Tuple when is_tuple(Tuple) ->
+ case not Mandatory andalso is_mandatory_class_field(Tuple) of
+ true ->
+ asn1_error(S, {syntax_mandatory_in_optional_group,Name});
+ false ->
+ {field,Tuple}
+ end;
+ false ->
+ asn1_error(S, {syntax_undefined_field,Name})
+ end.
+
+preprocess_get_fields([{field,F}|T], Acc) ->
+ Name = element(2, F),
+ preprocess_get_fields(T, [Name|Acc]);
+preprocess_get_fields([{optional,L}|T], Acc) ->
+ preprocess_get_fields(T, preprocess_get_fields(L, Acc));
+preprocess_get_fields([_|T], Acc) ->
+ preprocess_get_fields(T, Acc);
+preprocess_get_fields([], Acc) ->
+ Acc.
+
+%%%
+%%% Match the actual fields in the object definition to
+%%% the pre-processed simplified syntax.
+%%%
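+%%% Continuing the sketch above (hypothetical object), an object written
+%%% in defined syntax as
+%%%
+%%%     obj MY-CLASS ::= { CODE 1 ARGUMENT INTEGER }
+%%%
+%%% arrives here as a list of words and settings; match_syntax/4 consumes
+%%% the literal word CODE, binds 1 to &code, consumes ARGUMENT, binds the
+%%% INTEGER type to &Argument, and the caller wraps the result as
+%%% {object,defaultsyntax,Fields}.
+%%%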
+
+match_syntax(S, [{token,Token}|T], [A|As]=Args, Acc) ->
+ case A of
+ {word_or_setting,_,#'Externaltypereference'{type=Token}} ->
+ match_syntax(S, T, As, Acc);
+ {Token,Line} when is_integer(Line) ->
+ match_syntax(S, T, As, Acc);
+ _ ->
+ {nomatch,Args}
+ end;
+match_syntax(S, [{field,Field}|T]=Fs, [A|As0]=Args0, Acc) ->
+ try match_syntax_type(S, Field, A) of
+ {match,Match} ->
+ match_syntax(S, T, As0, lists:reverse(Match)++Acc);
+ {params,_Name,#ptypedef{args=Params}=P,Ref} ->
+ {Args,As} = lists:split(length(Params), As0),
+ Val = match_syntax_params(S, P, Ref, Args),
+ match_syntax(S, Fs, [Val|As], Acc)
+ catch
+ _:_ ->
+ {nomatch,Args0}
+ end;
+match_syntax(S, [{optional,L}|T], As0, Acc) ->
+ case match_syntax(S, L, As0, []) of
+ {match,Match,As} ->
+ match_syntax(S, T, As, lists:reverse(Match)++Acc);
+ {nomatch,As0} ->
+ match_syntax(S, T, As0, Acc);
+ {nomatch,_}=NoMatch ->
+ NoMatch
+ end;
+match_syntax(_, [_|_], [], _Acc) ->
+ {nomatch,[]};
+match_syntax(_, [], As, Acc) ->
+ {match,Acc,As}.
+
+match_syntax_type(S, Type, {value_tag,Val}) ->
+ match_syntax_type(S, Type, Val);
+match_syntax_type(S, Type, {setting,_,Val}) ->
+ match_syntax_type(S, Type, Val);
+match_syntax_type(S, Type, {word_or_setting,_,Val}) ->
+ match_syntax_type(S, Type, Val);
+match_syntax_type(_S, _Type, {Atom,Line})
+ when is_atom(Atom), is_integer(Line) ->
+ throw(nomatch);
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{}=T,_,_}=Type,
+ #'Externalvaluereference'{}=ValRef0) ->
+ try get_referenced_type(S, ValRef0) of
+ {M,#valuedef{}=ValDef} ->
+ match_syntax_type(update_state(S, M), Type, ValDef)
+ catch
+ throw:{error,_} ->
+ ValRef = #valuedef{name=Name,
+ type=T,
+ value=ValRef0,
+ module=S#state.mname},
+ match_syntax_type(S, Type, ValRef)
+ end;
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{},_,_}, #valuedef{}=Val0) ->
+ Val = check_value(S, Val0),
+ {match,[{Name,Val}]};
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{},_,_},
+ {'ValueFromObject',{object,Object},FieldNames}) ->
+ Val = extract_field(S, Object, FieldNames),
+ {match,[{Name,Val}]};
+match_syntax_type(S, {fixedtypevaluefield,Name,#type{}=T,_,_}=Type, Any) ->
+ ValDef = #valuedef{name=Name,type=T,value=Any,module=S#state.mname},
+ match_syntax_type(S, Type, ValDef);
+match_syntax_type(_S, {fixedtypevaluesetfield,Name,#type{},_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(S, {objectfield,Name,_,_,_}, #'Externalvaluereference'{}=Ref) ->
+ {M,Obj} = get_referenced_type(S, Ref),
+ check_object(S, Obj, object_to_check(S, Obj)),
+ {match,[{Name,Ref#'Externalvaluereference'{module=M}}]};
+match_syntax_type(S, {objectfield,Name,Class,_,_}, {object,_,_}=ObjDef) ->
+ InlinedObjName = list_to_atom(lists:concat([S#state.tname,
+ '_',Name])),
+ ObjSpec = #'Object'{classname=Class,def=ObjDef},
+ CheckedObj = check_object(S, #typedef{typespec=ObjSpec}, ObjSpec),
+ InlObj = #typedef{checked=true,name=InlinedObjName,typespec=CheckedObj},
+ ObjKey = {InlinedObjName, InlinedObjName},
+ insert_once(S, inlined_objects, ObjKey),
+ %% Which module to use here? Could it be other than top_module?
+ asn1_db:dbput(get(top_module), InlinedObjName, InlObj),
+ {match,[{Name,InlObj}]};
+match_syntax_type(_S, {objectfield,Name,_,_,_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(S, {objectsetfield,Name,CDef0,_}, Any) ->
+ CDef = case CDef0 of
+ #type{def=CDef1} -> CDef1;
+ CDef1 -> CDef1
+ end,
+ case match_syntax_objset(S, Any, CDef) of
+ #typedef{typespec=#'ObjectSet'{}=Ts0}=Def ->
+ Ts = check_object(S, Def, Ts0),
+ {match,[{Name,Def#typedef{checked=true,typespec=Ts}}]};
+ _ ->
+ syntax_match_error(S, Any)
+ end;
+match_syntax_type(S, {typefield,Name0,_}, #type{def={pt,_,_}=Def}=Actual) ->
+ %% This is an inlined type. If it is a constructed type, save it in the database.
+ T = check_type(S, #typedef{typespec=Actual}, Actual),
+ #'Externaltypereference'{type=PtName} = element(2, Def),
+ NameList = [PtName,S#state.tname],
+ Name = list_to_atom(asn1ct_gen:list2name(NameList)),
+ NewTDef = #typedef{checked=true,name=Name,typespec=T},
+ asn1_db:dbput(S#state.mname, Name, NewTDef),
+ insert_once(S, parameterized_objects, {Name,type,NewTDef}),
+ {match,[{Name0,NewTDef}]};
+match_syntax_type(S, {typefield,Name,_}, #type{def=#'ObjectClassFieldType'{}}=Actual) ->
+ T = check_type(S, #typedef{typespec=Actual}, Actual),
+ {match,[{Name,ocft_def(T)}]};
+match_syntax_type(S, {typefield,Name,_}, #type{def=#'Externaltypereference'{}=Ref}) ->
+ match_syntax_external(S, Name, Ref);
+match_syntax_type(S, {typefield,Name,_}, #type{def=Def}=Actual) ->
+ T = check_type(S, #typedef{typespec=Actual}, Actual),
+ TypeName = asn1ct_gen:type(asn1ct_gen:get_inner(Def)),
+ {match,[{Name,#typedef{checked=true,name=TypeName,typespec=T}}]};
+match_syntax_type(S, {typefield,Name,_}, #'Externaltypereference'{}=Ref) ->
+ match_syntax_external(S, Name, Ref);
+match_syntax_type(_S, {variabletypevaluefield,Name,_,_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(_S, {variabletypevaluesetfield,Name,_,_}, Any) ->
+ {match,[{Name,Any}]};
+match_syntax_type(_S, _Type, _Actual) ->
+ throw(nomatch).
+
+match_syntax_params(S0, #ptypedef{name=Name}=PtDef,
+ #'Externaltypereference'{module=M,type=N}=ERef0, Args) ->
+ S = S0#state{mname=M,module=load_asn1_module(S0, M),tname=Name},
+ Type = check_type(S, PtDef, #type{def={pt,ERef0,Args}}),
+ ERefName = new_reference_name(N),
+ ERef = #'Externaltypereference'{type=ERefName,module=S0#state.mname},
+ TDef = #typedef{checked=true,name=ERefName,typespec=Type},
+ insert_once(S0, parameterized_objects, {ERefName,type,TDef}),
+ asn1_db:dbput(S0#state.mname, ERef#'Externaltypereference'.type, TDef),
+ ERef.
+
+match_syntax_external(#state{mname=Mname}=S0, Name, Ref0) ->
+ {M,T0} = get_referenced_type(S0, Ref0),
+ Ref1 = Ref0#'Externaltypereference'{module=M},
+ case T0 of
+ #ptypedef{} ->
+ {params,Name,T0,Ref1};
+ #typedef{checked=false}=TDef0 when Mname =/= M ->
+ %% This typedef is an imported type (or maybe a set.asn
+ %% compilation).
+ S = S0#state{mname=M,module=load_asn1_module(S0, M),
+ tname=get_datastr_name(TDef0)},
+ Type = check_type(S, TDef0, TDef0#typedef.typespec),
+ TDef = TDef0#typedef{checked=true,typespec=Type},
+ asn1_db:dbput(M, get_datastr_name(TDef), TDef),
+ {match,[{Name,merged_name(S, Ref1)}]};
+ TDef ->
+ %% This might be a renamed type in a set of specs,
+ %% so rename the ref.
+ Type = asn1ct:get_name_of_def(TDef),
+ Ref = Ref1#'Externaltypereference'{type=Type},
+ {match,[{Name,Ref}]}
+ end.
+
+match_syntax_objset(_S, {element_set,_,_}=Set, ClassDef) ->
+ make_objset(ClassDef, Set);
+match_syntax_objset(S, #'Externaltypereference'{}=Ref, _) ->
+ {_,T} = get_referenced_type(S, Ref),
+ T;
+match_syntax_objset(S, #'Externalvaluereference'{}=Ref, _) ->
+ {_,T} = get_referenced_type(S, Ref),
+ T;
+match_syntax_objset(_, [_|_]=Set, ClassDef) ->
+ make_objset(ClassDef, Set);
+match_syntax_objset(S, {object,definedsyntax,Words}, ClassDef) ->
+ case Words of
+ [Word] ->
+ match_syntax_objset_1(S, Word, ClassDef);
+ [_|_] ->
+ %% More than one word does not make sense.
+ none
+ end;
+match_syntax_objset(S, #type{def=#'Externaltypereference'{}=Set}, ClassDef) ->
+ match_syntax_objset(S, Set, ClassDef);
+match_syntax_objset(_, #type{}, _) ->
+ none.
+
+match_syntax_objset_1(S, {setting,_,Set}, ClassDef) ->
+ %% Word that starts with an uppercase letter.
+ match_syntax_objset(S, Set, ClassDef);
+match_syntax_objset_1(S, {word_or_setting,_,Set}, ClassDef) ->
+ %% Word in uppercase/hyphens only.
+ match_syntax_objset(S, Set, ClassDef);
+match_syntax_objset_1(S, #type{def={'TypeFromObject', {object,Object}, FNs}},
+ ClassDef) ->
+ Set = extract_field(S, Object, FNs),
+ [_|_] = Set,
+ #typedef{checked=true,typespec=#'ObjectSet'{class=ClassDef,set=Set}};
+match_syntax_objset_1(_, #type{def=#'ObjectClassFieldType'{}}=Set, ClassDef) ->
+ make_objset(ClassDef, Set);
+match_syntax_objset_1(_, {object,_,_}=Object, ClassDef) ->
+ make_objset(ClassDef, [Object]).
+
+make_objset(ClassDef, Set) ->
+ #typedef{typespec=#'ObjectSet'{class=ClassDef,set=Set}}.
+
+syntax_match_error(S) ->
+ asn1_error(S, syntax_nomatch).
+
+syntax_match_error(S, What0) ->
+ What = printable_string(What0),
+ asn1_error(S, {syntax_nomatch,What}).
+
+printable_string(Def) ->
+ printable_string_1(Def).
+
+printable_string_1({word_or_setting,_,Def}) ->
+ printable_string_1(Def);
+printable_string_1({value_tag,V}) ->
+ printable_string_1(V);
+printable_string_1({#seqtag{val=Val1},Val2}) ->
+ atom_to_list(Val1) ++ " " ++ printable_string_1(Val2);
+printable_string_1(#type{def=Def}) ->
+ atom_to_list(asn1ct_gen:get_inner(Def));
+printable_string_1(#'Externaltypereference'{type=Type}) ->
+ atom_to_list(Type);
+printable_string_1(#'Externalvaluereference'{value=Type}) ->
+ atom_to_list(Type);
+printable_string_1({Atom,Line}) when is_atom(Atom), is_integer(Line) ->
+ q(Atom);
+printable_string_1({object,definedsyntax,L}) ->
+ q(string:join([printable_string_1(Item) || Item <- L], " "));
+printable_string_1([_|_]=Def) ->
+ case lists:all(fun is_integer/1, Def) of
+ true ->
+ lists:flatten(io_lib:format("~p", [Def]));
+ false ->
+ q(string:join([printable_string_1(Item) || Item <- Def], " "))
+ end;
+printable_string_1(Def) ->
+ lists:flatten(io_lib:format("~p", [Def])).
+
+q(S) ->
+ lists:concat(["\"",S,"\""]).
+
check_defaultfields(S, Fields, ClassFields) ->
Present = ordsets:from_list([F || {F,_} <- Fields]),
Mandatory0 = get_mandatory_class_fields(ClassFields),
Mandatory = ordsets:from_list(Mandatory0),
All = ordsets:from_list([element(2, F) || F <- ClassFields]),
- #state{type=T,tname=Obj} = S,
+ #state{tname=Obj} = S,
case ordsets:subtract(Present, All) of
[] ->
ok;
[_|_]=Invalid ->
- asn1_error(S, T, {invalid_fields,Invalid,Obj})
+ asn1_error(S, {invalid_fields,Invalid,Obj})
end,
case ordsets:subtract(Mandatory, Present) of
[] ->
check_defaultfields_1(S, Fields, ClassFields, []);
[_|_]=Missing ->
- asn1_error(S, T, {missing_mandatory_fields,Missing,Obj})
+ asn1_error(S, {missing_mandatory_fields,Missing,Obj})
end.
check_defaultfields_1(_S, [], _ClassFields, Acc) ->
{object,defaultsyntax,lists:reverse(Acc)};
check_defaultfields_1(S, [{FName,Spec}|Fields], ClassFields, Acc) ->
CField = lists:keyfind(FName, 2, ClassFields),
- {NewField,RestFields} =
- convert_to_defaultfield(S, FName, [Spec|Fields], CField),
- check_defaultfields_1(S, RestFields, ClassFields, [NewField|Acc]).
+ {match,Match} = match_syntax_type(S, CField, Spec),
+ check_defaultfields_1(S, Fields, ClassFields, Match++Acc).
-convert_definedsyntax(_S,[],[],_ClassFields,Acc) ->
- lists:reverse(Acc);
-convert_definedsyntax(S,Fields,WithSyntax,ClassFields,Acc) ->
- {MatchedField,RestFields,RestWS} =
- match_field(S,Fields,WithSyntax,ClassFields),
- if
- is_list(MatchedField) ->
- convert_definedsyntax(S,RestFields,RestWS,ClassFields,
- lists:append(MatchedField,Acc));
- true ->
- convert_definedsyntax(S,RestFields,RestWS,ClassFields,
- [MatchedField|Acc])
- end.
+get_mandatory_class_fields(ClassFields) ->
+ [element(2, F) || F <- ClassFields,
+ is_mandatory_class_field(F)].
-get_mandatory_class_fields([{fixedtypevaluefield,Name,_,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{objectfield,Name,_,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{objectsetfield,Name,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{typefield,Name,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{variabletypevaluefield,Name,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([{variabletypevaluesetfield,
- Name,_,'MANDATORY'}|T]) ->
- [Name|get_mandatory_class_fields(T)];
-get_mandatory_class_fields([_|T]) ->
- get_mandatory_class_fields(T);
-get_mandatory_class_fields([]) -> [].
-
-match_field(S,Fields,WithSyntax,ClassFields) ->
- match_field(S,Fields,WithSyntax,ClassFields,[]).
-
-match_field(S,Fields,[W|Ws],ClassFields,Acc) when is_list(W) ->
- case catch(match_optional_field(S,Fields,W,ClassFields,[])) of
- {'EXIT',_} ->
- match_field(Fields,Ws,ClassFields,Acc); %% add S
-%% {[Result],RestFields} ->
-%% {Result,RestFields,Ws};
- {Result,RestFields} when is_list(Result) ->
- {Result,RestFields,Ws};
- _ ->
- match_field(S,Fields,Ws,ClassFields,Acc)
- end;
-match_field(S,Fields,WithSyntax,ClassFields,_Acc) ->
- match_mandatory_field(S,Fields,WithSyntax,ClassFields,[]).
-
-match_optional_field(_S,RestFields,[],_,Ret) ->
- {Ret,RestFields};
-%% An additional optional field within an optional field
-match_optional_field(S,Fields,[W|Ws],ClassFields,Ret) when is_list(W) ->
- case catch match_optional_field(S,Fields,W,ClassFields,[]) of
- {'EXIT',_} when length(Ws) > 0 ->
- match_optional_field(S,Fields,Ws,ClassFields,Ret);
- {'EXIT',_} ->
- {Ret,Fields};
- {asn1,{optional_matcherror,_,_}} when length(Ws) > 0 ->
- match_optional_field(S,Fields,Ws,ClassFields,Ret);
- {asn1,{optional_matcherror,_,_}} ->
- {Ret,Fields};
- {OptionalField,RestFields} ->
- match_optional_field(S,RestFields,Ws,ClassFields,
- lists:append(OptionalField,Ret))
- end;
-%% identify and skip word
-match_optional_field(S,[{_,_,#'Externaltypereference'{type=WorS}}|Rest],
- [WorS|Ws],ClassFields,Ret) ->
- match_optional_field(S,Rest,Ws,ClassFields,Ret);
-match_optional_field(S,[],_,ClassFields,Ret) ->
- match_optional_field(S,[],[],ClassFields,Ret);
-%% identify and skip comma
-match_optional_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) ->
- match_optional_field(S,Rest,Ws,ClassFields,Ret);
-%% am optional setting inside another optional setting may be "double-listed"
-match_optional_field(S,[Setting],DefinedSyntax,ClassFields,Ret)
- when is_list(Setting) ->
- match_optional_field(S,Setting,DefinedSyntax,ClassFields,Ret);
-%% identify and save field data
-match_optional_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Ret) ->
- ?dbg("matching optional field setting: ~p with user friendly syntax: ~p~n",[Setting,W]),
- WorS =
- case Setting of
- Type when is_record(Type,type) -> Type;
- {'ValueFromObject',_,_} -> Setting;
- {object,_,_} -> Setting;
- {_,_,WordOrSetting} -> WordOrSetting;
- Other -> Other
- end,
- case lists:keysearch(W,2,ClassFields) of
- false ->
- throw({asn1,{optional_matcherror,WorS,W}});
- {value,CField} ->
- {NewField,RestFields} =
- convert_to_defaultfield(S,W,[WorS|Rest],CField),
- match_optional_field(S,RestFields,Ws,ClassFields,[NewField|Ret])
- end;
-match_optional_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Ret) ->
- throw({asn1,{optional_matcherror,WorS,W}}).
-
-match_mandatory_field(_S,[],[],_,[Acc]) ->
- {Acc,[],[]};
-match_mandatory_field(_S,[],[],_,Acc) ->
- {Acc,[],[]};
-match_mandatory_field(S,[],[H|T],CF,Acc) when is_list(H) ->
- match_mandatory_field(S,[],T,CF,Acc);
-match_mandatory_field(_S,[],WithSyntax,_,_Acc) ->
- throw({asn1,{mandatory_matcherror,[],WithSyntax}});
-%match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,[Acc]) when is_list(W) ->
-match_mandatory_field(_S,Fields,WithSyntax=[W|_Ws],_ClassFields,Acc) when is_list(W), length(Acc) >= 1 ->
- {Acc,Fields,WithSyntax};
-%% identify and skip word
-%%match_mandatory_field(S,[{_,_,WorS}|Rest],
-match_mandatory_field(S,[{_,_,#'Externaltypereference'{type=WorS}}|Rest],
- [WorS|Ws],ClassFields,Acc) ->
- match_mandatory_field(S,Rest,Ws,ClassFields,Acc);
-%% identify and skip comma
-match_mandatory_field(S,[{WorS,_}|Rest],[{WorS,_}|Ws],ClassFields,Ret) ->
- match_mandatory_field(S,Rest,Ws,ClassFields,Ret);
-%% identify and save field data
-match_mandatory_field(S,[Setting|Rest],[{_,W}|Ws],ClassFields,Acc) ->
- ?dbg("matching field setting: ~p with user friendly syntax: ~p~n",[Setting,W]),
- WorS =
- case Setting of
- {object,_,_} -> Setting;
- {_,_,WordOrSetting} -> WordOrSetting;
- Type when is_record(Type,type) -> Type;
- Other -> Other
- end,
- case lists:keysearch(W,2,ClassFields) of
- false ->
- throw({asn1,{mandatory_matcherror,WorS,W}});
- {value,CField} ->
- {NewField,RestFields} =
- convert_to_defaultfield(S,W,[WorS|Rest],CField),
- match_mandatory_field(S,RestFields,Ws,ClassFields,[NewField|Acc])
- end;
-
-match_mandatory_field(_S,[WorS|_Rest],[W|_Ws],_ClassFields,_Acc) ->
- throw({asn1,{mandatory_matcherror,WorS,W}}).
-
-%% Converts a field of an object from defined syntax to default syntax
-%% A field may be a type, a fixed type value, an object, an objectset,
-%%
-convert_to_defaultfield(S,ObjFieldName,[OFS|RestOFS],CField)->
- ?dbg("convert field: ~p of type: ~p~n",[ObjFieldName,element(1,CField)]),
- CurrMod = S#state.mname,
- Strip_value_tag =
- fun({value_tag,ValueSetting}) -> ValueSetting;
- (VS) -> VS
- end,
- ObjFieldSetting = Strip_value_tag(OFS),
- RestSettings = [Strip_value_tag(X)||X <- RestOFS],
- case element(1,CField) of
- typefield ->
- TypeDef=
- case ObjFieldSetting of
- TypeRec when is_record(TypeRec,type) -> TypeRec#type.def;
- TDef when is_record(TDef,typedef) ->
- TDef#typedef{checked=true,
- typespec=check_type(S,TDef,
- TDef#typedef.typespec)};
- _ -> ObjFieldSetting
- end,
- {Type,SettingsLeft} =
- if
- is_record(TypeDef,typedef) -> {TypeDef,RestSettings};
- is_record(TypeDef,'ObjectClassFieldType') ->
- T=check_type(S,#typedef{typespec=ObjFieldSetting},ObjFieldSetting),
- {oCFT_def(S,T),RestSettings};
-% #typedef{checked=true,name=Name,typespec=IT};
- is_tuple(TypeDef), element(1,TypeDef) == pt ->
- %% this is an inlined type. If constructed
- %% type save in data base
- T=check_type(S,#typedef{typespec=ObjFieldSetting},ObjFieldSetting),
- #'Externaltypereference'{type=PtName} =
- element(2,TypeDef),
- NameList = [PtName,S#state.tname],
- NewName = list_to_atom(asn1ct_gen:list2name(NameList)),
- NewTDef=#typedef{checked=true,name=NewName,
- typespec=T},
- asn1_db:dbput(S#state.mname,NewName,NewTDef),
- %%asn1ct_gen:insert_once(parameterized_objects,{NewName,type,NewTDef}),
- insert_once(S,parameterized_objects,
- {NewName,type,NewTDef}),
- {NewTDef,RestSettings};
- is_tuple(TypeDef), element(1,TypeDef)=='SelectionType' ->
- T=check_type(S,#typedef{typespec=ObjFieldSetting},
- ObjFieldSetting),
- Name = type_name(S,T),
- {#typedef{checked=true,name=Name,typespec=T},RestSettings};
- true ->
- case asn1ct_gen:type(asn1ct_gen:get_inner(TypeDef)) of
- ERef = #'Externaltypereference'{module=CurrMod} ->
- {RefMod,T} = get_referenced_type(S,ERef),
- check_and_save(S,ERef#'Externaltypereference'{module=RefMod},T,RestSettings);
-
- ERef = #'Externaltypereference'{} ->
- {RefMod,T} = get_referenced_type(S,ERef),
- check_and_save(S,ERef#'Externaltypereference'{module=RefMod},T,RestSettings);
- Bif when Bif=={primitive,bif};Bif=={constructed,bif} ->
- T = check_type(S,#typedef{typespec=ObjFieldSetting},
- ObjFieldSetting),
- {#typedef{checked=true,name=Bif,typespec=T},RestSettings};
- _ ->
- %this case should not happen any more
- {Mod,T} =
- get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=ObjFieldSetting}),
- case Mod of
- CurrMod ->
- {T,RestSettings};
- ExtMod ->
- #typedef{name=Name} = T,
- {T#typedef{name={ExtMod,Name}},RestSettings}
- end
- end
- end,
- {{ObjFieldName,Type},SettingsLeft};
- fixedtypevaluefield ->
- case ObjFieldName of
- Val when is_atom(Val) ->
- %% ObjFieldSetting can be a value,an objectidentifiervalue,
- %% an element in an enumeration or namednumberlist etc.
- ValRef =
- case ObjFieldSetting of
- ValSetting=#'Externalvaluereference'{} ->
- ValSetting;
- {'ValueFromObject',{_,ObjRef},FieldName} ->
- {_,Object} = get_referenced_type(S,ObjRef),
- ChObject = check_object(S,Object,
- Object#typedef.typespec),
- get_fieldname_element(S,Object#typedef{typespec=ChObject},
- FieldName);
- ValSetting = #valuedef{} ->
- ValSetting;
- ValSetting ->
- #valuedef{type=element(3,CField),
- value=ValSetting,
- module=S#state.mname}
- end,
- ?dbg("fixedtypevaluefield ValRef: ~p~n",[ValRef]),
- case ValRef of
- #valuedef{} ->
- {{ObjFieldName,check_value(S,ValRef)},RestSettings};
- _ ->
- ValDef =
- case catch get_referenced_type(S,ValRef) of
- {error,_} ->
- NewValDef =
- #valuedef{name=Val,
- type=element(3,CField),
- value=ObjFieldSetting,
- module=S#state.mname},
- check_value(S,NewValDef);
- {M,VDef} when is_record(VDef,valuedef) ->
- check_value(update_state(S,M),
- %%S#state{mname=M},
- VDef);%% XXX
- {M,VDef} ->
- check_value(update_state(S,M),
- %%S#state{mname=M},
- #valuedef{name=Val,
- type=element(3,CField),
- value=VDef,
- module=M})
- end,
- {{ObjFieldName,ValDef},RestSettings}
- end;
- Val ->
- {{ObjFieldName,Val},RestSettings}
- end;
- fixedtypevaluesetfield ->
- {{ObjFieldName,ObjFieldSetting},RestSettings};
- objectfield ->
- CheckObject =
- fun(O) ->
- O#typedef{checked=true,typespec=
- check_object(S,O,O#typedef.typespec)}
- end,
- ObjectSpec =
- case ObjFieldSetting of
- Ref when is_record(Ref,'Externalvaluereference') ->
- %% The object O might be a #valuedef{} if
- %% e.g. the definition looks like
- %% myobj SOMECLASS ::= referencedObject
- {M,O} = get_referenced_type(S,Ref),
- check_object(S,O,object_to_check(O)),
- Ref#'Externalvaluereference'{module=M};
-
- {'ValueFromObject',{_,ObjRef},FieldName} ->
- %% This is an ObjectFromObject
- {_,Object} = get_referenced_type(S,ObjRef),
- ChObject = check_object(S,Object,
- Object#typedef.typespec),
- ObjFromObj=
- get_fieldname_element(S,Object#typedef{
- typespec=ChObject},
- FieldName),
- CheckObject(ObjFromObj);
- ObjDef={object,_,_} ->
- %% An object defined inlined in another object
- %% class is an objectfield, that implies that
- %% {objectsetfield,TypeFieldName,DefinedObjecClass,
- %% OptionalitySpec}
- %% DefinedObjecClass = #'Externaltypereference'{}|
- %% 'TYPE-IDENTIFIER' | 'ABSTRACT-SYNTAX'
- ClassName = element(3,CField),
- InlinedObjName=
- list_to_atom(lists:concat([S#state.tname]++
- ['_',ObjFieldName])),
-
- ObjSpec = #'Object'{classname=ClassName,
- def=ObjDef},
- CheckedObj=
- check_object(S,#typedef{typespec=ObjSpec},ObjSpec),
- InlObj = #typedef{checked=true,name=InlinedObjName,
- typespec=CheckedObj},
- ObjKey = {InlinedObjName,InlinedObjName},
- %% asn1ct_gen:insert_once(inlined_objects,ObjKey),
- insert_once(S,inlined_objects,ObjKey),
- %% Which module to use here? Could it be other than top_module ?
- %% asn1_db:dbput(S#state.mname,InlinedObjName,InlObj),
- asn1_db:dbput(get(top_module),InlinedObjName,InlObj),
- InlObj;
- #type{def=Eref} when is_record(Eref,'Externaltypereference') ->
- {_,O} = get_referenced_type(S,Eref),
- CheckObject(O);
- Other ->
- {_,O} = get_referenced_type(S,#'Externaltypereference'{module=S#state.mname,type=Other}),
- CheckObject(O)
- end,
- {{ObjFieldName,ObjectSpec},RestSettings};
- variabletypevaluefield ->
- {{ObjFieldName,ObjFieldSetting},RestSettings};
- variabletypevaluesetfield ->
- {{ObjFieldName,ObjFieldSetting},RestSettings};
-%% objectset_or_fixedtypevalueset_field ->
-%% ok;
- objectsetfield ->
- ObjSetSpec = get_objectset_def(S,ObjFieldSetting,CField),
- ?dbg("objectsetfield, ObjSetSpec:~p~n",[ObjSetSpec]),
- {{ObjFieldName,
- ObjSetSpec#typedef{checked=true,
- typespec=check_object(S,ObjSetSpec,
- ObjSetSpec#typedef.typespec)}},RestSettings}
- end.
-
-get_objectset_def(S,Ref,CField)
- when is_record(Ref,'Externaltypereference');
- is_record(Ref,'Externalvaluereference') ->
- {_M,T}=get_referenced_type(S,Ref),
- get_objectset_def2(S,T,CField);
-get_objectset_def(S,ObjectList,CField) when is_list(ObjectList) ->
- %% an objctset defined in the object,though maybe
- %% parsed as a SequenceOfValue
- %% The ObjectList may be a list of references to
- %% objects, a ValueFromObject
- ?dbg("objectsetfield: ~p~n",[CField]),
- get_objectset_def2(S,ObjectList,CField);
-get_objectset_def(S,'EXTENSIONMARK',CField) ->
- ?dbg("objectsetfield: ~p~n",[CField]),
- get_objectset_def2(S,['EXTENSIONMARK'],CField);
-get_objectset_def(_S,ObjFieldSetting={'SingleValue',_},CField) ->
- %% a Union of defined objects
- ?dbg("objectsetfield, SingleValue~n",[]),
- union_of_defed_objs(CField,ObjFieldSetting);
-get_objectset_def(_S,ObjFieldSetting={{'SingleValue',_},_},CField) ->
- %% a Union of defined objects
- ?dbg("objectsetfield, SingleValue~n",[]),
- union_of_defed_objs(CField,ObjFieldSetting);
-get_objectset_def(S,{object,_,[#type{def={'TypeFromObject',
- {object,RefedObj},
- FieldName}}]},_CField) ->
- %% This case occurs when an ObjectSetFromObjects
- %% production is used
- {_M,Def} = get_referenced_type(S,RefedObj),
- get_fieldname_element(S,Def,FieldName);
-get_objectset_def(S,{object,_,[{setting,_,ERef}]},CField)
- when is_record(ERef,'Externaltypereference') ->
- {_,T} = get_referenced_type(S,ERef),
- get_objectset_def2(S,T,CField);
-get_objectset_def(S,#type{def=ERef},_CField)
- when is_record(ERef,'Externaltypereference') ->
- {_,T} = get_referenced_type(S,ERef),
- T;
-get_objectset_def(S,ObjFieldSetting,CField)
- when is_atom(ObjFieldSetting) ->
- ERef = #'Externaltypereference'{module=S#state.mname,
- type=ObjFieldSetting},
- {_,T} = get_referenced_type(S,ERef),
- get_objectset_def2(S,T,CField).
-
-get_objectset_def2(_S,T = #typedef{typespec=#'Object'{}},_CField) ->
- #typedef{typespec=#'Object'{classname=Class,def=Def}} = T,
- T#typedef{typespec=#'ObjectSet'{class=Class,set=[Def]}};
-get_objectset_def2(_S,Set,CField) when is_list(Set) ->
- {_,_,Type,_} = CField,
- ClassDef = Type#type.def,
- #typedef{typespec=#'ObjectSet'{class=ClassDef,
- set=Set}};
-get_objectset_def2(_S,T = #typedef{typespec=#'ObjectSet'{}},_CField) ->
- T;
-get_objectset_def2(S,T,_CField) ->
- asn1ct:warning("get_objectset_def2: uncontrolled object set structure:~n~p~n",
- [T],S,"get_objectset_def2: uncontrolled object set structure").
-
-type_name(S,#type{def=Def}) ->
- CurrMod = S#state.mname,
- case asn1ct_gen:type(asn1ct_gen:get_inner(Def)) of
- #'Externaltypereference'{module=CurrMod,type=Name} ->
- Name;
- #'Externaltypereference'{module=Mod,type=Name} ->
- {Mod,Name};
- Bif when Bif=={primitive,bif};Bif=={constructed,bif} ->
- Bif
- end.
+is_mandatory_class_field({fixedtypevaluefield,_,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({objectfield,_,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({objectsetfield,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({typefield,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({variabletypevaluefield,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field({variabletypevaluesetfield,_,_,'MANDATORY'}) ->
+ true;
+is_mandatory_class_field(_) ->
+ false.
merged_name(#state{inputmodules=[]},ERef) ->
ERef;
@@ -2013,38 +1649,18 @@ merged_name(S,ERef=#'Externaltypereference'{module=M}) ->
ERef
end.
-oCFT_def(S,T) ->
- case get_OCFT_inner(S,T) of
- ERef=#'Externaltypereference'{} -> ERef;
- {Name,Type} -> #typedef{checked=true,name=Name,typespec=Type};
- 'ASN1_OPEN_TYPE' ->
- #typedef{checked=true,typespec=T#type{def='ASN1_OPEN_TYPE'}}
- end.
-
-get_OCFT_inner(_S,T) ->
-% Module=S#state.mname,
- Def = T#type.def,
- case Def#'ObjectClassFieldType'.type of
+ocft_def(#type{def=#'ObjectClassFieldType'{type=OCFT}}=T) ->
+ case OCFT of
{fixedtypevaluefield,_,InnerType} ->
case asn1ct_gen:type(asn1ct_gen:get_inner(InnerType#type.def)) of
- Bif when Bif=={primitive,bif};Bif=={constructed,bif} ->
- {Bif,InnerType};
- ERef = #'Externaltypereference'{} ->
- ERef
+ Bif when Bif =:= {primitive,bif}; Bif =:= {constructed,bif} ->
+ #typedef{checked=true,name=Bif,typespec=InnerType};
+ #'Externaltypereference'{}=Ref ->
+ Ref
end;
- 'ASN1_OPEN_TYPE' -> 'ASN1_OPEN_TYPE'
+ 'ASN1_OPEN_TYPE' ->
+ #typedef{checked=true,typespec=T#type{def='ASN1_OPEN_TYPE'}}
end.
-
-
-
-union_of_defed_objs({_,_,_ObjClass=#type{def=ClassDef},_},ObjFieldSetting) ->
- #typedef{typespec=#'ObjectSet'{class = ClassDef,
- set = ObjFieldSetting}};
-union_of_defed_objs({_,_,DefObjClassRef,_},ObjFieldSetting)
- when is_record(DefObjClassRef,'Externaltypereference') ->
- #typedef{typespec=#'ObjectSet'{class = DefObjClassRef,
- set = ObjFieldSetting}}.
-
check_value(OldS,V) when is_record(V,pvaluesetdef) ->
#pvaluesetdef{checked=Checked,type=Type} = V,
@@ -2068,8 +1684,7 @@ check_value(OldS,V) when is_record(V,typedef) ->
#typedef{typespec=TS} = V,
case TS of
#'ObjectSet'{class=ClassRef} ->
- {RefM,TSDef} = get_referenced_type(OldS,ClassRef),
- %%IsObjectSet(TSDef);
+ {_RefM,TSDef} = get_referenced_type(OldS, ClassRef),
case TSDef of
#classdef{} -> throw({objectsetdef});
#typedef{typespec=#type{def=Eref}} when
@@ -2077,14 +1692,12 @@ check_value(OldS,V) when is_record(V,typedef) ->
%% This case if the class reference is a defined
%% reference to class
check_value(OldS,V#typedef{typespec=TS#'ObjectSet'{class=Eref}});
- #typedef{} ->
+ #typedef{typespec=HostType} ->
% an ordinary value set with a type in #typedef.typespec
- ValueSet = TS#'ObjectSet'.set,
- Type=check_type(OldS,TSDef,TSDef#typedef.typespec),
- Value = check_value(OldS,#valuedef{type=Type,
- value=ValueSet,
- module=RefM}),
- {valueset,Type#type{constraint=Value#valuedef.value}}
+ ValueSet0 = TS#'ObjectSet'.set,
+ Constr = check_constraints(OldS, HostType, [ValueSet0]),
+ Type = check_type(OldS,TSDef,TSDef#typedef.typespec),
+ {valueset,Type#type{constraint=Constr}}
end;
_ ->
throw({objectsetdef})
@@ -2104,11 +1717,11 @@ check_value(S, #valuedef{}=V) ->
end.
check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
- #valuedef{name=Name,type=Vtype,value=Value,module=ModName} = V0,
+ #valuedef{name=Name,type=Vtype0,value=Value,module=ModName} = V0,
V = V0#valuedef{checked=true},
+ Vtype = check_type(S0, #typedef{name=Name,typespec=Vtype0},Vtype0),
Def = Vtype#type.def,
- Constr = Vtype#type.constraint,
- S1 = S0#state{type=Vtype,tname=Def,value=V0,vname=Name},
+ S1 = S0#state{tname=Def},
SVal = update_state(S1, ModName),
case Def of
#'Externaltypereference'{type=RecName}=Ext ->
@@ -2116,9 +1729,8 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
	    %% If V isn't a value but an object, Type is a #classdef{}
S2 = update_state(S1, RefM),
case Type of
- #classdef{} ->
- throw({objectdef});
- #typedef{typespec=TypeSpec} ->
+ #typedef{typespec=TypeSpec0}=TypeDef ->
+ TypeSpec = check_type(S2, TypeDef, TypeSpec0),
S3 = case is_contextswitchtype(Type) of
true ->
S2;
@@ -2135,7 +1747,7 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
V#valuedef{type=Type}),
V#valuedef{value=CheckedVal}
end;
- 'ANY' ->
+ 'ASN1_OPEN_TYPE' ->
{opentypefieldvalue,ANYType,ANYValue} = Value,
CheckedV = check_value(SVal,#valuedef{name=Name,
type=ANYType,
@@ -2143,19 +1755,12 @@ check_valuedef(#state{recordtopname=TopName}=S0, V0) ->
module=ModName}),
V#valuedef{value=CheckedV#valuedef.value};
'INTEGER' ->
- ok = validate_integer(SVal, Value, [], Constr),
V#valuedef{value=normalize_value(SVal, Vtype, Value, [])};
- {'INTEGER',NamedNumberList} ->
- ok = validate_integer(SVal, Value, NamedNumberList, Constr),
+ {'INTEGER',_NamedNumberList} ->
V#valuedef{value=normalize_value(SVal, Vtype, Value, [])};
#'SEQUENCE'{} ->
- {ok,SeqVal} = convert_external(SVal, Value),
+ {ok,SeqVal} = convert_external(SVal, Vtype, Value),
V#valuedef{value=normalize_value(SVal, Vtype, SeqVal, TopName)};
- {'SelectionType',SelName,SelT} ->
- CheckedT = check_selectiontype(SVal, SelName, SelT),
- NewV = V#valuedef{type=CheckedT},
- SelVDef = check_value(S1#state{value=NewV}, NewV),
- V#valuedef{value=SelVDef#valuedef.value};
_ ->
V#valuedef{value=normalize_value(SVal, Vtype, Value, TopName)}
end.
@@ -2169,179 +1774,97 @@ is_contextswitchtype(#typedef{name='CHARACTER STRING'}) ->
is_contextswitchtype(_) ->
false.
-% validate_integer(S,{identifier,Pos,Id},NamedNumberList,Constr) ->
-% case lists:keysearch(Id,1,NamedNumberList) of
-% {value,_} -> ok;
-% false -> error({value,"unknown NamedNumber",S})
-% end;
-%% This case occurs when there is a valuereference
-%% validate_integer(S=#state{mname=M},
-%% #'Externalvaluereference'{module=M,value=Id}=Ref,
-validate_integer(S,#'Externalvaluereference'{value=Id}=Ref,
- NamedNumberList,Constr) ->
- case lists:keysearch(Id,1,NamedNumberList) of
- {value,_} -> ok;
- false -> validate_integer_ref(S,Ref,NamedNumberList,Constr)
- %%error({value,"unknown NamedNumber",S})
- end;
-validate_integer(S,Id,NamedNumberList,Constr) when is_atom(Id) ->
- case lists:keysearch(Id,1,NamedNumberList) of
- {value,_} -> ok;
- false -> validate_integer_ref(S,Id,NamedNumberList,Constr)
- %error({value,"unknown NamedNumber",S})
+%%%
+%%% Start of OBJECT IDENTIFIER/RELATIVE-OID validation.
+%%%
+
+validate_objectidentifier(S, OidType, #'Externalvaluereference'{}=Id) ->
+ %% Must be an OBJECT IDENTIFIER or RELATIVE-OID depending on OidType.
+ get_oid_value(S, OidType, false, Id);
+validate_objectidentifier(S, OidType, {'ValueFromObject',{object,Obj},Fields}) ->
+ %% Must be an OBJECT IDENTIFIER/RELATIVE-OID depending on OidType.
+ case extract_field(S, Obj, Fields) of
+ #valuedef{checked=true,value=Value,type=Type} when is_tuple(Value) ->
+ _ = get_oid_type(S, OidType, Type),
+ Value;
+ _ ->
+ asn1_error(S, {illegal_oid,OidType})
end;
-validate_integer(_S,Value,_NamedNumberList,Constr) when is_integer(Value) ->
- check_integer_range(Value,Constr).
-
-validate_integer_ref(S,Id,_,_) when is_atom(Id) ->
- error({value,"unknown integer referens",S});
-validate_integer_ref(S,Ref,NamedNumberList,Constr) ->
- case get_referenced_type(S,Ref) of
- {M,V} when is_record(V,valuedef) ->
- NewS = update_state(S,M),
- case check_value(NewS,V) of
- #valuedef{type=#type{def='INTEGER'},value=Value} ->
- validate_integer(NewS,Value,NamedNumberList,Constr);
- _Err -> error({value,"unknown integer referens",S})
+validate_objectidentifier(S, OidType,
+ [{#seqtag{module=Mod,pos=Pos,val=Atom},Val}]) ->
+ %% This case is when an OBJECT IDENTIFIER value has been parsed as a
+ %% SEQUENCE value.
+ Rec = #'Externalvaluereference'{pos=Pos,
+ module=Mod,
+ value=Atom},
+ validate_oid(S, OidType, [Rec,Val], []);
+validate_objectidentifier(S, OidType, [_|_]=L0) ->
+ validate_oid(S, OidType, L0, []);
+validate_objectidentifier(S, OidType, _) ->
+ asn1_error(S, {illegal_oid,OidType}).
+
+get_oid_value(S, OidType, AllowInteger, #'Externalvaluereference'{}=Id) ->
+ case get_referenced_type(S, Id) of
+ {_,#valuedef{checked=Checked,type=Type,value=V}} ->
+ case get_oid_type(S, OidType, Type) of
+ 'INTEGER' when not AllowInteger ->
+ asn1_error(S, {illegal_oid,OidType});
+ _ when Checked ->
+ V;
+ 'INTEGER' ->
+ V;
+ _ ->
+ validate_objectidentifier(S, OidType, V)
end;
_ ->
- error({value,"unknown integer referens",S})
+ asn1_error(S, {illegal_oid,OidType})
end.
-
-
-
-check_integer_range(_Int, Constr) when is_list(Constr) ->
- ok.
-%%------------
-%% This can be removed when the old parser is removed
-%% The function removes 'space' atoms from the list
-
-is_space_list([H],Acc) ->
- lists:reverse([H|Acc]);
-is_space_list([H,space|T],Acc) ->
- is_space_list(T,[H|Acc]);
-is_space_list([],Acc) ->
- lists:reverse(Acc);
-is_space_list([H|T],Acc) ->
- is_space_list(T,[H|Acc]).
-
-validate_objectidentifier(S,OID,ERef,C)
- when is_record(ERef,'Externalvaluereference') ->
- validate_objectidentifier(S,OID,[ERef],C);
-validate_objectidentifier(S,OID,Tup,C) when is_tuple(Tup) ->
- validate_objectidentifier(S,OID,tuple_to_list(Tup),C);
-validate_objectidentifier(S,OID,L,_) ->
- NewL = is_space_list(L,[]),
- case validate_objectidentifier1(S,OID,NewL) of
- NewL2 when is_list(NewL2) ->{ok,list_to_tuple(NewL2)};
- Other -> {ok,Other}
- end.
-
-validate_objectidentifier1(S, OID, [Id|T])
- when is_record(Id,'Externalvaluereference') ->
- case catch get_referenced_type(S,Id) of
- {M,V} when is_record(V,valuedef) ->
- NewS = update_state(S,M),
- case check_value(NewS,V) of
- #valuedef{type=#type{def=ERef},checked=true,
- value=Value} when is_tuple(Value) ->
- case is_object_id(OID,NewS,ERef) of
- true ->
- %% T must be a RELATIVE-OID
- validate_oid(true,NewS, rel_oid, T, lists:reverse(tuple_to_list(Value)));
- _ ->
- error({value, {"illegal "++to_string(OID),[Id|T]}, S})
- end;
- _ ->
- error({value, {"illegal "++to_string(OID),[Id|T]}, S})
- end;
- _ ->
- validate_oid(true,S, OID, [Id|T], [])
- end;
-validate_objectidentifier1(S,OID,V) ->
- validate_oid(true,S,OID,V,[]).
-
-validate_oid(false, S, OID, V, Acc) ->
- error({value, {"illegal "++to_string(OID), V,Acc}, S});
-validate_oid(_,_, _, [], Acc) ->
- lists:reverse(Acc);
-validate_oid(_, S, OID, [Value|Vrest], Acc) when is_integer(Value) ->
- validate_oid(valid_objectid(OID,Value,Acc),S, OID, Vrest, [Value|Acc]);
-validate_oid(_, S, OID, [{'NamedNumber',_Name,Value}|Vrest], Acc)
+validate_oid(S, OidType, [], Acc) ->
+ Oid = lists:reverse(Acc),
+ validate_oid_path(S, OidType, Oid),
+ list_to_tuple(Oid);
+validate_oid(S, OidType, [Value|Vrest], Acc) when is_integer(Value) ->
+ validate_oid(S, OidType, Vrest, [Value|Acc]);
+validate_oid(S, OidType, [{'NamedNumber',_Name,Value}|Vrest], Acc)
when is_integer(Value) ->
- validate_oid(valid_objectid(OID,Value,Acc), S, OID, Vrest, [Value|Acc]);
-validate_oid(_, S, OID, [Id|Vrest], Acc)
- when is_record(Id,'Externalvaluereference') ->
- case catch get_referenced_type(S, Id) of
- {M,V} when is_record(V,valuedef) ->
- NewS = update_state(S,M),
- NewVal = case check_value(NewS, V) of
- #valuedef{checked=true,value=Value} ->
- fun(Int) when is_integer(Int) -> [Int];
- (L) when is_list(L) -> L;
- (T) when is_tuple(T) -> tuple_to_list(T)
- end (Value);
- _ ->
- error({value, {"illegal "++to_string(OID),
- [Id|Vrest],Acc}, S})
- end,
- case NewVal of
- List when is_list(List) ->
- validate_oid(valid_objectid(OID,NewVal,Acc), NewS,
- OID, Vrest,lists:reverse(NewVal)++Acc);
- _ ->
- NewVal
- end;
- _ ->
+ validate_oid(S, OidType, Vrest, [Value|Acc]);
+validate_oid(S, OidType, [#'Externalvaluereference'{}=Id|Vrest], Acc) ->
+ NeededOidType = case Acc of
+ [] -> o_id;
+ [_|_] -> rel_oid
+ end,
+ try get_oid_value(S, NeededOidType, true, Id) of
+ Val when is_integer(Val) ->
+ validate_oid(S, OidType, Vrest, [Val|Acc]);
+ Val when is_tuple(Val) ->
+ L = tuple_to_list(Val),
+ validate_oid(S, OidType, Vrest, lists:reverse(L, Acc))
+ catch
+ _:_ ->
case reserved_objectid(Id#'Externalvaluereference'.value, Acc) of
Value when is_integer(Value) ->
- validate_oid(valid_objectid(OID,Value,Acc),
- S, OID,Vrest, [Value|Acc]);
+ validate_oid(S, OidType,Vrest, [Value|Acc]);
false ->
- error({value, {"illegal "++to_string(OID),[Id,Vrest],Acc}, S})
+ asn1_error(S, {illegal_oid,OidType})
end
end;
-validate_oid(_, S, OID, [{#seqtag{module=Mod,val=Atom},Value}], [])
- when is_atom(Atom),is_integer(Value) ->
- %% this case when an OBJECT IDENTIFIER value has been parsed as a
- %% SEQUENCE value
- Rec = #'Externalvaluereference'{module=Mod,
- value=Atom},
- validate_objectidentifier1(S, OID, [Rec,Value]);
-validate_oid(_, S, OID, [{#seqtag{module=Mod,val=Atom},EVRef}], [])
- when is_atom(Atom),is_record(EVRef,'Externalvaluereference') ->
- %% this case when an OBJECT IDENTIFIER value has been parsed as a
- %% SEQUENCE value OTP-4354
- Rec = #'Externalvaluereference'{module=Mod,
- value=Atom},
- validate_objectidentifier1(S, OID, [Rec,EVRef]);
-validate_oid(_, S, OID, [#seqtag{module=Mod,val=Atom}|Rest], Acc)
- when is_atom(Atom) ->
- Rec = #'Externalvaluereference'{module=Mod,
- value=Atom},
- validate_oid(true,S, OID, [Rec|Rest],Acc);
-validate_oid(_, S, OID, V, Acc) ->
- error({value, {"illegal "++to_string(OID),V,Acc},S}).
-
-is_object_id(OID,S,ERef=#'Externaltypereference'{}) ->
- {_,OI} = get_referenced_type(S,ERef),
- is_object_id(OID,S,OI#typedef.typespec);
-is_object_id(o_id,_S,'OBJECT IDENTIFIER') ->
- true;
-is_object_id(rel_oid,_S,'RELATIVE-OID') ->
- true;
-is_object_id(_,_S,'INTEGER') ->
- true;
-is_object_id(OID,S,#type{def=Def}) ->
- is_object_id(OID,S,Def);
-is_object_id(_,_S,_) ->
- false.
-
-to_string(o_id) ->
- "OBJECT IDENTIFIER";
-to_string(rel_oid) ->
- "RELATIVE-OID".
+validate_oid(S, OidType, _V, _Acc) ->
+ asn1_error(S, {illegal_oid,OidType}).
+
+get_oid_type(S, OidType, #type{def=Def}) ->
+ get_oid_type(S, OidType, Def);
+get_oid_type(S, OidType, #'Externaltypereference'{}=Id) ->
+ {_,OI} = get_referenced_type(S, Id),
+ get_oid_type(S, OidType, OI#typedef.typespec);
+get_oid_type(_S, o_id, 'OBJECT IDENTIFIER'=T) ->
+ T;
+get_oid_type(_S, rel_oid, 'RELATIVE-OID'=T) ->
+ T;
+get_oid_type(_S, _, 'INTEGER'=T) ->
+ T;
+get_oid_type(S, OidType, _) ->
+ asn1_error(S, {illegal_oid,OidType}).
%% ITU-T Rec. X.680 Annex B - D
reserved_objectid('itu-t',[]) -> 0;
@@ -2380,7 +1903,6 @@ reserved_objectid('x',[0,0]) -> 24;
reserved_objectid('y',[0,0]) -> 25;
reserved_objectid('z',[0,0]) -> 26;
-
reserved_objectid(iso,[]) -> 1;
%% arcs below "iso", note that number 1 is not used
reserved_objectid('standard',[1]) -> 0;
@@ -2392,25 +1914,22 @@ reserved_objectid('joint-iso-ccitt',[]) -> 2;
reserved_objectid(_,_) -> false.
-valid_objectid(_OID,[],_Acc) ->
- true;
-valid_objectid(OID,[H|T],Acc) ->
- case valid_objectid(OID, H, Acc) of
- true ->
- valid_objectid(OID,T,[H|Acc]);
- _ ->
- false
- end;
-valid_objectid(o_id,I,[]) when I =:= 0; I =:= 1; I =:= 2 -> true;
-valid_objectid(o_id,_I,[]) -> false;
-valid_objectid(o_id,I,[0]) when I >= 0; I =< 4 -> true;
-valid_objectid(o_id,_I,[0]) -> false;
-valid_objectid(o_id,I,[1]) when I =:= 0; I =:= 2; I =:= 3 -> true;
-valid_objectid(o_id,_I,[1]) -> false;
-valid_objectid(o_id,_I,[2]) -> true;
-valid_objectid(_,_,_) -> true.
-
-convert_external(S=#state{type=Vtype}, Value) ->
+validate_oid_path(_, rel_oid, _) ->
+ ok;
+validate_oid_path(_, o_id, [0,I|_]) when 0 =< I, I =< 9 ->
+ ok;
+validate_oid_path(_, o_id, [1,I|_]) when 0 =< I, I =< 3 ->
+ ok;
+validate_oid_path(_, o_id, [2|_]) ->
+ ok;
+validate_oid_path(S, o_id=OidType, _) ->
+ asn1_error(S, {illegal_oid,OidType}).
+
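%% Editor's illustration (not part of this change): assuming 'itu-t' is
%% not a defined value in the module, the reference falls through to the
%% reserved_objectid/2 table above, so a value such as {itu-t 5} is
%% checked and returned as a tuple:
%%
%%   validate_oid(S, o_id, [#'Externalvaluereference'{value='itu-t'}, 5], [])
%%       -> {0,5}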
+%%%
+%%% End of OBJECT IDENTIFIER/RELATIVE-OID validation.
+%%%
+
+convert_external(S, Vtype, Value) ->
case Vtype of
#type{tag=[{tag,'UNIVERSAL',8,'IMPLICIT',32}]} ->
%% this is an 'EXTERNAL' (or INSTANCE OF)
@@ -2435,7 +1954,7 @@ to_EXTERNAL1990(S, [{#seqtag{val=identification}=T,
to_EXTERNAL1990(S, Rest, [{T#seqtag{val='indirect-reference'},PCid},
{T#seqtag{val='direct-reference'},TrStx}]);
to_EXTERNAL1990(S, _) ->
- error({value,"illegal value in EXTERNAL type",S}).
+ asn1_error(S, illegal_external_value).
to_EXTERNAL1990(S, [V={#seqtag{val='data-value-descriptor'},_}|Rest], Acc) ->
to_EXTERNAL1990(S, Rest, [V|Acc]);
@@ -2443,7 +1962,7 @@ to_EXTERNAL1990(_S, [{#seqtag{val='data-value'}=T,Val}], Acc) ->
Encoding = {T#seqtag{val=encoding},{'CHOICE',{'octet-aligned',Val}}},
lists:reverse([Encoding|Acc]);
to_EXTERNAL1990(S, _, _) ->
- error({value,"illegal value in EXTERNAL type",S}).
+ asn1_error(S, illegal_external_value).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Functions to normalize the default values of SEQUENCE
@@ -2453,17 +1972,16 @@ normalize_value(_,_,mandatory,_) ->
mandatory;
normalize_value(_,_,'OPTIONAL',_) ->
'OPTIONAL';
-normalize_value(S0, Type, {'DEFAULT',Value}, NameList) ->
- S = S0#state{value=Value},
+normalize_value(S, Type, {'DEFAULT',Value}, NameList) ->
case catch get_canonic_type(S,Type,NameList) of
{'BOOLEAN',CType,_} ->
normalize_boolean(S,Value,CType);
{'INTEGER',CType,_} ->
- normalize_integer(S,Value,CType);
+ normalize_integer(S, Value, CType);
{'BIT STRING',CType,_} ->
normalize_bitstring(S,Value,CType);
- {'OCTET STRING',CType,_} ->
- normalize_octetstring(S0, Value, CType);
+ {'OCTET STRING',_,_} ->
+ normalize_octetstring(S, Value);
{'NULL',_CType,_} ->
%%normalize_null(Value);
'NULL';
@@ -2499,39 +2017,41 @@ normalize_value(S0, Type, {'DEFAULT',Value}, NameList) ->
normalize_value(S,Type,Val,NameList) ->
normalize_value(S,Type,{'DEFAULT',Val},NameList).
-normalize_boolean(S,{Name,Bool},CType) when is_atom(Name) ->
- normalize_boolean(S,Bool,CType);
normalize_boolean(_,true,_) ->
true;
normalize_boolean(_,false,_) ->
false;
normalize_boolean(S,Bool=#'Externalvaluereference'{},CType) ->
get_normalized_value(S,Bool,CType,fun normalize_boolean/3,[]);
-normalize_boolean(_,Other,_) ->
- throw({error,{asn1,{'invalid default value',Other}}}).
+normalize_boolean(S, _, _) ->
+ asn1_error(S, {illegal_value, "BOOLEAN"}).
-normalize_integer(_S,Int,_) when is_integer(Int) ->
+normalize_integer(_S, Int, _) when is_integer(Int) ->
Int;
-normalize_integer(_S,{Name,Int},_) when is_atom(Name),is_integer(Int) ->
- Int;
-normalize_integer(S,{Name,Int=#'Externalvaluereference'{}},
- Type) when is_atom(Name) ->
- normalize_integer(S,Int,Type);
-normalize_integer(S,Int=#'Externalvaluereference'{value=Name},Type) ->
- case Type of
- NNL when is_list(NNL) ->
- case lists:keysearch(Name,1,NNL) of
- {value,{Name,Val}} ->
+normalize_integer(S, #'Externalvaluereference'{value=Name}=Ref, NNL) ->
+ case lists:keyfind(Name, 1, NNL) of
+ {Name,Val} ->
+ Val;
+ false ->
+ try get_referenced_value(S, Ref) of
+ Val when is_integer(Val) ->
Val;
- false ->
- get_normalized_value(S,Int,Type,
- fun normalize_integer/3,[])
- end;
+ _ ->
+ asn1_error(S, illegal_integer_value)
+ catch
+ throw:_ ->
+ asn1_error(S, illegal_integer_value)
+ end
+ end;
+normalize_integer(S, {'ValueFromObject',{object,Obj},FieldNames}, _) ->
+ case extract_field(S, Obj, FieldNames) of
+ #valuedef{value=Val} when is_integer(Val) ->
+ Val;
_ ->
- get_normalized_value(S,Int,Type,fun normalize_integer/3,[])
+ asn1_error(S, illegal_integer_value)
end;
-normalize_integer(_,Int,_) ->
- exit({'Unknown INTEGER value',Int}).
+normalize_integer(S, _, _) ->
+ asn1_error(S, illegal_integer_value).
%% normalize_bitstring(S, Value, Type) -> bitstring()
%% Convert a literal value for a BIT STRING to an Erlang bit string.
@@ -2543,36 +2063,34 @@ normalize_bitstring(S, Value, Type)->
{bstring,String} when is_list(String) ->
bstring_to_bitstring(String);
#'Externalvaluereference'{} ->
- get_normalized_value(S, Value, Type,
- fun normalize_bitstring/3, []);
- RecList when is_list(RecList) ->
- F = fun(#'Externalvaluereference'{value=Name}) ->
- case lists:keymember(Name, 1, Type) of
- true -> Name;
- false -> throw({error,false})
- end;
- (Name) when is_atom(Name) ->
- %% Already normalized.
- Name;
- (Other) ->
- throw({error,Other})
- end,
- try
- lists:map(F, RecList)
- catch
- throw:{error,Reason} ->
- asn1ct:warning("default value not "
- "compatible with type definition ~p~n",
- [Reason],S,
- "default value not "
- "compatible with type definition"),
- Value
+ Val = get_referenced_value(S, Value),
+ normalize_bitstring(S, Val, Type);
+ {'ValueFromObject',{object,Obj},FieldNames} ->
+ case extract_field(S, Obj, FieldNames) of
+ #valuedef{value=Val} ->
+ normalize_bitstring(S, Val, Type);
+ _ ->
+ asn1_error(S, {illegal_value, "BIT STRING"})
end;
+ RecList when is_list(RecList) ->
+ [normalize_bs_item(S, Item, Type) || Item <- RecList];
Bs when is_bitstring(Bs) ->
%% Already normalized.
- Bs
+ Bs;
+ _ ->
+ asn1_error(S, {illegal_value, "BIT STRING"})
end.
+normalize_bs_item(S, #'Externalvaluereference'{value=Name}, Type) ->
+ case lists:keymember(Name, 1, Type) of
+ true -> Name;
+ false -> asn1_error(S, {illegal_value, "BIT STRING"})
+ end;
+normalize_bs_item(_, Atom, _) when is_atom(Atom) ->
+ Atom;
+normalize_bs_item(S, _, _) ->
+ asn1_error(S, {illegal_value, "BIT STRING"}).
+
hstring_to_binary(L) ->
byte_align(hstring_to_bitstring(L)).
@@ -2600,29 +2118,35 @@ hex_to_int(D) when $A =< D, D =< $F -> D - ($A - 10).
%% {bstring,String} each element in String corresponds to one bit in an octet
%% {hstring,String} each element in String corresponds to one byte in an octet
%% #'Externalvaluereference'
-normalize_octetstring(S,Value,CType) ->
+normalize_octetstring(S, Value) ->
case Value of
{bstring,String} ->
bstring_to_binary(String);
{hstring,String} ->
hstring_to_binary(String);
- Rec when is_record(Rec,'Externalvaluereference') ->
- get_normalized_value(S,Value,CType,
- fun normalize_octetstring/3,[]);
- {Name,String} when is_atom(Name) ->
- normalize_octetstring(S,String,CType);
+ #'Externalvaluereference'{} ->
+ case get_referenced_value(S, Value) of
+ String when is_binary(String) ->
+ String;
+ Other ->
+ normalize_octetstring(S, Other)
+ end;
+ {'ValueFromObject',{object,Obj},FieldNames} ->
+ case extract_field(S, Obj, FieldNames) of
+ #valuedef{value=Val} when is_binary(Val) ->
+ Val;
+ _ ->
+ asn1_error(S, illegal_octet_string_value)
+ end;
_ ->
- Item = S#state.value,
- asn1_error(S, Item, illegal_octet_string_value)
+ asn1_error(S, illegal_octet_string_value)
end.
normalize_objectidentifier(S, Value) ->
- {ok,Val} = validate_objectidentifier(S, o_id, Value, []),
- Val.
+ validate_objectidentifier(S, o_id, Value).
-normalize_relative_oid(S,Value) ->
- {ok,Val} = validate_objectidentifier(S, rel_oid, Value, []),
- Val.
+normalize_relative_oid(S, Value) ->
+ validate_objectidentifier(S, rel_oid, Value).
normalize_objectdescriptor(Value) ->
Value.
@@ -2644,40 +2168,22 @@ lookup_enum_value(S, Id, NNL) when is_atom(Id) ->
{_,_}=Ret ->
Ret;
false ->
- asn1_error(S, S#state.value, {undefined,Id})
+ asn1_error(S, {undefined,Id})
end.
-normalize_choice(S,{'CHOICE',{C,V}},CType,NameList) when is_atom(C) ->
- case catch lists:keysearch(C,#'ComponentType'.name,CType) of
- {value,#'ComponentType'{typespec=CT,name=Name}} ->
- {C,normalize_value(S,CT,{'DEFAULT',V},
- [Name|NameList])};
- Other ->
- asn1ct:warning("Wrong format of type/value ~p/~p~n",[Other,V],S,
- "Wrong format of type/value"),
- {C,V}
+normalize_choice(S, {'CHOICE',{C,V}}, CType, NameList)
+ when is_atom(C) ->
+ case lists:keyfind(C, #'ComponentType'.name, CType) of
+ #'ComponentType'{typespec=CT,name=Name} ->
+ {C,normalize_value(S, CT, {'DEFAULT',V}, [Name|NameList])};
+ false ->
+ asn1_error(S, {illegal_id,C})
end;
-normalize_choice(S,{'DEFAULT',ValueList},CType,NameList) when is_list(ValueList) ->
- lists:map(fun(X)-> normalize_choice(S,X,CType,NameList) end, ValueList);
-normalize_choice(S,Val=#'Externalvaluereference'{},CType,NameList) ->
- {M,#valuedef{value=V}}=get_referenced_type(S,Val),
- normalize_choice(update_state(S,M),{'CHOICE',V},CType,NameList);
-% get_normalized_value(S,Val,CType,fun normalize_choice/4,[NameList]);
-normalize_choice(S,CV={Name,_ChoiceVal},CType,NameList)
+normalize_choice(S,CV={Name,_ChoiceVal},CType,NameList)
when is_atom(Name) ->
-% normalize_choice(S,ChoiceVal,CType,NameList).
normalize_choice(S,{'CHOICE',CV},CType,NameList);
-normalize_choice(_S,V,_CType,_NameList) ->
- exit({error,{bad_choice_value,V}}).
-
-%% normalize_choice(NameList,S,CVal = {'CHOICE',{_,_}},CType,_) ->
-%% normalize_choice(S,CVal,CType,NameList);
-%% normalize_choice(NameList,S,CVal={'DEFAULT',VL},CType,_) when is_list(VL)->
-%% normalize_choice(S,CVal,CType,NameList);
-%% normalize_choice(NameList,S,CV={Name,_CV},CType,_) when is_atom(Name)->
-%% normalize_choice(S,{'CHOICE',CV},CType,NameList);
-%% normalize_choice(_,_S,V,_,_) ->
-%% V.
+normalize_choice(S, V, _CType, _NameList) ->
+ asn1_error(S, {illegal_id, error_value(V)}).
normalize_sequence(S,Value,Components,NameList)
when is_tuple(Components) ->
@@ -2732,12 +2238,9 @@ normalized_record(SorS,S,Value,Components,NameList) ->
Value;
_ ->
NoComps = length(Components),
- case normalize_seq_or_set(SorS,S,Value,Components,NameList,[]) of
- ListOfVals when length(ListOfVals) == NoComps ->
- list_to_tuple([NewName|ListOfVals]);
- _ ->
- error({type,{illegal,default,value,Value},S})
- end
+ ListOfVals = normalize_seq_or_set(SorS,S,Value,Components,NameList,[]),
+ NoComps = length(ListOfVals), %% Assert
+ list_to_tuple([NewName|ListOfVals])
end.
is_record_normalized(S,Name,V = #'Externalvaluereference'{},NumComps) ->
case get_referenced_type(S,V) of
@@ -2750,10 +2253,11 @@ is_record_normalized(_S,Name,Value,NumComps) when is_tuple(Value) ->
is_record_normalized(_,_,_,_) ->
false.
-normalize_seq_or_set(SorS, S, [{#seqtag{val=Cname},V}|Vs],
+normalize_seq_or_set(SorS, S,
+ [{#seqtag{val=Cname},V}|Vs],
[#'ComponentType'{name=Cname,typespec=TS}|Cs],
NameList, Acc) ->
- NewNameList =
+ NewNameList =
case TS#type.def of
#'Externaltypereference'{type=TName} ->
[TName];
@@ -2761,24 +2265,26 @@ normalize_seq_or_set(SorS, S, [{#seqtag{val=Cname},V}|Vs],
end,
NVal = normalize_value(S,TS,{'DEFAULT',V},NewNameList),
normalize_seq_or_set(SorS,S,Vs,Cs,NameList,[NVal|Acc]);
-normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs],
+normalize_seq_or_set(SorS, S,
+ Values=[{#seqtag{val=Cname0},_V}|_Vs],
[#'ComponentType'{prop='OPTIONAL'}|Cs],
- NameList,Acc) ->
+ NameList, Acc) ->
+ verify_valid_component(S, Cname0, Cs),
normalize_seq_or_set(SorS,S,Values,Cs,NameList,[asn1_NOVALUE|Acc]);
-normalize_seq_or_set(SorS,S,Values=[{_Cname1,_V}|_Vs],
- [#'ComponentType'{name=Cname2,typespec=TS,
- prop={'DEFAULT',Value}}|Cs],
- NameList,Acc) ->
- NewNameList =
+normalize_seq_or_set(SorS, S,
+ Values=[{#seqtag{val=Cname0},_V}|_Vs],
+ [#'ComponentType'{name=Cname,typespec=TS,
+ prop={'DEFAULT',Value}}|Cs],
+ NameList, Acc) ->
+ verify_valid_component(S, Cname0, Cs),
+ NewNameList =
case TS#type.def of
#'Externaltypereference'{type=TName} ->
[TName];
- _ -> [Cname2|NameList]
+ _ -> [Cname|NameList]
end,
NVal = normalize_value(S,TS,{'DEFAULT',Value},NewNameList),
normalize_seq_or_set(SorS,S,Values,Cs,NameList,[NVal|Acc]);
-normalize_seq_or_set(_SorS,_S,[],[],_,Acc) ->
- lists:reverse(Acc);
%% If default value is {} ComponentTypes in SEQUENCE are marked DEFAULT
%% or OPTIONAL (or the type is defined SEQUENCE{}, which is handled by
%% the previous case).
@@ -2801,9 +2307,23 @@ normalize_seq_or_set(SorS,S,Value=#'Externalvaluereference'{},
Cs,NameList,Acc) ->
get_normalized_value(S,Value,Cs,fun normalize_seq_or_set/6,
[SorS,NameList,Acc]);
-normalize_seq_or_set(_SorS,S,V,_,_,_) ->
- error({type,{illegal,default,value,V},S}).
-
+normalize_seq_or_set(_SorS, _S, [], [], _, Acc) ->
+ lists:reverse(Acc);
+normalize_seq_or_set(_SorS, S, V, Cs, _, _) ->
+ case V of
+ [{#seqtag{val=Name},_}|_] ->
+ asn1_error(S, {illegal_id,error_value(Name)});
+ [] ->
+ [#'ComponentType'{name=Name}|_] = Cs,
+ asn1_error(S, {missing_id,error_value(Name)})
+ end.
+
+verify_valid_component(S, Name, Cs) ->
+ case lists:keyfind(Name, #'ComponentType'.name, Cs) of
+ false -> asn1_error(S, {illegal_id,error_value(Name)});
+ #'ComponentType'{} -> ok
+ end.
+
normalize_seqof(S,Value,Type,NameList) ->
normalize_s_of('SEQUENCE OF',S,Value,Type,NameList).
@@ -2859,10 +2379,7 @@ normalize_restrictedstring(_S,CString,_) when is_list(CString) ->
%% definedvalue case or argument in a parameterized type
normalize_restrictedstring(S,ERef,CType) when is_record(ERef,'Externalvaluereference') ->
get_normalized_value(S,ERef,CType,
- fun normalize_restrictedstring/3,[]);
-%%
-normalize_restrictedstring(S,{Name,Val},CType) when is_atom(Name) ->
- normalize_restrictedstring(S,Val,CType).
+ fun normalize_restrictedstring/3,[]).
normalize_objectclassfieldvalue(S,{opentypefieldvalue,Type,Value},NameList) ->
%% An open type has per definition no type. Thus should the type
@@ -2910,6 +2427,8 @@ call_Func(S,Val,Type,Func,ArgList) ->
get_canonic_type(S,Type,NameList) ->
{InnerType,NewType,NewNameList} =
case Type#type.def of
+ 'INTEGER'=Name ->
+ {Name,[],NameList};
Name when is_atom(Name) ->
{Name,Type,NameList};
Ref when is_record(Ref,'Externaltypereference') ->
@@ -2964,8 +2483,8 @@ check_formal_parameter(_, {_,_}) ->
ok;
check_formal_parameter(_, #'Externaltypereference'{}) ->
ok;
-check_formal_parameter(S, #'Externalvaluereference'{value=Name}=Ref) ->
- asn1_error(S, Ref, {illegal_typereference,Name}).
+check_formal_parameter(S, #'Externalvaluereference'{value=Name}) ->
+ asn1_error(S, {illegal_typereference,Name}).
% check_type(S,Type,ObjSpec={{objectclassname,_},_}) ->
% check_class(S,ObjSpec);
@@ -2977,7 +2496,7 @@ check_type(_S,Type,Ts) when is_record(Type,typedef),
Ts;
check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
{Def,Tag,Constr,IsInlined} =
- case match_parameters(S,Ts#type.def,S#state.parameters) of
+ case match_parameter(S, Ts#type.def) of
#type{tag=PTag,constraint=_Ctmp,def=Dtmp,inlined=Inl} ->
{Dtmp,merge_tags(Ts#type.tag,PTag),Ts#type.constraint,Inl};
#typedef{typespec=#type{tag=PTag,def=Dtmp,inlined=Inl}} ->
@@ -2989,16 +2508,16 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
inlined=IsInlined},
TestFun =
fun(Tref) ->
- MaybeChoice = get_non_typedef(S, Tref),
+ {_, MaybeChoice} = get_referenced_type(S, Tref, true),
case catch((MaybeChoice#typedef.typespec)#type.def) of
{'CHOICE',_} ->
- maybe_illicit_implicit_tag(choice,Tag);
+ maybe_illicit_implicit_tag(S, choice, Tag);
'ANY' ->
- maybe_illicit_implicit_tag(open_type,Tag);
+ maybe_illicit_implicit_tag(S, open_type, Tag);
'ANY DEFINED BY' ->
- maybe_illicit_implicit_tag(open_type,Tag);
+ maybe_illicit_implicit_tag(S, open_type, Tag);
'ASN1_OPEN_TYPE' ->
- maybe_illicit_implicit_tag(open_type,Tag);
+ maybe_illicit_implicit_tag(S, open_type, Tag);
_ ->
Tag
end
@@ -3007,7 +2526,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
case Def of
Ext when is_record(Ext,'Externaltypereference') ->
{RefMod,RefTypeDef,IsParamDef} =
- case get_referenced_type(S,Ext) of
+ case get_referenced_type(S, Ext) of
{undefined,TmpTDef} -> %% A parameter
{get(top_module),TmpTDef,true};
{TmpRefMod,TmpRefDef} ->
@@ -3031,7 +2550,6 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
NewS = S#state{mname=RefMod,
module=load_asn1_module(S,RefMod),
tname=get_datastr_name(NewRefTypeDef1),
- type=NewRefTypeDef1,
abscomppath=[],recordtopname=[]},
RefType1 =
check_type(NewS,RefTypeDef,RefTypeDef#typedef.typespec),
@@ -3051,18 +2569,17 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
Key);
_ -> ok
end,
+ Pos = Ext#'Externaltypereference'.pos,
{RefType1,#'Externaltypereference'{module=RefMod,
+ pos=Pos,
type=TmpName}}
end,
case asn1ct_gen:prim_bif(asn1ct_gen:get_inner(RefType#type.def)) of
true ->
%% Here we expand to a built in type and inline it
- NewS2 = S#state{type=#typedef{typespec=RefType}},
- NewC =
- constraint_merge(NewS2,
- check_constraints(NewS2,Constr)++
- RefType#type.constraint),
+ NewC = check_constraints(S, RefType, Constr ++
+ RefType#type.constraint),
TempNewDef#newt{
type = RefType#type.def,
tag = merge_tags(Ct,RefType#type.tag),
@@ -3073,19 +2590,13 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
NewExt = ExtRef#'Externaltypereference'{module=merged_mod(S,RefMod,Ext)},
TempNewDef#newt{
type = check_externaltypereference(S,NewExt),
- tag = case S#state.erule of
- ber ->
- merge_tags(Ct,RefType#type.tag);
- _ ->
- Ct
- end
- }
+ tag = merge_tags(Ct,RefType#type.tag)}
end;
'ANY' ->
- Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ Ct = maybe_illicit_implicit_tag(S, open_type, Tag),
TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
{'ANY_DEFINED_BY',_} ->
- Ct=maybe_illicit_implicit_tag(open_type,Tag),
+ Ct = maybe_illicit_implicit_tag(S, open_type, Tag),
TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
'INTEGER' ->
TempNewDef#newt{tag=
@@ -3132,7 +2643,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
{'ENUMERATED',NamedNumberList} ->
TempNewDef#newt{type=
{'ENUMERATED',
- check_enumerated(S,NamedNumberList,Constr)},
+ check_enumerated(S, NamedNumberList)},
tag=
merge_tags(Tag,?TAG_PRIMITIVE(?N_ENUMERATED)),
constraint=[]};
@@ -3235,7 +2746,7 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
tag=
merge_tags(Tag,?TAG_CONSTRUCTED(?N_SEQUENCE))};
{'CHOICE',Components} ->
- Ct = maybe_illicit_implicit_tag(choice,Tag),
+ Ct = maybe_illicit_implicit_tag(S, choice, Tag),
TempNewDef#newt{type={'CHOICE',check_choice(S,Type,Components)},tag=Ct};
Set when is_record(Set,'SET') ->
RecordName=
@@ -3258,12 +2769,6 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
TempNewDef#newt{type={'SET OF',check_setof(S,Type,Components)},
tag=
merge_tags(Tag,?TAG_CONSTRUCTED(?N_SET))};
- %% This is a temporary hack until the full Information Obj Spec
- %% in X.681 is supported
- {#'Externaltypereference'{type='TYPE-IDENTIFIER'},
- [{typefieldreference,_,'Type'}]} ->
- Ct=maybe_illicit_implicit_tag(open_type,Tag),
- TempNewDef#newt{type='ASN1_OPEN_TYPE',tag=Ct};
{pt,Ptype,ParaList} ->
%% Ptype might be a parameterized - type, object set or
@@ -3271,18 +2776,18 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
%% calling function.
{_RefMod,Ptypedef} = get_referenced_type(S,Ptype),
notify_if_not_ptype(S,Ptypedef),
- NewParaList =
- [match_parameters(S,TmpParam,S#state.parameters)||
- TmpParam <- ParaList],
+ NewParaList = match_parameters(S, ParaList),
Instance = instantiate_ptype(S,Ptypedef,NewParaList),
TempNewDef#newt{type=Instance#type.def,
tag=merge_tags(Tag,Instance#type.tag),
constraint=Instance#type.constraint,
inlined=yes};
- OCFT=#'ObjectClassFieldType'{classname=ClRef} ->
+ #'ObjectClassFieldType'{classname=ClRef0}=OCFT0 ->
	    %% this case occurs in a SEQUENCE when
	    %% the type of the component is an ObjectClassFieldType
+ ClRef = match_parameter(S, ClRef0),
+ OCFT = OCFT0#'ObjectClassFieldType'{classname=ClRef},
ClassSpec = check_class(S,ClRef),
NewTypeDef =
maybe_open_type(S,ClassSpec,
@@ -3292,16 +2797,18 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
Ct =
case is_open_type(NewTypeDef) of
true ->
- maybe_illicit_implicit_tag(open_type,MergedTag);
+ maybe_illicit_implicit_tag(S, open_type, MergedTag);
_ ->
MergedTag
end,
case TopName of
[] when Type#typedef.name =/= undefined ->
%% This is a top-level type.
- #type{def=Simplified} =
- simplify_type(#type{def=NewTypeDef}),
- TempNewDef#newt{type=Simplified,tag=Ct};
+ #type{constraint=C,def=Simplified} =
+ simplify_type(#type{def=NewTypeDef,
+ constraint=Constr}),
+ TempNewDef#newt{type=Simplified,tag=Ct,
+ constraint=C};
_ ->
TempNewDef#newt{type=NewTypeDef,tag=Ct}
end;
@@ -3311,33 +2818,21 @@ check_type(S=#state{recordtopname=TopName},Type,Ts) when is_record(Ts,type) ->
TempNewDef#newt{tag=merge_tags(Tag,CheckedT#type.tag),
type=CheckedT#type.def};
- {valueset,Vtype} ->
- TempNewDef#newt{type={valueset,check_type(S,Type,Vtype)}};
{'SelectionType',Name,T} ->
CheckedT = check_selectiontype(S,Name,T),
TempNewDef#newt{tag=merge_tags(Tag,CheckedT#type.tag),
type=CheckedT#type.def};
- Other ->
- exit({'cant check' ,Other})
+ 'ASN1_OPEN_TYPE' ->
+ TempNewDef
end,
#newt{type=TDef,tag=NewTags,constraint=NewConstr,inlined=Inlined} = NewDef,
Ts#type{def=TDef,
inlined=Inlined,
- constraint=check_constraints(S, NewConstr),
+ constraint=check_constraints(S, #type{def=TDef}, NewConstr),
tag=lists:map(fun(#tag{type={default,TTx}}=TempTag) ->
TempTag#tag{type=TTx};
(Other) -> Other
- end, NewTags)};
-check_type(_S,Type,Ts) ->
- exit({error,{asn1,internal_error,Type,Ts}}).
-
-get_non_typedef(S, Tref0) ->
- case get_referenced_type(S, Tref0) of
- {_,#typedef{typespec=#type{def=#'Externaltypereference'{}=Tref}}} ->
- get_non_typedef(S, Tref);
- {_,Type} ->
- Type
- end.
+ end, NewTags)}.
%%
@@ -3353,10 +2848,11 @@ simplify_comp(#'ComponentType'{typespec=Type0}=C) ->
C#'ComponentType'{typespec=Type};
simplify_comp(Other) -> Other.
-simplify_type(#type{tag=Tag,def=Inner}=T) ->
+simplify_type(#type{tag=Tag,def=Inner,constraint=Constr0}=T) ->
case Inner of
- #'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}} ->
- Type#type{tag=Tag};
+ #'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}}=OCFT ->
+ Constr = [{ocft,OCFT}|Type#type.constraint++Constr0],
+ Type#type{tag=Tag,constraint=Constr};
_ ->
T
end.
@@ -3389,29 +2885,22 @@ get_innertag(_S,#'ObjectClassFieldType'{type=Type}) ->
_ -> []
end.
-get_type_from_object(S,Object,TypeField)
- when is_record(Object,'Externaltypereference');
- is_record(Object,'Externalvaluereference') ->
- {_,ObjectDef} = get_referenced_type(S,Object),
- ObjSpec = check_object(S,ObjectDef,ObjectDef#typedef.typespec),
- get_fieldname_element(S,ObjectDef#typedef{typespec=ObjSpec},TypeField).
-
%% get_class_def(S, Type) -> #classdef{} | 'none'.
get_class_def(S, #typedef{typespec=#type{def=#'Externaltypereference'{}=Eref}}) ->
- {_,NextDef} = get_referenced_type(S, Eref),
+ {_,NextDef} = get_referenced_type(S, Eref, true),
get_class_def(S, NextDef);
get_class_def(S, #'Externaltypereference'{}=Eref) ->
- {_,NextDef} = get_referenced_type(S, Eref),
+ {_,NextDef} = get_referenced_type(S, Eref, true),
get_class_def(S, NextDef);
get_class_def(_S, #classdef{}=CD) ->
CD;
get_class_def(_S, _) ->
none.
-maybe_illicit_implicit_tag(Kind,Tag) ->
+maybe_illicit_implicit_tag(S, Kind, Tag) ->
case Tag of
[#tag{type='IMPLICIT'}|_T] ->
- throw({error,{asn1,{implicit_tag_before,Kind}}});
+ asn1_error(S, {implicit_tag_before,Kind});
[ChTag = #tag{type={default,_}}|T] ->
case Kind of
open_type ->
@@ -3438,19 +2927,24 @@ merged_mod(S,RefMod,Ext) ->
%% any UNIQUE field, so that a component relation constraint cannot specify
%% the type of a typefield, return 'ASN1_OPEN_TYPE'.
%%
-maybe_open_type(S,ClassSpec=#objectclass{fields=Fs},
- OCFT=#'ObjectClassFieldType'{fieldname=FieldRefList},
+maybe_open_type(_, _, #'ObjectClassFieldType'{fieldname={_,_}}=OCFT, _) ->
+ %% Already converted.
+ OCFT;
+maybe_open_type(S, #objectclass{fields=Fs}=ClassSpec,
+ #'ObjectClassFieldType'{fieldname=FieldRefList}=OCFT,
Constr) ->
- Type = get_ObjectClassFieldType(S,Fs,FieldRefList),
- FieldNames=get_referenced_fieldname(FieldRefList),
- case last_fieldname(FieldRefList) of
+ Type = get_OCFType(S, Fs, FieldRefList),
+ FieldNames = get_referenced_fieldname(FieldRefList),
+ case lists:last(FieldRefList) of
{valuefieldreference,_} ->
OCFT#'ObjectClassFieldType'{fieldname=FieldNames,
type=Type};
{typefieldreference,_} ->
- case {catch get_unique_fieldname(S,#classdef{typespec=ClassSpec}),
- asn1ct_gen:get_constraint(Constr,componentrelation)}of
- {Tuple,_} when tuple_size(Tuple) =:= 3 ->
+ %% Note: The constraints have not been checked yet,
+ %% so we must use a special lookup routine.
+ case {get_unique_fieldname(S, #classdef{typespec=ClassSpec}),
+ get_componentrelation(Constr)} of
+ {no_unique,_} ->
OCFT#'ObjectClassFieldType'{fieldname=FieldNames,
type='ASN1_OPEN_TYPE'};
{_,no} ->
@@ -3462,16 +2956,12 @@ maybe_open_type(S,ClassSpec=#objectclass{fields=Fs},
end
end.
-last_fieldname(FieldRefList) when is_list(FieldRefList) ->
- lists:last(FieldRefList);
-last_fieldname({FieldName,_}) when is_atom(FieldName) ->
- [A|_] = atom_to_list(FieldName),
- case is_lowercase(A) of
- true ->
- {valuefieldreference,FieldName};
- _ ->
- {typefieldreference,FieldName}
- end.
+get_componentrelation([{element_set,{componentrelation,_,_}=Cr,none}|_]) ->
+ Cr;
+get_componentrelation([_|T]) ->
+ get_componentrelation(T);
+get_componentrelation([]) ->
+ no.
is_open_type(#'ObjectClassFieldType'{type='ASN1_OPEN_TYPE'}) ->
true;
@@ -3510,35 +3000,19 @@ notify_if_not_ptype(S,#pobjectsetdef{class=Cl}) ->
_ ->
throw(pobjectsetdef)
end;
-notify_if_not_ptype(_S,PT) ->
- throw({error,{"supposed to be a parameterized type",PT}}).
-% fix me
+notify_if_not_ptype(S, PT) ->
+ asn1_error(S, {param_bad_type, error_value(PT)}).
+
instantiate_ptype(S,Ptypedef,ParaList) ->
#ptypedef{args=Args,typespec=Type} = Ptypedef,
NewType = check_ptype(S,Ptypedef,Type#type{inlined=yes}),
MatchedArgs = match_args(S,Args, ParaList, []),
OldArgs = S#state.parameters,
- NewS = S#state{type=NewType,parameters=MatchedArgs++OldArgs,abscomppath=[]},
-%% NewS = S#state{type=NewType,parameters=MatchedArgs,abscomppath=[]},
+ NewS = S#state{parameters=MatchedArgs++OldArgs,abscomppath=[]},
check_type(NewS, Ptypedef#ptypedef{typespec=NewType}, NewType).
-get_datastr_name(#typedef{name=N}) ->
- N;
-get_datastr_name(#classdef{name=N}) ->
- N;
-get_datastr_name(#valuedef{name=N}) ->
- N;
-get_datastr_name(#ptypedef{name=N}) ->
- N;
-get_datastr_name(#pvaluedef{name=N}) ->
- N;
-get_datastr_name(#pvaluesetdef{name=N}) ->
- N;
-get_datastr_name(#pobjectdef{name=N}) ->
- N;
-get_datastr_name(#pobjectsetdef{name=N}) ->
- N.
-
+get_datastr_name(Type) ->
+ asn1ct:get_name_of_def(Type).
get_pt_args(#ptypedef{args=Args}) ->
Args;
@@ -3606,8 +3080,8 @@ match_args(S,FA = [FormArg|Ft], AA = [ActArg|At], Acc) ->
end;
match_args(_S,[], [], Acc) ->
lists:reverse(Acc);
-match_args(_,_, _, _) ->
- throw({error,{asn1,{wrong_number_of_arguments}}}).
+match_args(S, _, _, _) ->
+ asn1_error(S, param_wrong_number_of_arguments).
%%%%%%%%%%%%%%%%%
%% categorize_arg(S,FormalArg,ActualArg) -> {FormalArg,CatgorizedActualArg}
@@ -3652,11 +3126,6 @@ parameter_name_style(#'Externaltypereference'{}) ->
parameter_name_style(#'Externalvaluereference'{}) ->
beginning_lowercase.
-is_lowercase(X) when X >= $A,X =< $W ->
- false;
-is_lowercase(_) ->
- true.
-
%% categorize(Parameter) -> CategorizedParameter
%% If Parameter has an abstract syntax of another category than
%% Category, transform it to a known syntax.
@@ -3705,725 +3174,503 @@ parse_objectset(Set) ->
Set.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% check_constraints/2
-%%
-check_constraints(S,C) when is_list(C) ->
- check_constraints(S, C, []).
-
-resolv_tuple_or_list(S,List) when is_list(List) ->
- lists:map(fun(X)->resolv_value(S,X) end, List);
-resolv_tuple_or_list(S,{Lb,Ub}) ->
- {resolv_value(S,Lb),resolv_value(S,Ub)}.
-
-%%%-----------------------------------------
-%% If the constraint value is a defined value the valuename
-%% is replaced by the actual value
%%
-resolv_value(S,Val) ->
- Id = match_parameters(S,Val, S#state.parameters),
- resolv_value1(S,Id).
+%% Check and simplify constraints.
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-resolv_value1(S, ERef = #'Externalvaluereference'{value=Name}) ->
- case catch resolve_namednumber(S, S#state.type, Name) of
- V when is_integer(V) ->
- V;
- _ ->
- case get_referenced_type(S,ERef) of
- {Err,_Reason} when Err == error; Err == 'EXIT' ->
- throw({error,{asn1,{undefined_type_or_value,
- Name}}});
- {_M,VDef} ->
- resolv_value1(S,VDef)
- end
- end;
-resolv_value1(S, {gt,V}) ->
- case resolv_value1(S, V) of
- Int when is_integer(Int) ->
- Int + 1;
- Other ->
- throw({error,{asn1,{not_integer_value,Other}}})
- end;
-resolv_value1(S, {lt,V}) ->
- case resolv_value1(S, V) of
- Int when is_integer(Int) ->
- Int - 1;
- Other ->
- throw({error,{asn1,{not_integer_value,Other}}})
- end;
-resolv_value1(S,{'ValueFromObject',{object,Object},[{valuefieldreference,
- FieldName}]}) ->
- %% FieldName can hold either a fixed-type value or a variable-type value
- %% Object is a DefinedObject, i.e. a #'Externaltypereference'
- resolve_value_from_object(S,Object,FieldName);
-resolv_value1(_,#valuedef{checked=true,value=V}) ->
- V;
-resolv_value1(S,#valuedef{type=_T,
- value={'ValueFromObject',{object,Object},
- [{valuefieldreference,
- FieldName}]}}) ->
- resolve_value_from_object(S,Object,FieldName);
-resolv_value1(S,VDef = #valuedef{}) ->
- #valuedef{value=Val} = check_value(S,VDef),
- Val;
-resolv_value1(_,V) ->
- V.
-resolve_value_from_object(S,Object,FieldName) ->
- {_,ObjTDef} = get_referenced_type(S,Object),
- TS = check_object(S,ObjTDef,ObjTDef#typedef.typespec),
- {_,_,Components} = TS#'Object'.def,
- case lists:keysearch(FieldName,1,Components) of
- {value,{_,#valuedef{value=Val}}} ->
- Val;
- _ ->
- error({value,"illegal value in constraint",S})
+check_constraints(_S, _HostType, []) ->
+ [];
+check_constraints(S, HostType0, [_|_]=Cs0) ->
+ HostType = get_real_host_type(HostType0, Cs0),
+ Cs1 = top_level_intersections(Cs0),
+ Cs2 = [coalesce_constraints(C) || C <- Cs1],
+ {_,Cs3} = filter_extensions(Cs2),
+ Cs = simplify_element_sets(S, HostType, Cs3),
+ finish_constraints(Cs).
+
+get_real_host_type(HostType, Cs) ->
+ case lists:keyfind(ocft, 1, Cs) of
+ false -> HostType;
+ {_,OCFT} -> HostType#type{def=OCFT}
end.
+top_level_intersections([{element_set,{intersection,_,_}=C,none}]) ->
+ top_level_intersections_1(C);
+top_level_intersections(Cs) ->
+ Cs.
+
+top_level_intersections_1({intersection,A,B}) ->
+ [{element_set,A,none}|top_level_intersections_1(B)];
+top_level_intersections_1(Other) ->
+ [{element_set,Other,none}].
+
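%% Editor's illustration (not part of this change): a single top-level
%% constraint written as an intersection, e.g. (A ^ B ^ C), is split into
%% one element set per operand so that each part can be simplified
%% separately:
%%
%%   top_level_intersections([{element_set,
%%                             {intersection,A,{intersection,B,C}},none}])
%%       -> [{element_set,A,none},{element_set,B,none},{element_set,C,none}]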
+coalesce_constraints({element_set,
+ {Tag,{element_set,A,_}},
+ {Tag,{element_set,B,_}}}) ->
+    %% (SIZE (C1)), ..., (SIZE (C2)) => (SIZE (C1, ..., C2))
+ {element_set,{Tag,{element_set,A,B}},none};
+coalesce_constraints(Other) ->
+ Other.
+
+%% Remove all outermost extensions except the last.
+
+filter_extensions([H0|T0]) ->
+ case filter_extensions(T0) of
+ {true,T} ->
+ H = remove_extension(H0),
+ {true,[H|T]};
+ {false,T} ->
+ {any_extension(H0),[H0|T]}
+ end;
+filter_extensions([]) ->
+ {false,[]}.
-resolve_namednumber(S,#typedef{typespec=Type},Name) ->
- case Type#type.def of
- {'ENUMERATED',NameList} ->
- resolve_namednumber_1(S, Name, NameList, Type);
- {'INTEGER',NameList} ->
- resolve_namednumber_1(S, Name, NameList, Type);
+remove_extension({element_set,Root,_}) ->
+ {element_set,remove_extension(Root),none};
+remove_extension(Tuple) when is_tuple(Tuple) ->
+ L = [remove_extension(El) || El <- tuple_to_list(Tuple)],
+ list_to_tuple(L);
+remove_extension(Other) -> Other.
+
+any_extension({element_set,_,Ext}) when Ext =/= none ->
+ true;
+any_extension(Tuple) when is_tuple(Tuple) ->
+ any_extension_tuple(1, Tuple);
+any_extension(_) -> false.
+
+any_extension_tuple(I, T) when I =< tuple_size(T) ->
+ any_extension(element(I, T)) orelse any_extension_tuple(I+1, T);
+any_extension_tuple(_, _) -> false.
+
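%% Editor's illustration (not part of this change): when two constraints
%% both carry an extension part, only the last extension survives; the
%% earlier one is stripped (assuming R1 contains no nested element sets):
%%
%%   filter_extensions([{element_set,R1,E1},{element_set,R2,E2}])
%%       -> {true,[{element_set,R1,none},{element_set,R2,E2}]}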
+simplify_element_sets(S, HostType, [{element_set,R0,E0}|T0]) ->
+ R1 = simplify_element_set(S, HostType, R0),
+ E1 = simplify_element_set(S, HostType, E0),
+ case simplify_element_sets(S, HostType, T0) of
+ [{element_set,R2,E2}] ->
+ [{element_set,cs_intersection(S, R1, R2),
+ cs_intersection(S, E1, E2)}];
+ L when is_list(L) ->
+ [{element_set,R1,E1}|L]
+ end;
+simplify_element_sets(S, HostType, [H|T]) ->
+ [H|simplify_element_sets(S, HostType, T)];
+simplify_element_sets(_, _, []) ->
+ [].
+
+simplify_element_set(_S, _HostType, empty) ->
+ {set,[]};
+simplify_element_set(S, HostType, {'SingleValue',Vs0}) when is_list(Vs0) ->
+ Vs1 = [resolve_value(S, HostType, V) || V <- Vs0],
+ Vs = make_constr_set_vs(Vs1),
+ simplify_element_set(S, HostType, Vs);
+simplify_element_set(S, HostType, {'SingleValue',V0}) ->
+ V1 = resolve_value(S, HostType, V0),
+ V = {set,[{range,V1,V1}]},
+ simplify_element_set(S, HostType, V);
+simplify_element_set(S, HostType, {'ValueRange',{Lb0,Ub0}}) ->
+ Lb = resolve_value(S, HostType, Lb0),
+ Ub = resolve_value(S, HostType, Ub0),
+ V = make_constr_set(S, Lb, Ub),
+ simplify_element_set(S, HostType, V);
+simplify_element_set(S, HostType, {'ALL-EXCEPT',Set0}) ->
+ Set = simplify_element_set(S, HostType, Set0),
+ {'ALL-EXCEPT',Set};
+simplify_element_set(S, HostType, {intersection,A0,B0}) ->
+ A = simplify_element_set(S, HostType, A0),
+ B = simplify_element_set(S, HostType, B0),
+ cs_intersection(S, A, B);
+simplify_element_set(S, HostType, {union,A0,B0}) ->
+ A = simplify_element_set(S, HostType, A0),
+ B = simplify_element_set(S, HostType, B0),
+ cs_union(S, A, B);
+simplify_element_set(S, HostType, {simpletable,{element_set,Type,_}}) ->
+ check_simpletable(S, HostType, Type);
+simplify_element_set(S, _, {componentrelation,R,Id}) ->
+ check_componentrelation(S, R, Id);
+simplify_element_set(S, HostType, {Tag,{element_set,_,_}=El0}) ->
+ [El1] = simplify_element_sets(S, HostType, [El0]),
+ {Tag,El1};
+simplify_element_set(S, HostType, #type{}=Type) ->
+ simplify_element_set_type(S, HostType, Type);
+simplify_element_set(_, _, C) ->
+ C.
+
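%% Editor's illustration (not part of this change): single values and
%% value ranges are rewritten into the internal set representation
%% described further down, assuming resolve_value/3 returns literal
%% integers unchanged:
%%
%%   {'SingleValue',5}          -> {set,[{range,5,5}]}
%%   {'ValueRange',{1,10}}      -> {set,[{range,1,10}]}
%%   {'ValueRange',{'MIN',10}}  -> {set,[{a_range,10}]}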
+simplify_element_set_type(S, HostType, #type{def=Def0}=Type0) ->
+ #'Externaltypereference'{} = Def0, %Assertion.
+ case get_referenced_type(S, Def0) of
+ {_,#valuedef{checked=false,value={valueset,Vs0}}} ->
+ [Vs1] = simplify_element_sets(S, HostType, [Vs0]),
+ case Vs1 of
+ {element_set,Set,none} ->
+ Set;
+ {element_set,Set,{set,[]}} ->
+ Set
+ end;
+ {_,{valueset,#type{def=#'Externaltypereference'{}}=Type}} ->
+ simplify_element_set_type(S, HostType, Type);
_ ->
- not_enumerated
+ case HostType of
+ #type{def=#'ObjectClassFieldType'{}} ->
+ %% Open type.
+ #type{def=Def} = check_type(S, HostType, Type0),
+ Def;
+ _ ->
+ #type{constraint=Cs} = check_type(S, HostType, Type0),
+ C = convert_back(Cs),
+ simplify_element_set(S, HostType, C)
+ end
end.
-resolve_namednumber_1(S, Name, NameList, Type) ->
- NamedNumberList = check_enumerated(S, NameList, Type#type.constraint),
- {_,N} = lookup_enum_value(S, Name, NamedNumberList),
- N.
-
-check_constraints(S,[{'ContainedSubtype',Type} | Rest], Acc) ->
- {RefMod,CTDef} = get_referenced_type(S,Type#type.def),
- NewS = S#state{module=load_asn1_module(S,RefMod),mname=RefMod,
- type=CTDef,tname=get_datastr_name(CTDef)},
- CType = check_type(NewS,S#state.tname,CTDef#typedef.typespec),
- check_constraints(S,Rest,CType#type.constraint ++ Acc);
-check_constraints(S,[C | Rest], Acc) ->
- check_constraints(S,Rest,[check_constraint(S,C) | Acc]);
-check_constraints(S,[],Acc) ->
- constraint_merge(S,Acc).
-
-
-range_check(F={FixV,FixV}) ->
-% FixV;
- F;
-range_check(VR={Lb,Ub}) when Lb < Ub ->
- VR;
-range_check(Err={_,_}) ->
- throw({error,{asn1,{illegal_size_constraint,Err}}});
-range_check(Value) ->
- Value.
-
-check_constraint(S,Ext) when is_record(Ext,'Externaltypereference') ->
- check_externaltypereference(S,Ext);
-
-
-check_constraint(S,{'SizeConstraint',{Lb,Ub}})
- when is_list(Lb); tuple_size(Lb) =:= 2 ->
- NewLb = range_check(resolv_tuple_or_list(S,Lb)),
- NewUb = range_check(resolv_tuple_or_list(S,Ub)),
- {'SizeConstraint',{NewLb,NewUb}};
-check_constraint(S,{'SizeConstraint',{Lb,Ub}}) ->
- case {resolv_value(S,Lb),resolv_value(S,Ub)} of
- {FixV,FixV} ->
- {'SizeConstraint',FixV};
- {Low,High} when Low < High ->
- {'SizeConstraint',{Low,High}};
- Err ->
- throw({error,{asn1,{illegal_size_constraint,Err}}})
- end;
-check_constraint(S,{'SizeConstraint',Lb}) ->
- {'SizeConstraint',resolv_value(S,Lb)};
+convert_back([H1,H2|T]) ->
+ {intersection,H1,convert_back([H2|T])};
+convert_back([H]) ->
+ H;
+convert_back([]) ->
+ none.
-check_constraint(S,{'SingleValue', L}) when is_list(L) ->
- F = fun(A) -> resolv_value(S,A) end,
- {'SingleValue',lists:sort(lists:map(F,L))};
-
-check_constraint(S,{'SingleValue', V}) when is_integer(V) ->
- Val = resolv_value(S,V),
-%% [{'SingleValue',Val},{'ValueRange',{Val,Val}}]; % Why adding value range?
- {'SingleValue',Val};
-check_constraint(S,{'SingleValue', V}) ->
- {'SingleValue',resolv_value(S,V)};
-
-check_constraint(S,{'ValueRange', {Lb, Ub}}) ->
- {'ValueRange',{resolv_value(S,Lb),resolv_value(S,Ub)}};
-%% In case of a constraint with extension marks like (1..Ub,...)
-check_constraint(S,{VR={'ValueRange', {_Lb, _Ub}},Rest}) ->
- {check_constraint(S,VR),Rest};
-check_constraint(_S,{'PermittedAlphabet',PA}) ->
- {'PermittedAlphabet',permitted_alphabet_cnstr(PA)};
-
-check_constraint(S,{valueset,Type}) ->
- {valueset,check_type(S,S#state.tname,Type)};
-
-check_constraint(_S,ST={simpletable,Type}) when is_atom(Type) ->
- %% An already checked constraint
- ST;
-check_constraint(S,{simpletable,Type}) ->
+check_simpletable(S, HostType, Type) ->
+ case HostType of
+ #type{def=#'ObjectClassFieldType'{}} ->
+ ok;
+ _ ->
+ %% Table constraints may only be applied to
+ %% CLASS.&field constructs.
+ asn1_error(S, illegal_table_constraint)
+ end,
Def = case Type of
#type{def=D} -> D;
- {'SingleValue',ObjRef = #'Externalvaluereference'{}} ->
- ObjRef
+ {'SingleValue',#'Externalvaluereference'{}=ObjRef} ->
+ ObjRef;
+ _ ->
+ asn1_error(S, invalid_table_constraint)
end,
- C = match_parameters(S,Def,S#state.parameters),
+ C = match_parameter(S, Def),
case C of
#'Externaltypereference'{} ->
- ERef = check_externaltypereference(S,C),
- {simpletable,ERef#'Externaltypereference'.type};
- #type{def=#'Externaltypereference'{}=ExtTypeRef} ->
- ERef = check_externaltypereference(S, ExtTypeRef),
+ ERef = check_externaltypereference(S, C),
{simpletable,ERef#'Externaltypereference'.type};
- {valueset,#type{def=ERef=#'Externaltypereference'{}}} -> % this is an object set
- {_,TDef} = get_referenced_type(S,ERef),
- case TDef#typedef.typespec of
- #'ObjectSet'{} ->
- check_object(S,TDef,TDef#typedef.typespec),
- {simpletable,ERef#'Externaltypereference'.type};
- Err ->
- exit({error,{internal_error,Err}})
- end;
#'Externalvaluereference'{} ->
%% This is an object set with a referenced object
- {_,TorVDef} = get_referenced_type(S,C),
- GetObjectSet =
- fun(#typedef{typespec=O}) when is_record(O,'Object') ->
- #'ObjectSet'{class=O#'Object'.classname,
- set={'SingleValue',C}};
- (#valuedef{type=Cl,value=O})
- when is_record(O,'Externalvaluereference'),
- is_record(Cl,type) ->
- %% an object might reference another object
- #'ObjectSet'{class=Cl#type.def,
- set={'SingleValue',O}};
- (Err) ->
- exit({error,{internal_error,simpletable_constraint,Err}})
- end,
- ObjSet = GetObjectSet(TorVDef),
- {simpletable,check_object(S,Type,ObjSet)};
- #'ObjectSet'{} ->
- io:format("ALERT: simpletable forbidden case!~n",[]),
- {simpletable,check_object(S,Type,C)};
- {'ValueFromObject',{_,ORef},FieldName} ->
- %% This is an ObjectFromObject
- {_,Object} = get_referenced_type(S,ORef),
- ChObject = check_object(S,Object,
- Object#typedef.typespec),
- ObjFromObj=
- get_fieldname_element(S,Object#typedef{
- typespec=ChObject},
- FieldName),
- {simpletable,ObjFromObj};
-%% ObjFromObj#typedef{checked=true,typespec=
-%% check_object(S,ObjFromObj,
-%% ObjFromObj#typedef.typespec)}};
- _ ->
- check_type(S,S#state.tname,Type),%% this seems stupid.
- OSName = Def#'Externaltypereference'.type,
- {simpletable,OSName}
- end;
+ {_,TorVDef} = get_referenced_type(S, C),
+ Set = case TorVDef of
+ #typedef{typespec=#'Object'{classname=ClassName}} ->
+ #'ObjectSet'{class=ClassName,
+ set={'SingleValue',C}};
+ #valuedef{type=#type{def=ClassDef},
+ value=#'Externalvaluereference'{}=Obj} ->
+ %% an object might reference another object
+ #'ObjectSet'{class=ClassDef,
+ set={'SingleValue',Obj}}
+ end,
+ {simpletable,check_object(S, Type, Set)};
+ {'ValueFromObject',{_,Object},FieldNames} ->
+ %% This is an ObjectFromObject.
+ {simpletable,extract_field(S, Object, FieldNames)}
+ end.
-check_constraint(S,{componentrelation,{objectset,Opos,Objset},Id}) ->
+check_componentrelation(S, {objectset,Opos,Objset0}, Id) ->
%% Objset is an 'Externaltypereference' record, since Objset is
%% a DefinedObjectSet.
- RealObjset = match_parameters(S,Objset,S#state.parameters),
- ObjSetRef =
- case RealObjset of
- #'Externaltypereference'{} -> RealObjset;
- #type{def=#'Externaltypereference'{}} -> RealObjset#type.def;
- {valueset,OS = #type{def=#'Externaltypereference'{}}} -> OS#type.def
- end,
- Ext = check_externaltypereference(S,ObjSetRef),
- {componentrelation,{objectset,Opos,Ext},Id};
+ ObjSet = match_parameter(S, Objset0),
+ Ext = check_externaltypereference(S, ObjSet),
+ {componentrelation,{objectset,Opos,Ext},Id}.
+
+%%%
+%%% Internal set representation.
+%%%
+%%% We represent sets as a union of strictly disjoint ranges:
+%%%
+%%% {set,[Range]}
+%%%
+%%% A range is represented as:
+%%%
+%%% Range = {a_range,UpperBound} | {range,LowerBound,UpperBound}
+%%%
+%%% We don't use the atom 'MIN' to represent MIN, because atoms
+%%% compare higher than integers. Instead we use {a_range,UpperBound}
+%%% to represent MIN..UpperBound. We represent MAX as 'MAX' because
+%%% 'MAX' compares higher than any integer.
+%%%
+%%% The ranges are sorted in term order. The ranges must not overlap
+%%% or be adjacent to each other. This invariant is established when
+%%% creating sets, and maintained by the intersection and union
+%%% operators.
+%%%
+%%% Examples of invalid set representations:
+%%%
+%%% [{range,0,10},{range,5,10}] %Overlapping ranges
+%%%    [{range,0,5},{range,6,10}]     %Adjacent ranges
+%%% [{range,10,20},{a_range,100}] %Not sorted
+%%%
+
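%% Editor's illustration (not part of this change): the subtype
%% MIN..10 together with 20..30 is represented as
%%
%%   {set,[{a_range,10},{range,20,30}]}
%%
%% The 2-tuple {a_range,10} sorts before any 3-tuple {range,_,_}, so the
%% term order of the list agrees with the numeric order of the ranges.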
+make_constr_set(_, 'MIN', Ub) ->
+ {set,[{a_range,make_constr_set_val(Ub)}]};
+make_constr_set(_, Lb, Ub) when Lb =< Ub ->
+ {set,[{range,make_constr_set_val(Lb),
+ make_constr_set_val(Ub)}]};
+make_constr_set(S, _, _) ->
+ asn1_error(S, reversed_range).
+
+make_constr_set_val([C]) when is_integer(C) -> C;
+make_constr_set_val(Val) -> Val.
+
+make_constr_set_vs(Vs) ->
+ {set,make_constr_set_vs_1(Vs)}.
+
+make_constr_set_vs_1([]) ->
+ [];
+make_constr_set_vs_1([V]) ->
+ [{range,V,V}];
+make_constr_set_vs_1([V0|Vs]) ->
+ V1 = make_constr_set_vs_1(Vs),
+ range_union([{range,V0,V0}], V1).
+
+%%%
+%%% Set operators.
+%%%
+
+cs_intersection(_S, Other, none) ->
+ Other;
+cs_intersection(_S, none, Other) ->
+ Other;
+cs_intersection(_S, {set,SetA}, {set,SetB}) ->
+ {set,range_intersection(SetA, SetB)};
+cs_intersection(_S, A, B) ->
+ {intersection,A,B}.
+
+range_intersection([], []) ->
+ [];
+range_intersection([_|_], []) ->
+ [];
+range_intersection([], [_|_]) ->
+ [];
+range_intersection([H1|_]=A, [H2|_]=B) when H1 > H2 ->
+ range_intersection(B, A);
+range_intersection([H1|T1], [H2|T2]=B) ->
+ %% Now H1 =< H2.
+ case {H1,H2} of
+ {{a_range,Ub0},{a_range,Ub1}} when Ub0 < Ub1 ->
+ %% Ub0 =/= 'MAX'
+ [H1|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])];
+ {{a_range,_},{a_range,_}} ->
+ %% Must be equal.
+ [H1|range_intersection(T1, T2)];
+ {{a_range,Ub0},{range,Lb1,_Ub1}} when Ub0 < Lb1 ->
+ %% No intersection.
+ range_intersection(T1, B);
+ {{a_range,Ub0},{range,Lb1,Ub1}} when Ub0 < Ub1 ->
+ %% Ub0 =/= 'MAX'
+ [{range,Lb1,Ub0}|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])];
+ {{a_range,Ub},{range,_Lb1,Ub}} ->
+ %% The first range covers the second range, but does not
+ %% go beyond. We handle this case specially because Ub may
+ %% be 'MAX', and evaluating 'MAX'+1 will fail.
+ [H2|range_intersection(T1, T2)];
+ {{a_range,Ub0},{range,_Lb1,Ub1}} ->
+ %% Ub0 > Ub1, Ub1 =/= 'MAX'. The first range completely
+ %% covers and extends beyond the second range.
+ [H2|range_intersection([{range,Ub1+1,Ub0}|T1], T2)];
+ {{range,_Lb0,Ub0},{range,Lb1,_Ub1}} when Ub0 < Lb1 ->
+ %% Lb0 < Lb1. No intersection.
+ range_intersection(T1, B);
+ {{range,_Lb0,Ub0},{range,Lb1,Ub1}} when Ub0 < Ub1 ->
+ %% Ub0 >= Lb1, Ub0 =/= 'MAX'. Partial overlap.
+ [{range,Lb1,Ub0}|range_intersection(T1, [{range,Ub0+1,Ub1}|T2])];
+ {{range,_Lb0,Ub},{range,_Lb1,Ub}} ->
+ %% The first range covers the second range, but does not
+ %% go beyond. We handle this case specially because Ub may
+ %% be 'MAX', and evaluating 'MAX'+1 will fail.
+ [H2|range_intersection(T1, T2)];
+ {{range,_Lb0,Ub0},{range,_Lb1,Ub1}} ->
+ %% Ub1 =/= MAX. The first range completely covers and
+ %% extends beyond the second.
+ [H2|range_intersection([{range,Ub1+1,Ub0}|T1], T2)]
+ end.
-check_constraint(S,Type) when is_record(Type,type) ->
- #type{def=Def} = check_type(S,S#state.tname,Type),
- Def;
+cs_union(_S, {set,SetA}, {set,SetB}) ->
+ {set,range_union(SetA, SetB)};
+cs_union(_S, A, B) ->
+ {union,A,B}.
+
+range_union(A, B) ->
+ range_union_1(lists:merge(A, B)).
+
+range_union_1([{a_range,Ub0},{a_range,Ub1}|T]) ->
+ range_union_1([{a_range,max(Ub0, Ub1)}|T]);
+range_union_1([{a_range,Ub0},{range,Lb1,Ub1}|T]) when Lb1-1 =< Ub0 ->
+ range_union_1([{a_range,max(Ub0, Ub1)}|T]);
+range_union_1([{a_range,_}=H|T]) ->
+ %% Ranges are disjoint.
+ [H|range_union_1(T)];
+range_union_1([{range,Lb0,Ub0},{range,Lb1,Ub1}|T]) when Lb1-1 =< Ub0 ->
+ range_union_1([{range,Lb0,max(Ub0, Ub1)}|T]);
+range_union_1([{range,_,_}=H|T]) ->
+ %% Ranges are disjoint.
+ [H|range_union_1(T)];
+range_union_1([]) ->
+ [].
-check_constraint(S,C) when is_list(C) ->
- lists:map(fun(X)->check_constraint(S,X) end,C);
-% else keep the constraint unchanged
-check_constraint(_S,Any) ->
-% io:format("Constraint = ~p~n",[Any]),
- Any.
-
-permitted_alphabet_cnstr(T) when is_tuple(T) ->
- permitted_alphabet_cnstr([T]);
-permitted_alphabet_cnstr(L) when is_list(L) ->
- VRexpand = fun({'ValueRange',{A,B}}) ->
- {'SingleValue',expand_valuerange(A,B)};
- (Other) ->
- Other
- end,
- L2 = lists:map(VRexpand,L),
- %% first perform intersection
- L3 = permitted_alphabet_intersection(L2),
- [Res] = permitted_alphabet_union(L3),
- Res.
+%%%
+%%% Finish up constraints, making them suitable for the back-ends.
+%%%
+%%% A 'PermittedAlphabet' (FROM) constraint will be reduced to:
+%%%
+%%% {'SingleValue',[integer()]}
+%%%
+%%% A 'SizeConstraint' (SIZE) constraint will be reduced to:
+%%%
+%%% {Lb,Ub}
+%%%
+%%% All other constraints will be reduced to:
+%%%
+%%% {'SingleValue',[integer()]} | {'ValueRange',Lb,Ub}
+%%%
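+%%% For example (illustrative values only): SIZE (1..10) is reduced
+%%% to {1,10}, FROM ("ab") to {'SingleValue',[$a,$b]}, and the value
+%%% range (1..10) to {'ValueRange',{1,10}}.
+%%%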
+
+finish_constraints(Cs) ->
+ finish_constraints_1(Cs, fun smart_collapse/1).
+
+finish_constraints_1([{element_set,{Tag,{element_set,_,_}=Set0},none}|T],
+ Collapse0) ->
+ Collapse = collapse_fun(Tag),
+ case finish_constraints_1([Set0], Collapse) of
+ [] ->
+ finish_constraints_1(T, Collapse0);
+ [Set] ->
+ [{Tag,Set}|finish_constraints_1(T, Collapse0)]
+ end;
+finish_constraints_1([{element_set,{set,[{a_range,'MAX'}]},_}|T], Collapse) ->
+ finish_constraints_1(T, Collapse);
+finish_constraints_1([{element_set,{intersection,A0,B0},none}|T], Collapse) ->
+ A = {element_set,A0,none},
+ B = {element_set,B0,none},
+ finish_constraints_1([A,B|T], Collapse);
+finish_constraints_1([{element_set,Root,Ext}|T], Collapse) ->
+ case finish_constraint(Root, Ext, Collapse) of
+ none ->
+ finish_constraints_1(T, Collapse);
+ Constr ->
+ [Constr|finish_constraints_1(T, Collapse)]
+ end;
+finish_constraints_1([H|T], Collapse) ->
+ [H|finish_constraints_1(T, Collapse)];
+finish_constraints_1([], _) ->
+ [].
-expand_valuerange([A],[A]) ->
- [A];
-expand_valuerange([A],[B]) when A < B ->
- [A|expand_valuerange([A+1],[B])].
+finish_constraint({set,Root0}, Ext, Collapse) ->
+ case Collapse(Root0) of
+ none -> none;
+ Root -> finish_constraint(Root, Ext, Collapse)
+ end;
+finish_constraint(Root, Ext, _Collapse) ->
+ case Ext of
+ none -> Root;
+ _ -> {Root,[]}
+ end.
-permitted_alphabet_intersection(C) ->
- permitted_alphabet_merge(C,intersection, []).
+collapse_fun('SizeConstraint') ->
+ fun size_constraint_collapse/1;
+collapse_fun('PermittedAlphabet') ->
+ fun single_value_collapse/1.
-permitted_alphabet_union(C) ->
- permitted_alphabet_merge(C,union, []).
+single_value_collapse(V) ->
+ {'SingleValue',ordsets:from_list(single_value_collapse_1(V))}.
-permitted_alphabet_merge([],_,Acc) ->
- lists:reverse(Acc);
-permitted_alphabet_merge([{'SingleValue',L1},
- UorI,
- {'SingleValue',L2}|Rest],UorI,Acc)
- when is_list(L1),is_list(L2) ->
- UI = ordsets:UorI([ordsets:from_list(L1),ordsets:from_list(L2)]),
- permitted_alphabet_merge([{'SingleValue',UI}|Rest],UorI,Acc);
-permitted_alphabet_merge([C1|Rest],UorI,Acc) ->
- permitted_alphabet_merge(Rest,UorI,[C1|Acc]).
-
-
-%% constraint_merge/2
-%% Compute the intersection of the outermost level of the constraint list.
-%% See Dubuisson second paragraph and fotnote on page 285.
-%% If constraints with extension are included in combined constraints. The
-%% resulting combination will have the extension of the last constraint. Thus,
-%% there will be no extension if the last constraint is without extension.
-%% The rootset of all constraints are considered in the "outermoust
-%% intersection". See section 13.1.2 in Dubuisson.
-constraint_merge(St, Cs0) ->
- Cs = constraint_merge_1(St, Cs0),
- normalize_cs(Cs).
-
-normalize_cs([{'SingleValue',[V]}|Cs]) ->
- [{'SingleValue',V}|normalize_cs(Cs)];
-normalize_cs([{'SingleValue',[_|_]=L0}|Cs]) ->
- [H|T] = L = lists:usort(L0),
- [case is_range(H, T) of
- false -> {'SingleValue',L};
- true -> {'ValueRange',{H,lists:last(T)}}
- end|normalize_cs(Cs)];
-normalize_cs([{'ValueRange',{Sv,Sv}}|Cs]) ->
- [{'SingleValue',Sv}|normalize_cs(Cs)];
-normalize_cs([{'ValueRange',{'MIN','MAX'}}|Cs]) ->
- normalize_cs(Cs);
-normalize_cs([{'SizeConstraint',C0}|Cs]) ->
- case normalize_size_constraint(C0) of
- none ->
- normalize_cs(Cs);
- C ->
- [{'SizeConstraint',C}|normalize_cs(Cs)]
- end;
-normalize_cs([H|T]) ->
- [H|normalize_cs(T)];
-normalize_cs([]) -> [].
+single_value_collapse_1([{range,Lb,Ub}|T]) when is_integer(Lb),
+ is_integer(Ub) ->
+ lists:seq(Lb, Ub) ++ single_value_collapse_1(T);
+single_value_collapse_1([]) ->
+ [].
-%% Normalize a size constraint to make it non-ambiguous and
-%% easy to interpret for the backends.
-%%
-%% Returns one of the following terms:
-%% {LowerBound,UpperBound}
-%% {{LowerBound,UpperBound},[]} % Extensible
-%% none % Remove size constraint from list
-%%
-%% where:
-%% LowerBound = integer()
-%% UpperBound = integer() | 'MAX'
-
-normalize_size_constraint(Sv) when is_integer(Sv) ->
- {Sv,Sv};
-normalize_size_constraint({Root,Ext}) when is_list(Ext) ->
- {normalize_size_constraint(Root),[]};
-normalize_size_constraint({{_,_},Ext}) when is_integer(Ext) ->
- normalize_size_constraint(Ext);
-normalize_size_constraint([H|T]) ->
- {H,lists:last(T)};
-normalize_size_constraint({0,'MAX'}) ->
+smart_collapse([{a_range,Ub}]) ->
+ {'ValueRange',{'MIN',Ub}};
+smart_collapse([{a_range,_}|T]) ->
+ {range,_,Ub} = lists:last(T),
+ {'ValueRange',{'MIN',Ub}};
+smart_collapse([{range,Lb,Ub}]) ->
+ {'ValueRange',{Lb,Ub}};
+smart_collapse([_|_]=L) ->
+ V = lists:foldr(fun({range,Lb,Ub}, A) ->
+ seq(Lb, Ub) ++ A
+ end, [], L),
+ {'SingleValue',V}.
+
+size_constraint_collapse([{range,0,'MAX'}]) ->
none;
-normalize_size_constraint({Lb,Ub}=Range)
- when is_integer(Lb), is_integer(Ub) orelse Ub =:= 'MAX' ->
- Range.
+size_constraint_collapse(Root) ->
+ [{range,Lb,_}|_] = Root,
+ {range,_,Ub} = lists:last(Root),
+ {Lb,Ub}.
-is_range(Prev, [H|T]) when Prev =:= H - 1 -> is_range(H, T);
-is_range(_, [_|_]) -> false;
-is_range(_, []) -> true.
+seq(Same, Same) ->
+ [Same];
+seq(Lb, Ub) when is_integer(Lb), is_integer(Ub) ->
+ lists:seq(Lb, Ub).
-constraint_merge_1(_S, [H]=C) when is_tuple(H) ->
- C;
-constraint_merge_1(_S, []) ->
- [];
-constraint_merge_1(S, C) ->
- %% skip all extension but the last extension
- C1 = filter_extensions(C),
- %% perform all internal level intersections, intersections first
- %% since they have precedence over unions
- C2 = lists:map(fun(X)when is_list(X)->constraint_intersection(S,X);
- (X) -> X end,
- C1),
- %% perform all internal level unions
- C3 = lists:map(fun(X)when is_list(X)->constraint_union(S,X);
- (X) -> X end,
- C2),
-
- %% now get intersection of the outermost level
- %% get the least common single value constraint
- SVs = get_constraints(C3,'SingleValue'),
- CombSV = intersection_of_sv(S,SVs),
- %% get the least common value range constraint
- VRs = get_constraints(C3,'ValueRange'),
- CombVR = intersection_of_vr(S,VRs),
- %% get the least common size constraint
- SZs = get_constraints(C3,'SizeConstraint'),
- CombSZ = intersection_of_size(S,SZs),
- RestC = ordsets:subtract(ordsets:from_list(C3),
- ordsets:from_list(SZs ++ VRs ++ SVs)),
- %% get the least common combined constraint. That is the union of each
- %% deep constraint and merge of single value and value range constraints.
- %% FIXME: Removing 'intersection' from the flattened list essentially
- %% means that intersections are converted to unions!
- Cs = combine_constraints(S, CombSV, CombVR, CombSZ++RestC),
- [X || X <- lists:flatten(Cs),
- X =/= intersection,
- X =/= union].
-
-%% constraint_union(S,C) takes a list of constraints as input and
-%% merge them to a union. Unions are performed when two
-%% constraints is found with an atom union between.
-%% The list may be nested. Fix that later !!!
-constraint_union(_S,[]) ->
- [];
-constraint_union(_S,C=[_E]) ->
- C;
-constraint_union(S,C) when is_list(C) ->
- case lists:member(union,C) of
- true ->
- constraint_union1(S,C,[]);
- _ ->
- C
- end;
-% SV = get_constraints(C,'SingleValue'),
-% SV1 = constraint_union_sv(S,SV),
-% VR = get_constraints(C,'ValueRange'),
-% VR1 = constraint_union_vr(VR),
-% RestC = ordsets:filter(fun({'SingleValue',_})->false;
-% ({'ValueRange',_})->false;
-% (_) -> true end,ordsets:from_list(C)),
-% SV1++VR1++RestC;
-constraint_union(_S,C) ->
- [C].
-
-constraint_union1(S, [{'ValueRange',{Lb1,Ub1}},union,
- {'ValueRange',{Lb2,Ub2}}|Rest], Acc) ->
- AunionB = {'ValueRange',{c_min(Lb1, Lb2),max(Ub1, Ub2)}},
- constraint_union1(S, [AunionB|Rest], Acc);
-constraint_union1(S,[A={'SingleValue',_},union,B={'SingleValue',_}|Rest],Acc) ->
- AunionB = constraint_union_sv(S,[A,B]),
- constraint_union1(S,Rest,Acc ++ AunionB);
-constraint_union1(S,[A={'SingleValue',_},union,B={'ValueRange',_}|Rest],Acc) ->
- AunionB = union_sv_vr(S,A,B),
- constraint_union1(S, AunionB++Rest, Acc);
-constraint_union1(S,[A={'ValueRange',_},union,B={'SingleValue',_}|Rest],Acc) ->
- AunionB = union_sv_vr(S,B,A),
- constraint_union1(S, AunionB++Rest, Acc);
-constraint_union1(S,[union|Rest],Acc) -> %skip when unsupported constraints
- constraint_union1(S,Rest,Acc);
-constraint_union1(S,[A|Rest],Acc) ->
- constraint_union1(S,Rest,[A|Acc]);
-constraint_union1(_S,[],Acc) ->
- Acc.
+%%%-----------------------------------------
+%% If the constraint value is a defined value, the value name is
+%% replaced by the actual value.
+%%
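+%% For example (hypothetical definitions), given
+%%
+%%   maxSize INTEGER ::= 100
+%%   Data ::= OCTET STRING (SIZE (1..maxSize))
+%%
+%% the reference maxSize in the size constraint resolves to 100.
+%%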
+resolve_value(S, HostType, Val) ->
+ Id = match_parameter(S, Val),
+ resolve_value1(S, HostType, Id).
-constraint_union_sv(_S,SV) ->
- Values=lists:map(fun({_,V})->V end,SV),
- case ordsets:from_list(Values) of
- [] -> [];
- [N] -> [{'SingleValue',N}];
- L -> [{'SingleValue',L}]
- end.
-c_min('MIN', _) -> 'MIN';
-c_min(_, 'MIN') -> 'MIN';
-c_min(A, B) -> min(A, B).
-
-union_sv_vr(_S,{'SingleValue',SV},VR)
- when is_integer(SV) ->
- union_sv_vr(_S,{'SingleValue',[SV]},VR);
-union_sv_vr(_S,{'SingleValue',SV},{'ValueRange',{VLb,VUb}})
- when is_list(SV) ->
- L = lists:sort(SV++[VLb,VUb]),
- {Lb,L1} = case lists:member('MIN',L) of
- true -> {'MIN',L--['MIN']}; % remove 'MIN' so it does not disturb
- false -> {hd(L),tl(L)}
- end,
- Ub = case lists:member('MAX',L1) of
- true -> 'MAX';
- false -> lists:last(L1)
- end,
- case SV of
- [H] -> H;
- _ -> SV
- end,
- %% for now we through away the Singlevalues so that they don't disturb
- %% in the code generating phase (the effective Valuerange is already
- %% calculated. If we want to keep the Singlevalues as well for
- %% use in code gen phases we need to introduce a new representation
- %% like {'ValueRange',{Lb,Ub},[ListOfRanges|AntiValues|Singlevalues]
- %% These could be used to generate guards which allows only the specific
- %% values , not the full range
- [{'ValueRange',{Lb,Ub}}].
-
-
-%% get_constraints/2
-%% Arguments are a list of constraints, which has the format {key,value},
-%% and a constraint type
-%% Returns a list of constraints only of the requested type or the atom
-%% 'no' if no such constraints were found
-get_constraints(L=[{CType,_}],CType) ->
- L;
-get_constraints(C,CType) ->
- keysearch_allwithkey(CType,1,C).
-
-%% keysearch_allwithkey(Key,Ix,L)
-%% Types:
-%% Key = is_atom()
-%% Ix = integer()
-%% L = [TwoTuple]
-%% TwoTuple = [{atom(),term()}|...]
-%% Returns a List that contains all
-%% elements from L that has a key Key as element Ix
-keysearch_allwithkey(Key,Ix,L) ->
- lists:filter(fun(X) when is_tuple(X) ->
- case element(Ix,X) of
- Key -> true;
- _ -> false
- end;
- (_) -> false
- end, L).
-
-
-%% filter_extensions(C)
-%% takes a list of constraints as input and returns a list with the
-%% constraints and all extensions but the last are removed.
-filter_extensions([L]) when is_list(L) ->
- [filter_extensions(L)];
-filter_extensions(C=[_H]) ->
- C;
-filter_extensions(C) when is_list(C) ->
- filter_extensions(C,[], []).
-
-filter_extensions([],Acc,[]) ->
- Acc;
-filter_extensions([],Acc,[EC|ExtAcc]) ->
- CwoExt = remove_extension(ExtAcc,[]),
- CwoExt ++ [EC|Acc];
-filter_extensions([C={A,_E}|T],Acc,ExtAcc) when is_tuple(A) ->
- filter_extensions(T,Acc,[C|ExtAcc]);
-filter_extensions([C={'SizeConstraint',{A,_B}}|T],Acc,ExtAcc)
- when is_list(A);is_tuple(A) ->
- filter_extensions(T,Acc,[C|ExtAcc]);
-filter_extensions([C={'PermittedAlphabet',{{'SingleValue',_},E}}|T],Acc,ExtAcc)
- when is_tuple(E); is_list(E) ->
- filter_extensions(T,Acc,[C|ExtAcc]);
-filter_extensions([H|T],Acc,ExtAcc) ->
- filter_extensions(T,[H|Acc],ExtAcc).
-
-remove_extension([],Acc) ->
- Acc;
-remove_extension([{'SizeConstraint',{A,_B}}|R],Acc) ->
- remove_extension(R,[{'SizeConstraint',A}|Acc]);
-remove_extension([{C,_E}|R],Acc) when is_tuple(C) ->
- remove_extension(R,[C|Acc]);
-remove_extension([{'PermittedAlphabet',{A={'SingleValue',_},
- E}}|R],Acc)
- when is_tuple(E);is_list(E) ->
- remove_extension(R,[{'PermittedAlphabet',A}|Acc]).
-
-%% constraint_intersection(S,C) takes a list of constraints as input and
-%% performs intersections. Intersecions are performed when an
-%% atom intersection is found between two constraints.
-%% The list may be nested. Fix that later !!!
-constraint_intersection(_S,[]) ->
- [];
-constraint_intersection(_S,C=[_E]) ->
- C;
-constraint_intersection(S,C) when is_list(C) ->
-% io:format("constraint_intersection: ~p~n",[C]),
- case lists:member(intersection,C) of
- true ->
- constraint_intersection1(S,C,[]);
- _ ->
- C
+resolve_value1(S, HostType, #'Externalvaluereference'{value=Name}=ERef) ->
+ case resolve_namednumber(S, HostType, Name) of
+ V when is_integer(V) ->
+ V;
+ not_named ->
+ resolve_value1(S, HostType, get_referenced_value(S, ERef))
end;
-constraint_intersection(_S,C) ->
- [C].
-
-constraint_intersection1(S,[A,intersection,B|Rest],Acc) ->
- AisecB = c_intersect(S,A,B),
- constraint_intersection1(S, AisecB++Rest, Acc);
-constraint_intersection1(S,[A|Rest],Acc) ->
- constraint_intersection1(S,Rest,[A|Acc]);
-constraint_intersection1(_, [], [C]) ->
- C;
-constraint_intersection1(_,[],Acc) ->
- lists:reverse(Acc).
-
-c_intersect(S,C1={'SingleValue',_},C2={'SingleValue',_}) ->
- intersection_of_sv(S,[C1,C2]);
-c_intersect(S,C1={'ValueRange',_},C2={'ValueRange',_}) ->
- intersection_of_vr(S,[C1,C2]);
-c_intersect(S,C1={'ValueRange',_},C2={'SingleValue',_}) ->
- intersection_sv_vr(S,[C2],[C1]);
-c_intersect(S,C1={'SingleValue',_},C2={'ValueRange',_}) ->
- intersection_sv_vr(S,[C1],[C2]);
-c_intersect(_S,C1,C2) ->
- [C1,C2].
-
-%% combine_constraints(S,SV,VR,CComb)
-%% Types:
-%% S = is_record(state,S)
-%% SV = [] | [SVC]
-%% VR = [] | [VRC]
-%% CComb = [] | [Lists]
-%% SVC = {'SingleValue',integer()} | {'SingleValue',[integer(),...]}
-%% VRC = {'ValueRange',{Lb,Ub}}
-%% Lists = List of lists containing any constraint combination
-%% Lb = 'MIN' | integer()
-%% Ub = 'MAX' | integer()
-%% Returns a combination of the least common constraint among SV,VR and all
-%% elements in CComb
-combine_constraints(_S,[],VR,CComb) ->
- VR ++ CComb;
-% combine_combined_cnstr(S,VR,CComb);
-combine_constraints(_S,SV,[],CComb) ->
- SV ++ CComb;
-% combine_combined_cnstr(S,SV,CComb);
-combine_constraints(S,SV,VR,CComb) ->
- C=intersection_sv_vr(S,SV,VR),
- C ++ CComb.
-% combine_combined_cnstr(S,C,CComb).
-
-intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2={'ValueRange',{_Lb,_Ub}}])
- when is_integer(SV) ->
- case is_int_in_vr(SV,C2) of
- true -> [C1];
- _ -> %%error({type,{"asn1 illegal constraint",C1,C2},S})
- %throw({error,{"asn1 illegal constraint",C1,C2}})
- %io:format("warning: could not analyze constraint ~p~n",[[C1,C2]]),
- [C1,C2]
+resolve_value1(S, HostType, {gt,V}) ->
+ case resolve_value1(S, HostType, V) of
+ Int when is_integer(Int) ->
+ Int + 1;
+ _Other ->
+ asn1_error(S, illegal_integer_value)
end;
-intersection_sv_vr(_S,[C1={'SingleValue',SV}],[C2])
- when is_list(SV) ->
- case lists:filter(fun(X)->is_int_in_vr(X,C2) end,SV) of
- [] ->
- %%error({type,{"asn1 illegal constraint",C1,C2},S});
- %throw({error,{"asn1 illegal constraint",C1,C2}});
- %io:format("warning: could not analyze constraint ~p~n",[[C1,C2]]),
- [C1,C2];
- [V] -> [{'SingleValue',V}];
- L -> [{'SingleValue',L}]
- end.
-
-
-%% Size constraint [{'SizeConstraint',1},{'SizeConstraint',{{1,64},[]}}]
-
-intersection_of_size(_,[]) ->
- [];
-intersection_of_size(_,C=[_SZ]) ->
- C;
-intersection_of_size(S,[SZ,SZ|Rest]) ->
- intersection_of_size(S,[SZ|Rest]);
-intersection_of_size(S,C=[C1={_,Int},{_,Range}|Rest])
- when is_integer(Int),is_tuple(Range) ->
- case Range of
- {Lb,Ub} when Int >= Lb,
- Int =< Ub ->
- intersection_of_size(S,[C1|Rest]);
- {{Lb,Ub},Ext} when is_list(Ext),Int >= Lb,Int =< Ub ->
- intersection_of_size(S,[C1|Rest]);
- _ ->
- throw({error,{asn1,{illegal_size_constraint,C}}})
+resolve_value1(S, HostType, {lt,V}) ->
+ case resolve_value1(S, HostType, V) of
+ Int when is_integer(Int) ->
+ Int - 1;
+ _Other ->
+ asn1_error(S, illegal_integer_value)
end;
-intersection_of_size(S,[C1={_,Range},C2={_,Int}|Rest])
- when is_integer(Int),is_tuple(Range) ->
- intersection_of_size(S,[C2,C1|Rest]);
-intersection_of_size(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) ->
- Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])),
- Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])),
- intersection_of_size(S,[{'SizeConstraint',{Lb,Ub}}|Rest]);
-intersection_of_size(_,SZ) ->
- throw({error,{asn1,{illegal_size_constraint,SZ}}}).
-
-intersection_of_vr(_,[]) ->
- [];
-intersection_of_vr(_,VR=[_C]) ->
- VR;
-intersection_of_vr(S,[{_,{Lb1,Ub1}},{_,{Lb2,Ub2}}|Rest]) ->
- Lb=greatest_LB(ordsets:from_list([Lb1,Lb2])),
- Ub=smallest_UB(ordsets:from_list([Ub1,Ub2])),
- intersection_of_vr(S,[{'ValueRange',{Lb,Ub}}|Rest]);
-intersection_of_vr(_S,VR) ->
- %%error({type,{asn1,{illegal_value_range_constraint,VR}},S});
- throw({error,{asn1,{illegal_value_range_constraint,VR}}}).
-
-intersection_of_sv(_,[]) ->
- [];
-intersection_of_sv(_,SV=[_C]) ->
- SV;
-intersection_of_sv(S,[SV,SV|Rest]) ->
- intersection_of_sv(S,[SV|Rest]);
-intersection_of_sv(S,[{_,Int},{_,SV}|Rest]) when is_integer(Int),
- is_list(SV) ->
- SV2=intersection_of_sv1(S,Int,SV),
- intersection_of_sv(S,[SV2|Rest]);
-intersection_of_sv(S,[{_,SV},{_,Int}|Rest]) when is_integer(Int),
- is_list(SV) ->
- SV2=intersection_of_sv1(S,Int,SV),
- intersection_of_sv(S,[SV2|Rest]);
-intersection_of_sv(S,[{_,SV1},{_,SV2}|Rest]) when is_list(SV1),
- is_list(SV2) ->
- SV3=common_set(SV1,SV2),
- intersection_of_sv(S,[SV3|Rest]);
-intersection_of_sv(_S,SV) ->
- %%error({type,{asn1,{illegal_single_value_constraint,SV}},S}).
- throw({error,{asn1,{illegal_single_value_constraint,SV}}}).
-
-intersection_of_sv1(_S,Int,SV) when is_integer(Int),is_list(SV) ->
- case lists:member(Int,SV) of
- true -> {'SingleValue',Int};
+resolve_value1(S, _HostType, {'ValueFromObject',{object,Object},FieldName}) ->
+ get_value_from_object(S, Object, FieldName);
+resolve_value1(_, _, #valuedef{checked=true,value=V}) ->
+ V;
+resolve_value1(S, _, #valuedef{value={'ValueFromObject',
+ {object,Object},FieldName}}) ->
+ get_value_from_object(S, Object, FieldName);
+resolve_value1(S, _HostType, #valuedef{}=VDef) ->
+ #valuedef{value=Val} = check_value(S,VDef),
+ Val;
+resolve_value1(_, _, V) ->
+ V.
+
+resolve_namednumber(S, #type{def=Def}, Name) ->
+ case Def of
+ {'ENUMERATED',NameList} ->
+ resolve_namednumber_1(S, Name, NameList);
+ {'INTEGER',NameList} ->
+ resolve_namednumber_1(S, Name, NameList);
_ ->
- %%error({type,{asn1,{illegal_single_value_constraint,Int,SV}},S})
- throw({error,{asn1,{illegal_single_value_constraint,Int,SV}}})
- end;
-intersection_of_sv1(_S,SV1,SV2) ->
- %%error({type,{asn1,{illegal_single_value_constraint,SV1,SV2}},S}).
- throw({error,{asn1,{illegal_single_value_constraint,SV1,SV2}}}).
+ not_named
+ end.
-greatest_LB([H]) ->
- H;
-greatest_LB(L) ->
- greatest_LB1(lists:reverse(L)).
-greatest_LB1(['MIN',H2|_T])->
- H2;
-greatest_LB1([H|_T]) ->
- H.
-smallest_UB(L) ->
- hd(L).
-
-common_set(SV1,SV2) ->
- lists:filter(fun(X)->lists:member(X,SV1) end,SV2).
-
-is_int_in_vr(Int,{_,{'MIN','MAX'}}) when is_integer(Int) ->
- true;
-is_int_in_vr(Int,{_,{'MIN',Ub}}) when is_integer(Int),Int =< Ub ->
- true;
-is_int_in_vr(Int,{_,{Lb,'MAX'}}) when is_integer(Int),Int >= Lb ->
- true;
-is_int_in_vr(Int,{_,{Lb,Ub}}) when is_integer(Int),Int >= Lb,Int =< Ub ->
- true;
-is_int_in_vr(_,_) ->
- false.
-
+resolve_namednumber_1(S, Name, NameList) ->
+ try
+ NamedNumberList = check_enumerated(S, NameList),
+ {_,N} = lookup_enum_value(S, Name, NamedNumberList),
+ N
+ catch _:_ ->
+ not_named
+ end.
+
+%%%
+%%% End of constraint handling.
+%%%
check_imported(S,Imodule,Name) ->
check_imported(S,Imodule,Name,false).
@@ -4510,18 +3757,28 @@ check_reference(S,#'Externaltypereference'{pos=Pos,module=Emod,type=Name}) ->
#'Externaltypereference'{pos=Pos,module=ModName,type=Name}
end.
+get_referenced_value(S, T) ->
+ case get_referenced_type(S, T) of
+ {ExtMod,#valuedef{value=#'Externalvaluereference'{}=Ref}} ->
+ get_referenced_value(update_state(S, ExtMod), Ref);
+ {_,#valuedef{value=Val}} ->
+ Val
+ end.
+
get_referenced_type(S, T) ->
+ get_referenced_type(S, T, false).
+
+get_referenced_type(S, T, Recurse) ->
case do_get_referenced_type(S, T) of
- {_,#type{def=#'Externaltypereference'{}=ERef}} ->
- get_referenced_type(S, ERef);
- {_,#type{def=#'Externalvaluereference'{}=VRef}} ->
- get_referenced_type(S, VRef);
+ {_,#typedef{typespec=#type{def=#'Externaltypereference'{}=ERef}}}
+ when Recurse ->
+ get_referenced_type(S, ERef, Recurse);
{_,_}=Res ->
Res
end.
-do_get_referenced_type(#state{parameters=Ps}=S, T0) ->
- case match_parameters(S, T0, Ps) of
+do_get_referenced_type(S, T0) ->
+ case match_parameter(S, T0) of
T0 ->
do_get_ref_type_1(S, T0);
T ->
@@ -4563,7 +3820,7 @@ get_referenced(S,Emod,Ename,Pos) ->
%% May be an imported entity in module Emod or Emod may not exist
case asn1_db:dbget(Emod,'MODULE') of
undefined ->
- throw({error,{asn1,{module_not_found,Emod}}});
+ asn1_error(S, {undefined_import, Ename, Emod});
_ ->
NewS = update_state(S,Emod),
get_imported(NewS,Ename,Emod,Pos)
@@ -4593,12 +3850,11 @@ get_imported(S,Name,Module,Pos) ->
parse_and_save(S,Imodule),
case asn1_db:dbget(Imodule,'MODULE') of
undefined ->
- throw({error,{asn1,{module_not_found,Imodule}}});
+ asn1_error(S, {undefined_import, Name, Module});
Im when is_record(Im,module) ->
case is_exported(Im,Name) of
false ->
- throw({error,
- {asn1,{not_exported,{Im,Name}}}});
+ asn1_error(S, {undefined_export, Name});
_ ->
?dbg("get_imported, is_exported ~p, ~p~n",[Imodule,Name]),
get_referenced_type(S,
@@ -4611,37 +3867,6 @@ get_imported(S,Name,Module,Pos) ->
get_renamed_reference(S,Name,Module)
end.
-check_and_save(S,#'Externaltypereference'{module=M}=ERef,#typedef{checked=false}=TDef,Settings)
- when S#state.mname /= M ->
- %% This ERef is an imported type (or maybe a set.asn compilation)
- NewS = S#state{mname=M,module=load_asn1_module(S,M),
- type=TDef,tname=get_datastr_name(TDef)},
- Type=check_type(NewS,TDef,TDef#typedef.typespec),%XXX
- CheckedTDef = TDef#typedef{checked=true,
- typespec=Type},
- asn1_db:dbput(M,get_datastr_name(TDef),CheckedTDef),
- {merged_name(S,ERef),Settings};
-check_and_save(S,#'Externaltypereference'{module=M,type=N}=Eref,
- #ptypedef{name=Name,args=Params} = PTDef,Settings) ->
- %% instantiate a parameterized type
- %% The parameterized type should be saved as a type in the module
- %% it was instantiated.
- NewS = S#state{mname=M,module=load_asn1_module(S,M),
- type=PTDef,tname=Name},
- {Args,RestSettings} = lists:split(length(Params),Settings),
- Type = check_type(NewS,PTDef,#type{def={pt,Eref,Args}}),
- ERefName = new_reference_name(N),
- ERefNew = #'Externaltypereference'{type=ERefName,module=S#state.mname},
- NewTDef=#typedef{checked=true,name=ERefName,
- typespec=Type},
- insert_once(S,parameterized_objects,{ERefName,type,NewTDef}),
- asn1_db:dbput(S#state.mname,ERefNew#'Externaltypereference'.type,
- NewTDef),
- {ERefNew,RestSettings};
-check_and_save(_S,ERef,TDef,Settings) ->
- %% This might be a renamed type in a set of specs, so rename the ERef
- {ERef#'Externaltypereference'{type=asn1ct:get_name_of_def(TDef)},Settings}.
-
save_object_set_instance(S,Name,ObjSetSpec)
when is_record(ObjSetSpec,'ObjectSet') ->
NewObjSet = #typedef{checked=true,name=Name,typespec=ObjSetSpec},
@@ -4708,18 +3933,14 @@ update_state(S,ModuleName) ->
S;
_ ->
parse_and_save(S,ModuleName),
- case asn1_db:dbget(ModuleName,'MODULE') of
- RefedMod when is_record(RefedMod,module) ->
- S#state{mname=ModuleName,module=RefedMod};
- _ -> throw({error,{asn1,{module_does_not_exist,ModuleName}}})
- end
+ Mod = #module{} = asn1_db:dbget(ModuleName,'MODULE'),
+ S#state{mname=ModuleName,module=Mod}
end.
-
get_renamed_reference(S,Name,Module) ->
case renamed_reference(S,Name,Module) of
undefined ->
- throw({error,{asn1,{undefined_type,Name}}});
+ asn1_error(S, {undefined, Name});
NewTypeName when NewTypeName =/= Name ->
get_referenced1(S,Module,NewTypeName,undefined)
end.
@@ -4770,37 +3991,49 @@ get_importmoduleoftype([I|Is],Name) ->
get_importmoduleoftype([],_) ->
undefined.
+match_parameters(S, Names) ->
+ [match_parameter(S, Name) || Name <- Names].
-match_parameters(_S,Name,[]) ->
- Name;
+match_parameter(#state{parameters=Ps}=S, Name) ->
+ match_parameter(S, Name, Ps).
-match_parameters(_S,#'Externaltypereference'{type=Name},[{#'Externaltypereference'{type=Name},NewName}|_T]) ->
+match_parameter(_S, Name, []) ->
+ Name;
+match_parameter(S, {valueset,{element_set,#type{}=Ts,none}}, Ps) ->
+ match_parameter(S, {valueset,Ts}, Ps);
+match_parameter(_S, #'Externaltypereference'{type=Name},
+ [{#'Externaltypereference'{type=Name},NewName}|_T]) ->
NewName;
-match_parameters(_S,#'Externaltypereference'{type=Name},[{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
+match_parameter(_S, #'Externaltypereference'{type=Name},
+ [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
NewName;
-match_parameters(_S,#'Externalvaluereference'{value=Name},[{#'Externalvaluereference'{value=Name},NewName}|_T]) ->
+match_parameter(_S, #'Externalvaluereference'{value=Name},
+ [{#'Externalvaluereference'{value=Name},NewName}|_T]) ->
NewName;
-match_parameters(_S,#'Externalvaluereference'{value=Name},[{{_,#'Externalvaluereference'{value=Name}},NewName}|_T]) ->
+match_parameter(_S, #'Externalvaluereference'{value=Name},
+ [{{_,#'Externalvaluereference'{value=Name}},NewName}|_T]) ->
NewName;
-match_parameters(_S,#type{def=#'Externaltypereference'{module=M,type=Name}},
- [{#'Externaltypereference'{module=M,type=Name},Type}]) ->
+match_parameter(_S, #type{def=#'Externaltypereference'{module=M,type=Name}},
+ [{#'Externaltypereference'{module=M,type=Name},Type}]) ->
Type;
-match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}},
- [{{_,#'Externaltypereference'{type=Name}},{valueset,#type{def=NewName}}}|_T]) ->
+match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},
+ {valueset,#type{def=NewName}}}|_T]) ->
NewName;
-match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}},
- [{{_,#'Externaltypereference'{type=Name}},
- NewName=#type{def=#'Externaltypereference'{}}}|_T]) ->
+match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},
+ NewName=#type{def=#'Externaltypereference'{}}}|_T]) ->
NewName#type.def;
-match_parameters(_S,{valueset,#type{def=#'Externaltypereference'{type=Name}}},
- [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
+match_parameter(_S, {valueset,#type{def=#'Externaltypereference'{type=Name}}},
+ [{{_,#'Externaltypereference'{type=Name}},NewName}|_T]) ->
NewName;
%% When a parameter is a parameterized element it has to be
%% instantiated now!
-match_parameters(S,{valueset,T=#type{def={pt,_,_Args}}},_Parameters) ->
- case catch check_type(S,#typedef{name=S#state.tname,typespec=T},T) of
- pobjectsetdef ->
-
+match_parameter(S, {valueset,T=#type{def={pt,_,_Args}}}, _Ps) ->
+ try check_type(S,#typedef{name=S#state.tname,typespec=T},T) of
+ #type{def=Ts} ->
+ Ts
+ catch pobjectsetdef ->
{_,ObjRef,_Params} = T#type.def,
{_,ObjDef}=get_referenced_type(S,ObjRef),
%%ObjDef is a pvaluesetdef where the type field holds the class
@@ -4818,17 +4051,15 @@ match_parameters(S,{valueset,T=#type{def={pt,_,_Args}}},_Parameters) ->
ObjectSet = #'ObjectSet'{class=RightClassRef,set=T},
ObjSpec = check_object(S,#typedef{typespec=ObjectSet},ObjectSet),
Name = list_to_atom(asn1ct_gen:list2name([get_datastr_name(ObjDef)|S#state.recordtopname])),
- save_object_set_instance(S,Name,ObjSpec);
- pvaluesetdef -> error({pvaluesetdef,"parameterized valueset",S});
- {error,_Reason} -> error({type,"error in parameter",S});
- Ts when is_record(Ts,type) -> Ts#type.def
+ save_object_set_instance(S,Name,ObjSpec)
end;
+
%% same as previous, only depends on order of parsing
-match_parameters(S,{valueset,{pos,{objectset,_,POSref},Args}},Parameters) ->
- match_parameters(S,{valueset,#type{def={pt,POSref,Args}}},Parameters);
-match_parameters(S,Name, [_H|T]) ->
- %%io:format("match_parameters(~p,~p)~n",[Name,[H|T]]),
- match_parameters(S,Name,T).
+match_parameter(S, {valueset,{pos,{objectset,_,POSref},Args}}, Ps) ->
+ match_parameter(S, {valueset,#type{def={pt,POSref,Args}}}, Ps);
+match_parameter(S, Name, [_H|T]) ->
+ %%io:format("match_parameter(~p,~p)~n",[Name,[H|T]]),
+ match_parameter(S, Name, T).
imported(S,Name) ->
{imports,Ilist} = (S#state.module)#module.imports,
@@ -4854,7 +4085,6 @@ check_named_number_list(_S, [{_,_}|_]=NNL) ->
NNL;
check_named_number_list(S, NNL0) ->
%% Check that the names are unique.
- T = S#state.type,
case check_unique(NNL0, 2) of
[] ->
NNL1 = [{Id,resolve_valueref(S, Val)} || {'NamedNumber',Id,Val} <- NNL0],
@@ -4863,14 +4093,14 @@ check_named_number_list(S, NNL0) ->
[] ->
NNL;
[Val|_] ->
- asn1_error(S, T, {value_reused,Val})
+ asn1_error(S, {value_reused,Val})
end;
[H|_] ->
- asn1_error(S, T, {namelist_redefinition,H})
+ asn1_error(S, {namelist_redefinition,H})
end.
-resolve_valueref(S, #'Externalvaluereference'{module=Mod,value=Name}) ->
- dbget_ex(S, Mod, Name);
+resolve_valueref(S, #'Externalvaluereference'{} = T) ->
+ get_referenced_value(S, T);
resolve_valueref(_, Val) when is_integer(Val) ->
Val.
@@ -4879,7 +4109,7 @@ check_integer(S, NNL) ->
check_bitstring(S, NNL0) ->
NNL = check_named_number_list(S, NNL0),
- _ = [asn1_error(S, S#state.type, {invalid_bit_number,Bit}) ||
+ _ = [asn1_error(S, {invalid_bit_number,Bit}) ||
{_,Bit} <- NNL, Bit < 0],
NNL.
@@ -4904,7 +4134,7 @@ check_type_identifier(S, Eref=#'Externaltypereference'{type=Class}) ->
{_,TD=#typedef{typespec=#type{def=#'Externaltypereference'{}}}} ->
check_type_identifier(S, (TD#typedef.typespec)#type.def);
_ ->
- asn1_error(S, S#state.type, {illegal_instance_of,Class})
+ asn1_error(S, {illegal_instance_of,Class})
end.
iof_associated_type(S,[]) ->
@@ -4913,12 +4143,7 @@ iof_associated_type(S,[]) ->
case get(instance_of) of
undefined ->
AssociateSeq = iof_associated_type1(S,[]),
- Tag =
- case S#state.erule of
- ber ->
- [?TAG_CONSTRUCTED(?N_INSTANCE_OF)];
- _ -> []
- end,
+ Tag = [?TAG_CONSTRUCTED(?N_INSTANCE_OF)],
TypeDef=#typedef{checked=true,
name='INSTANCE OF',
typespec=#type{tag=Tag,
@@ -4944,16 +4169,11 @@ iof_associated_type1(S,C) ->
[] -> 'ASN1_OPEN_TYPE';
_ -> {typefield,'Type'}
end,
- {ObjIdTag,C1TypeTag}=
- case S#state.erule of
- ber ->
- {[{'UNIVERSAL',8}],
- [#tag{class='UNIVERSAL',
- number=6,
- type='IMPLICIT',
- form=0}]};
- _ -> {[{'UNIVERSAL','INTEGER'}],[]}
- end,
+ ObjIdTag = [{'UNIVERSAL',8}],
+ C1TypeTag = [#tag{class='UNIVERSAL',
+ number=6,
+ type='IMPLICIT',
+ form=0}],
TypeIdentifierRef=#'Externaltypereference'{module=ModuleName,
type='TYPE-IDENTIFIER'},
ObjectIdentifier =
@@ -4992,9 +4212,13 @@ iof_associated_type1(S,C) ->
%% returns the leading attribute, the constraint of the components and
%% the tablecinf value for the second component.
-instance_of_constraints(_,[]) ->
+instance_of_constraints(_, []) ->
{false,[],[],[]};
-instance_of_constraints(S, [{simpletable,Type}]) ->
+instance_of_constraints(S, [{element_set,{simpletable,C},none}]) ->
+ {element_set,Type,none} = C,
+ instance_of_constraints_1(S, Type).
+
+instance_of_constraints_1(S, Type) ->
#type{def=#'Externaltypereference'{type=Name}} = Type,
ModuleName = S#state.mname,
ObjectSetRef=#'Externaltypereference'{module=ModuleName,
@@ -5014,93 +4238,100 @@ instance_of_constraints(S, [{simpletable,Type}]) ->
valueindex=[]},
{TableCInf,[{simpletable,Name}],CRel,[{objfun,ObjectSetRef}]}.
-%% Check ENUMERATED
-%% ****************************************
-%% Check that all values are unique
-%% assign values to un-numbered identifiers
-%% check that the constraints are allowed and correct
-%% put the updated info back into database
-check_enumerated(_S,[{Name,Number}|_Rest]= NNList,_Constr) when is_atom(Name), is_integer(Number)->
- %% already checked , just return the same list
- NNList;
-check_enumerated(_S,{[{Name,Number}|_Rest],L}= NNList,_Constr) when is_atom(Name), is_integer(Number), is_list(L)->
- %% already checked , contains extension marker, just return the same lists
- NNList;
-check_enumerated(S,NamedNumberList,_Constr) ->
- check_enum(S,NamedNumberList,[],[],[]).
-
-%% identifiers are put in Acc2
-%% returns either [{Name,Number}] or {[{Name,Number}],[{ExtName,ExtNumber}]}
-%% the latter is returned if the ENUMERATION contains EXTENSIONMARK
-check_enum(S,[{'NamedNumber',Id,Num}|T],Acc1,Acc2,Root) when is_integer(Num) ->
- check_enum(S,T,[{Id,Num}|Acc1],Acc2,Root);
-check_enum(S,['EXTENSIONMARK'|T],Acc1,Acc2,_Root) ->
- NewAcc2 = lists:keysort(2,Acc1),
- NewList = enum_number(lists:reverse(Acc2),NewAcc2,0,[],[]),
- { NewList, check_enum(S,T,[],[],enum_counts(NewList))};
-check_enum(S,[Id|T],Acc1,Acc2,Root) when is_atom(Id) ->
- check_enum(S,T,Acc1,[Id|Acc2],Root);
-check_enum(_S,[],Acc1,Acc2,Root) ->
- NewAcc2 = lists:keysort(2,Acc1),
- enum_number(lists:reverse(Acc2),NewAcc2,0,[],Root).
-
-
-% assign numbers to identifiers , numbers from 0 ... but must not
-% be the same as already assigned to NamedNumbers
-enum_number(Identifiers,NamedNumbers,Cnt,Acc,[]) ->
- enum_number(Identifiers,NamedNumbers,Cnt,Acc);
-enum_number(Identifiers,NamedNumbers,_Cnt,Acc,CountL) ->
- enum_extnumber(Identifiers,NamedNumbers,Acc,CountL).
-
-enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num > Cnt ->
- enum_number(T,[{Id,Num}|T2],Cnt+1,[{H,Cnt}|Acc]);
-enum_number([H|T],[{Id,Num}|T2],Cnt,Acc) when Num < Cnt -> % negative Num
- enum_number(T,T2,Cnt+1,[{H,Cnt},{Id,Num}|Acc]);
-enum_number([],L2,_Cnt,Acc) ->
- lists:append([lists:reverse(Acc),L2]);
-enum_number(L,[{Id,Num}|T2],Cnt,Acc) -> % Num == Cnt
- enum_number(L,T2,Cnt+1,[{Id,Num}|Acc]);
-enum_number([H|T],[],Cnt,Acc) ->
- enum_number(T,[],Cnt+1,[{H,Cnt}|Acc]).
-
-enum_extnumber(Identifiers,NamedNumbers,Acc,[C]) ->
- check_add_enum_numbers(NamedNumbers,[C]),
- enum_number(Identifiers,NamedNumbers,C,Acc);
-enum_extnumber([H|T],[{Id,Num}|T2],Acc,[C|Counts]) when Num > C ->
- enum_extnumber(T,[{Id,Num}|T2],[{H,C}|Acc],Counts);
-enum_extnumber([],L2,Acc,Cnt) ->
- check_add_enum_numbers(L2, Cnt),
- lists:concat([lists:reverse(Acc),L2]);
-enum_extnumber(_Identifiers,[{Id,Num}|_T2],_Acc,[C|_]) when Num < C ->
-%% enum_extnumber(Identifiers,T2,[{Id,Num}|Acc],Counts);
- exit({error,{asn1,"AdditionalEnumeration element with same number as root element",{Id,Num}}});
-enum_extnumber(Identifiers,[{Id,Num}|T2],Acc,[_C|Counts]) -> % Num =:= C
- enum_extnumber(Identifiers,T2,[{Id,Num}|Acc],Counts);
-enum_extnumber([H|T],[],Acc,[C|Counts]) ->
- enum_extnumber(T,[],[{H,C}|Acc],Counts).
-
-enum_counts([]) ->
- [0];
-enum_counts(L) ->
- Used=[I||{_,I}<-L],
- AddEnumLb = lists:max(Used) + 1,
- lists:foldl(fun(El,AccIn)->lists:delete(El,AccIn) end,
- lists:seq(0,AddEnumLb),
- Used).
-check_add_enum_numbers(L, Cnt) ->
- Max = lists:max(Cnt),
- Fun = fun({_,N}=El) when N < Max ->
- case lists:member(N,Cnt) of
- false ->
- exit({error,{asn1,"AdditionalEnumeration element with same number as root element",El}});
- _ ->
- ok
- end;
- (_) ->
- ok
- end,
- lists:foreach(Fun,L).
+%%%
+%%% Check ENUMERATED.
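+%%%
+%%% As an illustration (hypothetical type), ENUMERATED {a, b(0), c, ..., d}
+%%% is numbered so that explicitly numbered identifiers keep their
+%%% values and the other root identifiers get the lowest unused
+%%% number, giving the root [{b,0},{a,1},{c,2}] and the extension
+%%% [{d,3}].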
+%%%
+check_enumerated(_S, [{Name,Number}|_]=NNL)
+ when is_atom(Name), is_integer(Number) ->
+ %% Already checked.
+ NNL;
+check_enumerated(_S, {[{Name,Number}|_],L}=NNL)
+ when is_atom(Name), is_integer(Number), is_list(L) ->
+ %% Already checked (with extension).
+ NNL;
+check_enumerated(S, NNL) ->
+ check_enum_ids(S, NNL, gb_sets:empty()),
+ check_enum(S, NNL, gb_sets:empty(), []).
+
+check_enum_ids(S, [{'NamedNumber',Id,_}|T], Ids0) ->
+ Ids = check_enum_update_ids(S, Id, Ids0),
+ check_enum_ids(S, T, Ids);
+check_enum_ids(S, ['EXTENSIONMARK'|T], Ids) ->
+ check_enum_ids(S, T, Ids);
+check_enum_ids(S, [Id|T], Ids0) when is_atom(Id) ->
+ Ids = check_enum_update_ids(S, Id, Ids0),
+ check_enum_ids(S, T, Ids);
+check_enum_ids(_, [], _) ->
+ ok.
+
+check_enum(S, [{'NamedNumber',Id,N}|T], Used0, Acc) ->
+ Used = check_enum_update_used(S, Id, N, Used0),
+ check_enum(S, T, Used, [{Id,N}|Acc]);
+check_enum(S, ['EXTENSIONMARK'|Ext0], Used0, Acc0) ->
+ Acc = lists:reverse(Acc0),
+ {Root,Used,Cnt} = check_enum_number_root(Acc, Used0, 0, []),
+ Ext = check_enum_ext(S, Ext0, Used, Cnt, []),
+ {Root,Ext};
+check_enum(S, [Id|T], Used, Acc) when is_atom(Id) ->
+ check_enum(S, T, Used, [Id|Acc]);
+check_enum(_, [], Used, Acc0) ->
+ Acc = lists:reverse(Acc0),
+ {Root,_,_} = check_enum_number_root(Acc, Used, 0, []),
+ lists:keysort(2, Root).
+
+check_enum_number_root([Id|T]=T0, Used0, Cnt, Acc) when is_atom(Id) ->
+ case gb_sets:is_element(Cnt, Used0) of
+ false ->
+ Used = gb_sets:insert(Cnt, Used0),
+ check_enum_number_root(T, Used, Cnt+1, [{Id,Cnt}|Acc]);
+ true ->
+ check_enum_number_root(T0, Used0, Cnt+1, Acc)
+ end;
+check_enum_number_root([H|T], Used, Cnt, Acc) ->
+ check_enum_number_root(T, Used, Cnt, [H|Acc]);
+check_enum_number_root([], Used, Cnt, Acc) ->
+ {lists:keysort(2, Acc),Used,Cnt}.
+
+check_enum_ext(S, [{'NamedNumber',Id,N}|T], Used0, C, Acc) ->
+ Used = check_enum_update_used(S, Id, N, Used0),
+ if
+ N < C ->
+ asn1_error(S, {enum_not_ascending,Id,N,C-1});
+ true ->
+ ok
+ end,
+ check_enum_ext(S, T, Used, N+1, [{Id,N}|Acc]);
+check_enum_ext(S, [Id|T]=T0, Used0, C, Acc) when is_atom(Id) ->
+ case gb_sets:is_element(C, Used0) of
+ true ->
+ check_enum_ext(S, T0, Used0, C+1, Acc);
+ false ->
+ Used = gb_sets:insert(C, Used0),
+ check_enum_ext(S, T, Used, C+1, [{Id,C}|Acc])
+ end;
+check_enum_ext(_, [], _, _, Acc) ->
+ lists:keysort(2, Acc).
+
+check_enum_update_ids(S, Id, Ids) ->
+ case gb_sets:is_element(Id, Ids) of
+ false ->
+ gb_sets:insert(Id, Ids);
+ true ->
+ asn1_error(S, {enum_illegal_redefinition,Id})
+ end.
+
+check_enum_update_used(S, Id, N, Used) ->
+ case gb_sets:is_element(N, Used) of
+ false ->
+ gb_sets:insert(N, Used);
+ true ->
+ asn1_error(S, {enum_reused_value,Id,N})
+ end.
+
+%%%
+%%% End of ENUMERATED checking.
+%%%
check_boolean(_S,_Constr) ->
ok.
@@ -5145,7 +4376,7 @@ check_sequence(S,Type,Comps) ->
CompListTuple = complist_as_tuple(NewComps4),
{CRelInf,CompListTuple};
Dupl ->
- throw({error,{asn1,{duplicate_components,Dupl}}})
+ asn1_error(S, {duplicate_identifier, error_value(hd(Dupl))})
end.
complist_as_tuple(CompList) ->
@@ -5155,8 +4386,6 @@ complist_as_tuple([#'EXTENSIONMARK'{}|T], Acc, Ext, Acc2, root) ->
complist_as_tuple(T, Acc, Ext, Acc2, ext);
complist_as_tuple([#'EXTENSIONMARK'{}|T], Acc, Ext, Acc2, ext) ->
complist_as_tuple(T, Acc, Ext, Acc2, root2);
-complist_as_tuple([#'EXTENSIONMARK'{}|_T], _Acc, _Ext, _Acc2, root2) ->
- throw({error,{asn1,{too_many_extension_marks}}});
complist_as_tuple([C|T], Acc, Ext, Acc2, root) ->
complist_as_tuple(T, [C|Acc], Ext, Acc2, root);
complist_as_tuple([C|T], Acc, Ext, Acc2, ext) ->
@@ -5199,11 +4428,11 @@ expand_components2(S,{_,PT={pt,_,_}}) ->
expand_components2(S,{_,OCFT = #'ObjectClassFieldType'{}}) ->
UncheckedType = #type{def=OCFT},
Type = check_type(S,#typedef{typespec=UncheckedType},UncheckedType),
- expand_components2(S,{undefined,oCFT_def(S,Type)});
+ expand_components2(S, {undefined,ocft_def(Type)});
expand_components2(S,{_,ERef}) when is_record(ERef,'Externaltypereference') ->
expand_components2(S,get_referenced_type(S,ERef));
-expand_components2(_S,Err) ->
- throw({error,{asn1,{illegal_COMPONENTS_OF,Err}}}).
+expand_components2(S,{_, What}) ->
+ asn1_error(S, {illegal_COMPONENTS_OF, error_value(What)}).
take_only_rootset([])->
[];
@@ -5252,7 +4481,7 @@ check_sequenceof(S,Type,Component) when is_record(Component,type) ->
check_set(S,Type,Components) ->
{TableCInf,NewComponents} = check_sequence(S,Type,Components),
- check_distinct_tags(NewComponents,[]),
+ check_unique_tags(S, collect_components(NewComponents), []),
case {lists:member(der,S#state.options),S#state.erule} of
{true,_} ->
{Sorted,SortedComponents} = sort_components(der,S,NewComponents),
@@ -5264,35 +4493,21 @@ check_set(S,Type,Components) ->
{false,TableCInf,NewComponents}
end.
-
-%% check that all tags are distinct according to X.680 26.3
-check_distinct_tags({C1,C2,C3},Acc) when is_list(C1),is_list(C2),is_list(C3) ->
- check_distinct_tags(C1++C2++C3,Acc);
-check_distinct_tags({C1,C2},Acc) when is_list(C1),is_list(C2) ->
- check_distinct_tags(C1++C2,Acc);
-check_distinct_tags([#'ComponentType'{tags=[T]}|Cs],Acc) ->
- check_distinct(T,Acc),
- check_distinct_tags(Cs,[T|Acc]);
-check_distinct_tags([C=#'ComponentType'{tags=[T|Ts]}|Cs],Acc) ->
- check_distinct(T,Acc),
- check_distinct_tags([C#'ComponentType'{tags=Ts}|Cs],[T|Acc]);
-check_distinct_tags([#'ComponentType'{tags=[]}|_Cs],_Acc) ->
- throw({error,"Not distinct tags in SET"});
-check_distinct_tags([],_) ->
- ok.
-check_distinct(T,Acc) ->
- case lists:member(T,Acc) of
- true ->
- throw({error,"Not distinct tags in SET"});
- _ -> ok
- end.
+collect_components({C1,C2,C3}) ->
+ collect_components(C1++C2++C3);
+collect_components({C1,C2}) ->
+ collect_components(C1++C2);
+collect_components(Cs) ->
+ %% Assert that tags are not empty
+ [] = [EmptyTag || EmptyTag = #'ComponentType'{tags=[]} <- Cs],
+ Cs.
%% sorting in canonical order according to X.680 8.6, X.691 9.2
%% DER: all components shall be sorted in canonical order.
%% PER: only root components shall be sorted in canonical order. The
%% extension components shall remain in textual order.
%%
-sort_components(der,S=#state{tname=TypeName},Components) ->
+sort_components(der, S, Components) ->
{R1,Ext,R2} = extension(textual_order(Components)),
CompsList = case Ext of
noext -> R1;
@@ -5300,88 +4515,34 @@ sort_components(der,S=#state{tname=TypeName},Components) ->
end,
case {untagged_choice(S,CompsList),Ext} of
{false,noext} ->
- {true,sort_components1(S,TypeName,CompsList,[],[],[],[])};
+ {true,sort_components1(CompsList)};
{false,_} ->
- {true,{sort_components1(S,TypeName,CompsList,[],[],[],[]), []}};
+ {true,{sort_components1(CompsList),[]}};
{true,noext} ->
%% sort in run-time
{dynamic,R1};
_ ->
{dynamic,{R1, Ext, R2}}
end;
-sort_components(per,S=#state{tname=TypeName},Components) ->
+sort_components(per, S, Components) ->
{R1,Ext,R2} = extension(textual_order(Components)),
Root = tag_untagged_choice(S,R1++R2),
case Ext of
noext ->
- {true,sort_components1(S,TypeName,Root,[],[],[],[])};
+ {true,sort_components1(Root)};
_ ->
- {true,{sort_components1(S,TypeName,Root,[],[],[],[]),
- Ext}}
+ {true,{sort_components1(Root),Ext}}
end.
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'UNIVERSAL',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,[C|UnivAcc],ApplAcc,ContAcc,PrivAcc);
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'APPLICATION',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,UnivAcc,[C|ApplAcc],ContAcc,PrivAcc);
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'CONTEXT',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,UnivAcc,ApplAcc,[C|ContAcc],PrivAcc);
-sort_components1(S,TypeName,[C=#'ComponentType'{tags=[{'PRIVATE',_}|_R]}|Cs],
- UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- sort_components1(S,TypeName,Cs,UnivAcc,ApplAcc,ContAcc,[C|PrivAcc]);
-sort_components1(S,TypeName,[],UnivAcc,ApplAcc,ContAcc,PrivAcc) ->
- I = #'ComponentType'.tags,
- ascending_order_check(S,TypeName,sort_universal_type(UnivAcc)) ++
- ascending_order_check(S,TypeName,lists:keysort(I,ApplAcc)) ++
- ascending_order_check(S,TypeName,lists:keysort(I,ContAcc)) ++
- ascending_order_check(S,TypeName,lists:keysort(I,PrivAcc)).
-
-ascending_order_check(S,TypeName,Components) ->
- ascending_order_check1(S,TypeName,Components),
- Components.
-
-ascending_order_check1(S,TypeName,
- [C1 = #'ComponentType'{tags=[{_,T}|_]},
- C2 = #'ComponentType'{tags=[{_,T}|_]}|Rest]) ->
- asn1ct:warning("Indistinct tag ~p in SET ~p, components ~p and ~p~n",
- [T,TypeName,C1#'ComponentType'.name,C2#'ComponentType'.name],S,
- "Indistinct tag in SET"),
- ascending_order_check1(S,TypeName,[C2|Rest]);
-ascending_order_check1(S,TypeName,
- [C1 = #'ComponentType'{tags=[{'UNIVERSAL',T1}|_]},
- C2 = #'ComponentType'{tags=[{'UNIVERSAL',T2}|_]}|Rest]) ->
- case (decode_type(T1) == decode_type(T2)) of
- true ->
- asn1ct:warning("Indistinct tags ~p and ~p in"
- " SET ~p, components ~p and ~p~n",
- [T1,T2,TypeName,C1#'ComponentType'.name,
- C2#'ComponentType'.name],S,
- "Indistinct tags and in SET"),
- ascending_order_check1(S,TypeName,[C2|Rest]);
- _ ->
- ascending_order_check1(S,TypeName,[C2|Rest])
- end;
-ascending_order_check1(S,N,[_|Rest]) ->
- ascending_order_check1(S,N,Rest);
-ascending_order_check1(_,_,[]) ->
- ok.
-
-sort_universal_type(Components) ->
- List = lists:map(fun(C) ->
- #'ComponentType'{tags=[{_,T}|_]} = C,
- {decode_type(T),C}
- end,
- Components),
- SortedList = lists:keysort(1,List),
- lists:map(fun(X)->element(2,X) end,SortedList).
-
-decode_type(I) when is_integer(I) ->
- I;
-decode_type(T) ->
- asn1ct_gen_ber_bin_v2:decode_type(T).
+sort_components1(Cs0) ->
+ Cs1 = [{tag_key(Tag),C} || #'ComponentType'{tags=[Tag|_]}=C <- Cs0],
+ Cs = lists:sort(Cs1),
+ [C || {_,C} <- Cs].
+
+tag_key({'UNIVERSAL',Tag}) -> {0,Tag};
+tag_key({'APPLICATION',Tag}) -> {1,Tag};
+tag_key({'CONTEXT',Tag}) -> {2,Tag};
+tag_key({'PRIVATE',Tag}) -> {3,Tag}.
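+
+%% As an illustration (hypothetical tags): components tagged
+%% [UNIVERSAL 16], [APPLICATION 3] and [CONTEXT 1] sort in that
+%% order, since tag_key/1 maps the tag classes UNIVERSAL, APPLICATION,
+%% CONTEXT and PRIVATE to 0, 1, 2 and 3, respectively.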
untagged_choice(_S,[#'ComponentType'{typespec=#type{tag=[],def={'CHOICE',_}}}|_Rest]) ->
true;
@@ -5477,35 +4638,43 @@ check_selectiontype(S,Name,#type{def=Eref})
{RefMod,TypeDef} = get_referenced_type(S,Eref),
NewS = S#state{module=load_asn1_module(S,RefMod),
mname=RefMod,
- type=TypeDef,
tname=get_datastr_name(TypeDef)},
check_selectiontype2(NewS,Name,TypeDef);
check_selectiontype(S,Name,Type=#type{def={pt,_,_}}) ->
- TName =
- case S#state.recordtopname of
- [] ->
- S#state.tname;
- N -> N
- end,
+ TName = case S#state.recordtopname of
+ [] -> S#state.tname;
+ N -> N
+ end,
TDef = #typedef{name=TName,typespec=Type},
check_selectiontype2(S,Name,TDef);
-check_selectiontype(S,Name,Type) ->
- Msg = lists:flatten(io_lib:format("SelectionType error: ~w < ~w must be a reference to a CHOICE.",[Name,Type])),
- error({type,Msg,S}).
+check_selectiontype(S, _Name, Type) ->
+ asn1_error(S, {illegal_choice_type, error_value(Type)}).
check_selectiontype2(S,Name,TypeDef) ->
NewS = S#state{recordtopname=get_datastr_name(TypeDef)},
- CheckedType = check_type(NewS,TypeDef,TypeDef#typedef.typespec),
- Components = get_choice_components(S,CheckedType#type.def),
- case lists:keysearch(Name,#'ComponentType'.name,Components) of
- {value,C} ->
- %% The selected type will have the tag of the selected type.
- _T = C#'ComponentType'.typespec;
-% T#type{tag=def_to_tag(NewS,T#type.def)};
- _ ->
- Msg = lists:flatten(io_lib:format("error checking SelectionType: ~w~n",[Name])),
- error({type,Msg,S})
+ Components =
+ try
+ CheckedType = check_type(NewS,TypeDef,TypeDef#typedef.typespec),
+ get_choice_components(S,CheckedType#type.def)
+ catch error:_ ->
+ asn1_error(S, {illegal_choice_type, error_value(TypeDef)})
+ end,
+ case lists:keyfind(Name, #'ComponentType'.name, Components) of
+ #'ComponentType'{typespec=TS} -> TS;
+ false -> asn1_error(S, {illegal_id, error_value(Name)})
end.
+
+
+get_choice_components(_S,{'CHOICE',Components}) when is_list(Components)->
+ Components;
+get_choice_components(_S,{'CHOICE',{C1,C2}}) when is_list(C1),is_list(C2) ->
+ C1++C2;
+get_choice_components(S,ERef=#'Externaltypereference'{}) ->
+ {_RefMod,TypeDef}=get_referenced_type(S,ERef),
+ #typedef{typespec=TS} = TypeDef,
+ get_choice_components(S,TS#type.def).
+
+
check_restrictedstring(_S,_Def,_Constr) ->
ok.
@@ -5538,7 +4707,7 @@ check_choice(S,Type,Components) when is_list(Components) ->
check_unique_tags(S, NewComps3),
complist_as_tuple(NewComps3);
Dupl ->
- throw({error,{asn1,{duplicate_choice_alternatives,Dupl}}})
+ asn1_error(S, {duplicate_identifier,error_value(hd(Dupl))})
end;
check_choice(_S,_,[]) ->
[].
@@ -5635,25 +4804,30 @@ check_unique_tags(S,C) ->
case (S#state.module)#module.tagdefault of
'AUTOMATIC' ->
case any_manual_tag(C) of
- false -> true;
- _ -> collect_and_sort_tags(C,[])
+ false ->
+ true;
+ true ->
+ check_unique_tags(S, C, [])
end;
_ ->
- collect_and_sort_tags(C,[])
+ check_unique_tags(S, C, [])
end.
-collect_and_sort_tags([C|Rest],Acc) when is_record(C,'ComponentType') ->
- collect_and_sort_tags(Rest,C#'ComponentType'.tags ++ Acc);
-collect_and_sort_tags([_|Rest],Acc) ->
- collect_and_sort_tags(Rest,Acc);
-collect_and_sort_tags([],Acc) ->
- {Dupl,_}= lists:mapfoldl(fun(El,El)->{{dup,El},El};(El,_Prev)-> {El,El} end,notag,lists:sort(Acc)),
- Dupl2 = [Dup|| {dup,Dup} <- Dupl],
- if
- length(Dupl2) > 0 ->
- throw({error,{asn1,{duplicates_of_the_tags,Dupl2}}});
- true ->
- true
+check_unique_tags(S, [#'ComponentType'{name=Name,tags=Tags0}|T], Acc) ->
+ Tags = [{Tag,Name} || Tag <- Tags0],
+ check_unique_tags(S, T, Tags ++ Acc);
+check_unique_tags(S, [_|T], Acc) ->
+ check_unique_tags(S, T, Acc);
+check_unique_tags(S, [], Acc) ->
+ R0 = sofs:relation(Acc),
+ R1 = sofs:relation_to_family(R0),
+ R2 = sofs:to_external(R1),
+ Dup = [Els || {_,[_,_|_]=Els} <- R2],
+ case Dup of
+ [] ->
+ ok;
+ [FirstDupl|_] ->
+ asn1_error(S, {duplicate_tags,FirstDupl})
end.
check_unique(L,Pos) ->
@@ -5795,28 +4969,18 @@ componentrelation_leadingattr(S,[C= #'ComponentType'{}|Cs],CompList,Acc,CompAcc)
{[],C};
[{ObjSet,Attr,N,ClassDef,_Path,ValueIndex}|_NewRest] ->
OS = object_set_mod_name(S,ObjSet),
- UniqueFieldName =
- case (catch get_unique_fieldname(S,#classdef{typespec=ClassDef})) of
- {error,'__undefined_',_} ->
- no_unique;
- {asn1,Msg,_} ->
- error({type,Msg,S});
- {'EXIT',Msg} ->
- error({type,{internal_error,Msg},S});
- {Other,_} -> Other
- end,
-% UsedFieldName = get_used_fieldname(S,Attr,STList),
+ UniqFN = get_unique_fieldname(S,
+ #classdef{typespec=ClassDef}),
%% Res should be done differently: even though
%% a unique field name exists it is not
%% certain that the ObjectClassFieldType of
%% the simple table constraint picks that
%% class field.
Res = #simpletableattributes{objectsetname=OS,
-%% c_name=asn1ct_gen:un_hyphen_var(Attr),
c_name=Attr,
c_index=N,
- usedclassfield=UniqueFieldName,
- uniqueclassfield=UniqueFieldName,
+ usedclassfield=UniqFN,
+ uniqueclassfield=UniqFN,
valueindex=ValueIndex},
{[Res],C#'ComponentType'{typespec=NewTSpec}}
end;
@@ -5869,7 +5033,7 @@ remove_doubles1(El,L) ->
NewL -> remove_doubles1(El,NewL)
end.
-%% get_simple_table_info searches the commponents Cs by the path from
+%% get_simple_table_info searches the components Cs by the path from
%% an at-list (third argument), and follows into a component of it if
%% necessary, to get information needed for code generating.
%%
@@ -5884,32 +5048,35 @@ remove_doubles1(El,L) ->
% %% at least one step below the outermost level, i.e. the leading
% %% information shall be on a sub level. 2) They don't have any common
% %% path.
-get_simple_table_info(S,Cs,[AtList|Rest]) ->
- [get_simple_table_info1(S,Cs,AtList,[])|get_simple_table_info(S,Cs,Rest)];
-get_simple_table_info(_,_,[]) ->
- [].
-get_simple_table_info1(S,Cs,[Cname|Cnames],Path) when is_list(Cs) ->
- case lists:keysearch(Cname,#'ComponentType'.name,Cs) of
- {value,C} ->
- get_simple_table_info1(S,C,Cnames,[Cname|Path]);
- _ ->
- error({type,"Missing expected simple table constraint",S})
- end;
-get_simple_table_info1(S,#'ComponentType'{typespec=TS},[],Path) ->
- %% In this component there must be a simple table constraint
- %% o.w. the asn1 code is wrong.
- #type{def=OCFT,constraint=Cnstr} = TS,
- case constraint_member(simpletable,Cnstr) of
- {true,{simpletable,_OSRef}} ->
- simple_table_info(S,OCFT,Path);
- _ ->
- error({type,{"missing expected simple table constraint",
- Cnstr},S})
+get_simple_table_info(S, Cs, AtLists) ->
+ [get_simple_table_info1(S, Cs, AtList, []) || AtList <- AtLists].
+
+get_simple_table_info1(S, Cs, [Cname|Cnames], Path) ->
+ #'ComponentType'{} = C =
+ lists:keyfind(Cname, #'ComponentType'.name, Cs),
+ get_simple_table_info2(S, C, Cnames, [Cname|Path]).
+
+get_simple_table_info2(S, #'ComponentType'{name=Name,typespec=TS}, [], Path) ->
+ OCFT = simple_table_get_ocft(S, Name, TS),
+ case lists:keymember(simpletable, 1, TS#type.constraint) of
+ true ->
+ simple_table_info(S, OCFT, Path);
+ false ->
+ asn1_error(S, {missing_table_constraint,Name})
end;
-get_simple_table_info1(S,#'ComponentType'{typespec=TS},Cnames,Path) ->
+get_simple_table_info2(S, #'ComponentType'{typespec=TS}, Cnames, Path) ->
Components = get_atlist_components(TS#type.def),
- get_simple_table_info1(S,Components,Cnames,Path).
-
+ get_simple_table_info1(S, Components, Cnames, Path).
+
+simple_table_get_ocft(_, _, #type{def=#'ObjectClassFieldType'{}=OCFT}) ->
+ OCFT;
+simple_table_get_ocft(S, Component, #type{constraint=Constr}) ->
+ case lists:keyfind(ocft, 1, Constr) of
+ {ocft,OCFT} ->
+ OCFT;
+ false ->
+ asn1_error(S, {missing_ocft,Component})
+ end.
simple_table_info(S,#'ObjectClassFieldType'{classname=ClRef,
class=ObjectClass,
@@ -5932,19 +5099,8 @@ simple_table_info(S,#'ObjectClassFieldType'{classname=ClRef,
CDef;
_ -> #classdef{typespec=ObjectClass}
end,
- UniqueName =
- case (catch get_unique_fieldname(S,ClassDef)) of
- {error,'__undefined_',_} -> no_unique;
- {asn1,Msg,_} ->
- error({type,Msg,S});
- {'EXIT',Msg} ->
- error({type,{internal_error,Msg},S});
- {Other,_} -> Other
- end,
- {lists:reverse(Path),ObjectClassFieldName,UniqueName};
-simple_table_info(S,Type,_) ->
- error({type,{"the type referenced by a componentrelation constraint must be a ObjectClassFieldType",Type},S}).
-
+ UniqueName = get_unique_fieldname(S, ClassDef),
+ {lists:reverse(Path),ObjectClassFieldName,UniqueName}.
%% any_component_relation searches for all component relation
%% constraints that refers to the actual level and returns a list of
@@ -5958,9 +5114,8 @@ simple_table_info(S,Type,_) ->
%% is found to check the validity of the at-list.
any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames,NamePath,Acc) ->
CRelPath =
- case constraint_member(componentrelation,Type#type.constraint) of
-%% [{componentrelation,_,AtNotation}] ->
- {true,{_,_,AtNotation}} ->
+ case lists:keyfind(componentrelation, 1, Type#type.constraint) of
+ {_,_,AtNotation} ->
%% Found component relation constraint, now check
%% whether this constraint is relevant for the level
%% where the search started
@@ -5969,7 +5124,7 @@ any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames,
%% simple table constraint from where the component
%% relation is found.
evaluate_atpath(S,NamePath,CNames,AtNot);
- _ ->
+ false ->
[]
end,
InnerAcc =
@@ -5991,11 +5146,11 @@ any_component_relation(S,[#'ComponentType'{name=CName,typespec=Type}|Cs],CNames,
any_component_relation(S,Cs,CNames,NamePath,InnerAcc++CRelPath++Acc);
any_component_relation(S,Type,CNames,NamePath,Acc) when is_record(Type,type) ->
CRelPath =
- case constraint_member(componentrelation,Type#type.constraint) of
- {true,{_,_,AtNotation}} ->
+ case lists:keyfind(componentrelation, 1, Type#type.constraint) of
+ {_,_,AtNotation} ->
AtNot = extract_at_notation(AtNotation),
evaluate_atpath(S,NamePath,CNames,AtNot);
- _ ->
+ false ->
[]
end,
InnerAcc =
@@ -6017,15 +5172,6 @@ any_component_relation(S,['ExtensionAdditionGroupEnd'|Cs],CNames,NamePath,Acc) -
any_component_relation(_,[],_,_,Acc) ->
Acc.
-constraint_member(componentrelation,[CRel={componentrelation,_,_}|_Rest]) ->
- {true,CRel};
-constraint_member(simpletable,[ST={simpletable,_}|_Rest]) ->
- {true,ST};
-constraint_member(Key,[_H|T]) ->
- constraint_member(Key,T);
-constraint_member(_,[]) ->
- false.
-
%% evaluate_atpath/4 finds out whether the at notation refers to the
%% search level. The list of referenced names in the AtNot list shall
%% begin with a name that exists on the level it refers to. If the
@@ -6059,9 +5205,7 @@ evaluate_atpath(S=#state{abscomppath=TopPath},NamePath,Cnames,{outermost,AtPath=
{_,[H|_T]} ->
case lists:member(H,Cnames) of
true -> [AtPathBelowTop];
- _ ->
- %% error({type,{asn1,"failed to analyze at-path",AtPath},S})
- throw({type,{asn1,"failed to analyze at-path",AtPath},S})
+ _ -> asn1_error(S, {invalid_at_path, AtPath})
end
end;
evaluate_atpath(_,_,_,_) ->
@@ -6098,23 +5242,8 @@ tuple2complist({R1,E,R2}) ->
tuple2complist(List) when is_list(List) ->
List.
-get_choice_components(_S,{'CHOICE',Components}) when is_list(Components)->
- Components;
-get_choice_components(_S,{'CHOICE',{C1,C2}}) when is_list(C1),is_list(C2) ->
- C1++C2;
-get_choice_components(S,ERef=#'Externaltypereference'{}) ->
- {_RefMod,TypeDef}=get_referenced_type(S,ERef),
- #typedef{typespec=TS} = TypeDef,
- get_choice_components(S,TS#type.def).
-
-extract_at_notation([{Level,[#'Externalvaluereference'{value=Name}|Rest]}]) ->
- {Level,[Name|extract_at_notation1(Rest)]};
-extract_at_notation(At) ->
- exit({error,{asn1,{at_notation,At}}}).
-extract_at_notation1([#'Externalvaluereference'{value=Name}|Rest]) ->
- [Name|extract_at_notation1(Rest)];
-extract_at_notation1([]) ->
- [].
+extract_at_notation([{Level,ValueRefs}]) ->
+ {Level,[Name || #'Externalvaluereference'{value=Name} <- ValueRefs]}.
%% componentrelation1/1 identifies all componentrelation constraints
%% that exist in C or in the substructure of C. Info about the found
@@ -6133,8 +5262,8 @@ componentrelation1(S,C = #type{def=Def,constraint=Constraint,tablecinf=TCI},
Ret =
% case Constraint of
% [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
- case constraint_member(componentrelation,Constraint) of
- {true,{_,{_,_,ObjectSet},AtList}} ->
+ case lists:keyfind(componentrelation, 1, Constraint) of
+ {_,{_,_,ObjectSet},AtList} ->
[{_,AL=[#'Externalvaluereference'{}|_R1]}|_R2] = AtList,
%% Note: if Path is longer than one,i.e. it is within
%% an inner type of the actual level, then the only
@@ -6145,7 +5274,7 @@ componentrelation1(S,C = #type{def=Def,constraint=Constraint,tablecinf=TCI},
lists:map(fun(#'Externalvaluereference'{value=V})->V end,
AL),
{[{ObjectSet,AtPath,ClassDef,Path}],Def};
- _ ->
+ false ->
%% check the inner type of component
innertype_comprel(S,Def,Path)
end,
@@ -6219,10 +5348,8 @@ componentlist_comprel(_,[],Acc,_,NewCL) ->
innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) ->
Ret =
-% case Cons of
-% [{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
- case constraint_member(componentrelation,Cons) of
- {true,{_,{_,_,ObjectSet},AtList}} ->
+ case lists:keyfind(componentrelation, 1, Cons) of
+ {_,{_,_,ObjectSet},AtList} ->
%% This AtList must have an "outermost" at sign to be
%% relevent here.
[{_,AL=[#'Externalvaluereference'{value=_Attr}|_R1]}|_R2]
@@ -6233,7 +5360,7 @@ innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) ->
lists:map(fun(#'Externalvaluereference'{value=V})->V end,
AL),
[{ObjectSet,AtPath,ClassDef,Path}];
- _ ->
+ false ->
innertype_comprel(S,Def,Path)
end,
case Ret of
@@ -6301,8 +5428,7 @@ value_match(S,#'ComponentType'{typespec=Type},Name,[At|Ats],Acc) ->
InnerType = asn1ct_gen:get_inner(Type#type.def),
Components =
case get_atlist_components(Type#type.def) of
- [] -> error({type,{asn1,"element in at list must be a "
- "SEQUENCE, SET or CHOICE.",Name},S});
+ [] -> asn1_error(S, {invalid_element, Name});
Comps -> Comps
end,
{Index,ValueIndex} = component_value_index(S,InnerType,At,Components),
@@ -6322,29 +5448,27 @@ component_index1(_S,Name,[#'ComponentType'{name=Name}|_Cs],N) ->
component_index1(S,Name,[_C|Cs],N) ->
component_index1(S,Name,Cs,N+1);
component_index1(S,Name,[],_) ->
- error({type,{asn1,"component of at-list was not"
- " found in substructure",Name},S}).
+ asn1_error(S, {invalid_at_list, Name}).
-get_unique_fieldname(_S,ClassDef) when is_record(ClassDef,classdef) ->
-%% {_,Fields,_} = ClassDef#classdef.typespec,
- Fields = (ClassDef#classdef.typespec)#objectclass.fields,
- get_unique_fieldname1(Fields,[]);
+get_unique_fieldname(S, #classdef{typespec=TS}) ->
+ Fields = TS#objectclass.fields,
+ get_unique_fieldname1(S, Fields, []);
get_unique_fieldname(S,#typedef{typespec=#type{def=ClassRef}}) ->
%% A class definition may be referenced as
%% REFED-CLASS ::= DEFINED-CLASS and then REFED-CLASS is a typedef
{_M,ClassDef} = get_referenced_type(S,ClassRef),
get_unique_fieldname(S,ClassDef).
-get_unique_fieldname1([],[]) ->
- throw({error,'__undefined_',[]});
-get_unique_fieldname1([],[Name]) ->
- Name;
-get_unique_fieldname1([],Acc) ->
- throw({asn1,'only one UNIQUE field is allowed in CLASS',Acc});
-get_unique_fieldname1([{fixedtypevaluefield,Name,_,'UNIQUE',Opt}|Rest],Acc) ->
- get_unique_fieldname1(Rest,[{Name,Opt}|Acc]);
-get_unique_fieldname1([_H|T],Acc) ->
- get_unique_fieldname1(T,Acc).
+get_unique_fieldname1(S, [{fixedtypevaluefield,Name,_,'UNIQUE',Opt}|T], Acc) ->
+ get_unique_fieldname1(S, T, [{Name,Opt}|Acc]);
+get_unique_fieldname1(S, [_|T], Acc) ->
+ get_unique_fieldname1(S, T, Acc);
+get_unique_fieldname1(S, [], Acc) ->
+ case Acc of
+ [] -> no_unique;
+ [Name] -> Name;
+ [_|_] -> asn1_error(S, multiple_uniqs)
+ end.
get_tableconstraint_info(S,Type,{CheckedTs,EComps,CheckedTs2}) ->
{get_tableconstraint_info(S,Type,CheckedTs,[]),
@@ -6400,31 +5524,8 @@ get_tableconstraint_info(S,Type,[C|Cs],Acc) ->
get_referenced_fieldname([{_,FirstFieldname}]) ->
{FirstFieldname,[]};
-get_referenced_fieldname([{_,FirstFieldname}|Rest]) ->
- {FirstFieldname,lists:map(fun(X)->element(2,X) end,Rest)};
-get_referenced_fieldname(Def={FieldName,RestFieldName}) when is_atom(FieldName),is_list(RestFieldName)->
- Def;
-get_referenced_fieldname(Def) ->
- {no_type,Def}.
-
-%% get_ObjectClassFieldType extracts the type from the chain of
-%% objects that leads to a final type.
-get_ObjectClassFieldType(S,ERef,PrimFieldNameList) when
- is_record(ERef,'Externaltypereference') ->
- {MName,Type} = get_referenced_type(S,ERef),
- NewS = update_state(S#state{type=Type,
- tname=ERef#'Externaltypereference'.type},MName),
- ClassSpec = check_class(NewS,Type),
- Fields = ClassSpec#objectclass.fields,
- get_ObjectClassFieldType(S,Fields,PrimFieldNameList);
-get_ObjectClassFieldType(S,Fields,L=[_PrimFieldName1|_Rest]) ->
- check_PrimitiveFieldNames(S,Fields,L),
- get_OCFType(S,Fields,L);
-get_ObjectClassFieldType(S,ERef,{FieldName,Rest}) ->
- get_ObjectClassFieldType(S,ERef,Rest ++ [FieldName]).
-
-check_PrimitiveFieldNames(_S,_Fields,_) ->
- ok.
+get_referenced_fieldname([{_,FirstFieldname}|T]) ->
+ {FirstFieldname,[element(2, X) || X <- T]}.
%% get_ObjectClassFieldType_classdef gets the def of the class of the
%% ObjectClassFieldType, i.e. the objectclass record. If the type has
@@ -6445,15 +5546,13 @@ get_OCFType(S,Fields,[PrimFieldName|Rest]) ->
{fixedtypevaluefield,PrimFieldName,Type};
{value,{objectfield,_,ClassRef,_Unique,_OptSpec}} ->
{MName,ClassDef} = get_referenced_type(S,ClassRef),
- NewS = update_state(S#state{type=ClassDef,
- tname=get_datastr_name(ClassDef)},
+ NewS = update_state(S#state{tname=get_datastr_name(ClassDef)},
MName),
CheckedCDef = check_class(NewS,ClassDef),
get_OCFType(S,CheckedCDef#objectclass.fields,Rest);
{value,{objectsetfield,_,Type,_OptSpec}} ->
{MName,ClassDef} = get_referenced_type(S,Type#type.def),
- NewS = update_state(S#state{type=ClassDef,
- tname=get_datastr_name(ClassDef)},
+ NewS = update_state(S#state{tname=get_datastr_name(ClassDef)},
MName),
CheckedCDef = check_class(NewS,ClassDef),
get_OCFType(S,CheckedCDef#objectclass.fields,Rest);
@@ -6461,7 +5560,7 @@ get_OCFType(S,Fields,[PrimFieldName|Rest]) ->
{value,Other} ->
{element(1,Other),PrimFieldName};
_ ->
- throw({error,lists:flatten(io_lib:format("undefined FieldName in ObjectClassFieldType: ~w",[PrimFieldName]))})
+ asn1_error(S, {illegal_object_field, PrimFieldName})
end.
get_taglist(S,Ext) when is_record(Ext,'Externaltypereference') ->
@@ -6485,30 +5584,8 @@ get_taglist(_S,#'ObjectClassFieldType'{type={typefield,_}}) ->
[];
get_taglist(S,#'ObjectClassFieldType'{type={fixedtypevaluefield,_,Type}}) ->
get_taglist(S,Type);
-get_taglist(S,{ERef=#'Externaltypereference'{},FieldNameList})
- when is_list(FieldNameList) ->
- case get_ObjectClassFieldType(S,ERef,FieldNameList) of
- {fixedtypevaluefield,_,Type} -> get_taglist(S,Type);
- {TypeFieldName,_} when is_atom(TypeFieldName) -> []%should check if allowed
- end;
-get_taglist(S,{ObjCl,FieldNameList}) when is_record(ObjCl,objectclass),
- is_list(FieldNameList) ->
- case get_ObjectClassFieldType(S,ObjCl#objectclass.fields,FieldNameList) of
- {fixedtypevaluefield,_,Type} -> get_taglist(S,Type);
- {TypeFieldName,_} when is_atom(TypeFieldName) -> []%should check if allowed
- end;
-get_taglist(S,Def) ->
- case S#state.erule of
- ber ->
- [];
- _ ->
- case Def of
- 'ASN1_OPEN_TYPE' -> % open_type has no UNIVERSAL tag as such
- [];
- _ ->
- [asn1ct_gen:def_to_tag(Def)]
- end
- end.
+get_taglist(_, _) ->
+ [].
get_taglist1(S,[#'ComponentType'{name=_Cname,tags=TagL}|Rest]) when is_list(TagL) ->
%% tag_list has been here , just return TagL and continue with next alternative
@@ -6565,15 +5642,6 @@ get_taglist1(_S,[]) ->
%% tag_number('CHARACTER STRING') -> 29;
%% tag_number('BMPString') -> 30.
-
-dbget_ex(_S,Module,Key) ->
- case asn1_db:dbget(Module,Key) of
- undefined ->
-
- throw({error,{asn1,{undefined,{Module,Key}}}}); % this is catched on toplevel type or value
- T -> T
- end.
-
merge_tags(T1, T2) when is_list(T2) ->
merge_tags2(T1 ++ T2, []);
merge_tags(T1, T2) ->
@@ -6590,75 +5658,46 @@ merge_tags2([H|T],Acc) ->
merge_tags2([], Acc) ->
lists:reverse(Acc).
-%% merge_constraints(C1, []) ->
-%% C1;
-%% merge_constraints([], C2) ->
-%% C2;
-%% merge_constraints(C1, C2) ->
-%% {SList,VList,PAList,Rest} = splitlist(C1++C2,[],[],[],[]),
-%% SizeC = merge_constraints(SList),
-%% ValueC = merge_constraints(VList),
-%% PermAlphaC = merge_constraints(PAList),
-%% case Rest of
-%% [] ->
-%% SizeC ++ ValueC ++ PermAlphaC;
-%% _ ->
-%% throw({error,{asn1,{not_implemented,{merge_constraints,Rest}}}})
-%% end.
-
-%% merge_constraints([]) -> [];
-%% merge_constraints([C1 = {_,{Low1,High1}},{_,{Low2,High2}}|Rest]) when Low1 >= Low2,
-%% High1 =< High2 ->
-%% merge_constraints([C1|Rest]);
-%% merge_constraints([C1={'PermittedAlphabet',_},C2|Rest]) ->
-%% [C1|merge_constraints([C2|Rest])];
-%% merge_constraints([C1 = {_,{_Low1,_High1}},C2 = {_,{_Low2,_High2}}|_Rest]) ->
-%% throw({error,asn1,{conflicting_constraints,{C1,C2}}});
-%% merge_constraints([C]) ->
-%% [C].
-
-%% splitlist([C={'SizeConstraint',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,[C|Sacc],Vacc,PAacc,Restacc);
-%% splitlist([C={'ValueRange',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,Sacc,[C|Vacc],PAacc,Restacc);
-%% splitlist([C={'PermittedAlphabet',_}|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,Sacc,Vacc,[C|PAacc],Restacc);
-%% splitlist([C|Rest],Sacc,Vacc,PAacc,Restacc) ->
-%% splitlist(Rest,Sacc,Vacc,PAacc,[C|Restacc]);
-%% splitlist([],Sacc,Vacc,PAacc,Restacc) ->
-%% {lists:reverse(Sacc),
-%% lists:reverse(Vacc),
-%% lists:reverse(PAacc),
-%% lists:reverse(Restacc)}.
-
-
-
-storeindb(S,M) when is_record(M,module) ->
- TVlist = M#module.typeorval,
- NewM = M#module{typeorval=findtypes_and_values(TVlist)},
- asn1_db:dbnew(NewM#module.name, S#state.erule),
- asn1_db:dbput(NewM#module.name,'MODULE', NewM),
- Res = storeindb(#state{mname=NewM#module.name}, TVlist, []),
- include_default_class(S,NewM#module.name),
+storeindb(S0, #module{name=ModName,typeorval=TVlist0}=M) ->
+ S = S0#state{mname=ModName},
+ TVlist1 = [{asn1ct:get_name_of_def(Def),Def} || Def <- TVlist0],
+ case check_duplicate_defs(S, TVlist1) of
+ ok ->
+ storeindb_1(S, M, TVlist0, TVlist1);
+ {error,_}=Error ->
+ Error
+ end.
+
+storeindb_1(S, #module{name=ModName}=M, TVlist0, TVlist) ->
+ NewM = M#module{typeorval=findtypes_and_values(TVlist0)},
+ asn1_db:dbnew(ModName, S#state.erule),
+ asn1_db:dbput(ModName, 'MODULE', NewM),
+ asn1_db:dbput(ModName, TVlist),
+ include_default_class(S, NewM#module.name),
include_default_type(NewM#module.name),
- Res.
+ ok.
-storeindb(#state{mname=Module}=S, [H|T], Errors) ->
- Name = asn1ct:get_name_of_def(H),
- case asn1_db:dbget(Module, Name) of
- undefined ->
- asn1_db:dbput(Module, Name, H),
- storeindb(S, T, Errors);
- Prev ->
- PrevLine = asn1ct:get_pos_of_def(Prev),
- Error = return_asn1_error(S, H, {already_defined,Name,PrevLine}),
- storeindb(S, T, [Error|Errors])
- end;
-storeindb(_, [], []) ->
- ok;
-storeindb(_, [], [_|_]=Errors) ->
- {error,Errors}.
+check_duplicate_defs(S, Defs) ->
+ Set0 = sofs:relation(Defs),
+ Set1 = sofs:relation_to_family(Set0),
+ Set = sofs:to_external(Set1),
+ case [duplicate_def(S, N, Dup) || {N,[_,_|_]=Dup} <- Set] of
+ [] ->
+ ok;
+ [_|_]=E ->
+ {error,lists:append(E)}
+ end.
+
+duplicate_def(S, Name, Dups0) ->
+ Dups1 = [{asn1ct:get_pos_of_def(Def),Def} || Def <- Dups0],
+ [{Prev,_}|Dups] = lists:sort(Dups1),
+ duplicate_def_1(S, Dups, Name, Prev).
+duplicate_def_1(S, [{_,Def}|T], Name, Prev) ->
+ E = return_asn1_error(S, Def, {already_defined,Name,Prev}),
+ [E|duplicate_def_1(S, T, Name, Prev)];
+duplicate_def_1(_, [], _, _) ->
+ [].
findtypes_and_values(TVList) ->
findtypes_and_values(TVList,[],[],[],[],[],[]).%% Types,Values,
@@ -6698,99 +5737,146 @@ findtypes_and_values([],Tacc,Vacc,Pacc,Cacc,Oacc,OSacc) ->
{lists:reverse(Tacc),lists:reverse(Vacc),lists:reverse(Pacc),
lists:reverse(Cacc),lists:reverse(Oacc),lists:reverse(OSacc)}.
+return_asn1_error(#state{error_context=Context}=S, Error) ->
+ return_asn1_error(S, Context, Error).
+
return_asn1_error(#state{mname=Where}, Item, Error) ->
Pos = asn1ct:get_pos_of_def(Item),
{structured_error,{Where,Pos},?MODULE,Error}.
-asn1_error(S, Item, Error) ->
- throw({error,return_asn1_error(S, Item, Error)}).
+asn1_error(S, Error) ->
+ throw({error,return_asn1_error(S, Error)}).
format_error({already_defined,Name,PrevLine}) ->
io_lib:format("the name ~p has already been defined at line ~p",
[Name,PrevLine]);
+format_error({duplicate_identifier,Ids}) ->
+ io_lib:format("the identifier '~p' has already been used", [Ids]);
+format_error({duplicate_tags,Elements}) ->
+ io_lib:format("duplicate tags in the elements: ~s",
+ [format_elements(Elements)]);
+format_error({enum_illegal_redefinition,Id}) ->
+ io_lib:format("'~s' must not be redefined", [Id]);
+format_error({enum_not_ascending,Id,N,Prev}) ->
+ io_lib:format("the values for enumerations which follow '...' must "
+ "be in ascending order, but '~p(~p)' is less than the "
+ "previous value '~p'", [Id,N,Prev]);
+format_error({enum_reused_value,Id,Val}) ->
+ io_lib:format("'~s' has the value '~p' which is used more than once",
+ [Id,Val]);
+format_error({illegal_id, Id}) ->
+ io_lib:format("illegal identifier: ~p", [Id]);
+format_error({illegal_choice_type, Ref}) ->
+ io_lib:format("expecting a CHOICE type: ~p", [Ref]);
+format_error({illegal_class_name,Class}) ->
+ io_lib:format("the class name '~s' is illegal (it must start with an uppercase letter and only contain uppercase letters, digits, or hyphens)", [Class]);
+format_error({illegal_COMPONENTS_OF, Ref}) ->
+ io_lib:format("expected a SEQUENCE or SET got: ~p", [Ref]);
+format_error(illegal_external_value) ->
+ "illegal value in EXTERNAL type";
format_error({illegal_instance_of,Class}) ->
io_lib:format("using INSTANCE OF on class '~s' is illegal, "
- "because INSTANCE OF may only be used on the class TYPE-IDENTFIER",
+ "because INSTANCE OF may only be used on the class TYPE-IDENTIFIER",
[Class]);
+format_error(illegal_integer_value) ->
+ "expecting an integer value";
+format_error(illegal_object) ->
+ "expecting an object";
+format_error({illegal_object_field, Id}) ->
+ io_lib:format("expecting a class field: ~p",[Id]);
+format_error({illegal_oid,o_id}) ->
+ "illegal OBJECT IDENTIFIER";
+format_error({illegal_oid,rel_oid}) ->
+ "illegal RELATIVE-OID";
format_error(illegal_octet_string_value) ->
"expecting a bstring or an hstring as value for an OCTET STRING";
format_error({illegal_typereference,Name}) ->
io_lib:format("'~p' is used as a typereference, but does not start with an uppercase letter", [Name]);
+format_error(illegal_table_constraint) ->
+ "table constraints may only be applied to CLASS.&field constructs";
+format_error(illegal_value) ->
+ "expecting a value";
+format_error({illegal_value, TYPE}) ->
+ io_lib:format("expecting a ~s value", [TYPE]);
format_error({invalid_fields,Fields,Obj}) ->
io_lib:format("invalid ~s in ~p", [format_fields(Fields),Obj]);
format_error({invalid_bit_number,Bit}) ->
io_lib:format("the bit number '~p' is invalid", [Bit]);
+format_error(invalid_table_constraint) ->
+ "the table constraint is not an object set";
+format_error(invalid_objectset) ->
+ "expecting an object set";
+format_error({implicit_tag_before,Kind}) ->
+ "illegal implicit tag before " ++
+ case Kind of
+ choice -> "'CHOICE'";
+ open_type -> "open type"
+ end;
format_error({missing_mandatory_fields,Fields,Obj}) ->
io_lib:format("missing mandatory ~s in ~p",
[format_fields(Fields),Obj]);
+format_error({missing_table_constraint,Component}) ->
+ io_lib:format("the component '~s' is referenced by a component relation constraint using the '@field-name' notation, but does not have a table constraint",
+ [Component]);
+format_error({missing_id,Id}) ->
+ io_lib:format("expected the mandatory component '~p'", [Id]);
+format_error({missing_ocft,Component}) ->
+ io_lib:format("the component '~s' must be an ObjectClassFieldType (CLASSNAME.&field-name)", [Component]);
+format_error(multiple_uniqs) ->
+ "implementation limitation: only one UNIQUE field is allowed in CLASS";
format_error({namelist_redefinition,Name}) ->
io_lib:format("the name '~s' can not be redefined", [Name]);
+format_error({param_bad_type, Ref}) ->
+ io_lib:format("'~p' is not a parameterized type", [Ref]);
+format_error(param_wrong_number_of_arguments) ->
+ "wrong number of arguments";
+format_error(reversed_range) ->
+ "ranges must be given in increasing order";
+format_error({syntax_duplicated_fields,Fields}) ->
+ io_lib:format("~s must only occur once in the syntax list",
+ [format_fields(Fields)]);
+format_error(syntax_nomatch) ->
+ "unexpected end of object definition";
+format_error({syntax_mandatory_in_optional_group,Name}) ->
+ io_lib:format("the field '&~s' must not be within an optional group since it is not optional",
+ [Name]);
+format_error({syntax_missing_mandatory_fields,Fields}) ->
+ io_lib:format("missing mandatory ~s in the syntax list",
+ [format_fields(Fields)]);
+format_error({syntax_nomatch,Actual}) ->
+ io_lib:format("~s is not the next item allowed according to the defined syntax",
+ [Actual]);
+format_error({syntax_undefined_field,Field}) ->
+ io_lib:format("'&~s' is not a field of the class being defined",
+ [Field]);
format_error({undefined,Name}) ->
io_lib:format("'~s' is referenced, but is not defined", [Name]);
+format_error({undefined_export,Ref}) ->
+ io_lib:format("'~s' is exported but is not defined", [Ref]);
+format_error({undefined_field,FieldName}) ->
+ io_lib:format("the field '&~s' is undefined", [FieldName]);
format_error({undefined_import,Ref,Module}) ->
io_lib:format("'~s' is not exported from ~s", [Ref,Module]);
+format_error({unique_and_default,Field}) ->
+ io_lib:format("the field '&~s' must not have both 'UNIQUE' and 'DEFAULT'",
+ [Field]);
format_error({value_reused,Val}) ->
io_lib:format("the value '~p' is used more than once", [Val]);
+format_error({non_unique_object,Id}) ->
+ io_lib:format("object set with a UNIQUE field value of '~p' is used more than once", [Id]);
format_error(Other) ->
io_lib:format("~p", [Other]).
format_fields([F]) ->
- io_lib:format("field &~s", [F]);
+ io_lib:format("field '&~s'", [F]);
format_fields([H|T]) ->
- [io_lib:format("fields &~s", [H])|
- [io_lib:format(", &~s", [F]) || F <- T]].
-
-error({_,{structured_error,_,_,_}=SE,_}) ->
- SE;
-error({export,Msg,#state{mname=Mname,type=Ref,tname=Typename}}) ->
- Pos = Ref#'Externaltypereference'.pos,
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]),
- {error,{export,Pos,Mname,Typename,Msg}};
-% error({type,{Msg1,Msg2},#state{mname=Mname,type=Type,tname=Typename}})
-% when is_record(Type,typedef) ->
-% io:format("asn1error:~p:~p:~p ~p~n",
-% [Type#typedef.pos,Mname,Typename,Msg1]),
-% {error,{type,Type#typedef.pos,Mname,Typename,Msg1,Msg2}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,type) ->
- io:format("asn1error:~p:~p~n~p~n",
- [Mname,Typename,Msg]),
- {error,{type,Mname,Typename,Msg}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,typedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",
- [Type#typedef.pos,Mname,Typename,Msg]),
- {error,{type,Type#typedef.pos,Mname,Typename,Msg}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,ptypedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",
- [Type#ptypedef.pos,Mname,Typename,Msg]),
- {error,{type,Type#ptypedef.pos,Mname,Typename,Msg}};
-error({type,Msg,#state{mname=Mname,value=Value,vname=Valuename}})
- when is_record(Value,valuedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]),
- {error,{type,Value#valuedef.pos,Mname,Valuename,Msg}};
-error({type,Msg,#state{mname=Mname,type=Type,tname=Typename}})
- when is_record(Type,pobjectdef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",
- [Type#pobjectdef.pos,Mname,Typename,Msg]),
- {error,{type,Type#pobjectdef.pos,Mname,Typename,Msg}};
-error({value,Msg,#state{mname=Mname,value=Value,vname=Valuename}})
- when is_record(Value,valuedef) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Value#valuedef.pos,Mname,Valuename,Msg]),
- {error,{value,Value#valuedef.pos,Mname,Valuename,Msg}};
-error({Other,Msg,#state{mname=Mname,value=#valuedef{pos=Pos},vname=Valuename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Valuename,Msg]),
- {error,{Other,Pos,Mname,Valuename,Msg}};
-error({Other,Msg,#state{mname=Mname,type=#typedef{pos=Pos},tname=Typename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]),
- {error,{Other,Pos,Mname,Typename,Msg}};
-error({Other,Msg,#state{mname=Mname,type=#classdef{pos=Pos},tname=Typename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[Pos,Mname,Typename,Msg]),
- {error,{Other,Pos,Mname,Typename,Msg}};
-error({Other,Msg,#state{mname=Mname,type=Type,tname=Typename}}) ->
- io:format("asn1error:~p:~p:~p~n~p~n",[asn1ct:get_pos_of_def(Type),Mname,Typename,Msg]),
- {error,{Other,asn1ct:get_pos_of_def(Type),Mname,Typename,Msg}}.
+ [io_lib:format("fields '&~s'", [H])|
+ [io_lib:format(", '&~s'", [F]) || F <- T]].
+
+format_elements([H1,H2|T]) ->
+ [io_lib:format("~p, ", [H1])|format_elements([H2|T])];
+format_elements([H]) ->
+ io_lib:format("~p", [H]).
include_default_type(Module) ->
NameAbsList = default_type_list(),
@@ -6953,62 +6039,62 @@ default_type_list() ->
].
-include_default_class(S,Module) ->
- NameAbsList = default_class_list(S),
- include_default_class1(Module,NameAbsList).
+include_default_class(S, Module) ->
+ _ = [include_default_class1(S, Module, ClassDef) ||
+ ClassDef <- default_class_list()],
+ ok.
-include_default_class1(_,[]) ->
- ok;
-include_default_class1(Module,[{Name,TS}|Rest]) ->
- case asn1_db:dbget(Module,Name) of
+include_default_class1(S, Module, {Name,Ts0}) ->
+ case asn1_db:dbget(Module, Name) of
undefined ->
- C = #classdef{checked=true,module=Module,name=Name,
- typespec=TS},
- asn1_db:dbput(Module,Name,C);
- _ -> ok
- end,
- include_default_class1(Module,Rest).
+ #objectclass{fields=Fields,
+ syntax={'WITH SYNTAX',Syntax0}} = Ts0,
+ Syntax = preprocess_syntax(S, Syntax0, Fields),
+ Ts = Ts0#objectclass{syntax={preprocessed_syntax,Syntax}},
+ C = #classdef{checked=true,module=Module,
+ name=Name,typespec=Ts},
+ asn1_db:dbput(Module, Name, C);
+ _ ->
+ ok
+ end.
-default_class_list(S) ->
+default_class_list() ->
[{'TYPE-IDENTIFIER',
- {objectclass,
- [{fixedtypevaluefield,
- id,
- #type{tag=?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER),
- def='OBJECT IDENTIFIER'},
- 'UNIQUE',
- 'MANDATORY'},
- {typefield,'Type','MANDATORY'}],
- {'WITH SYNTAX',
- [{typefieldreference,'Type'},
- 'IDENTIFIED',
- 'BY',
- {valuefieldreference,id}]}}},
+ #objectclass{fields=[{fixedtypevaluefield,
+ id,
+ #type{tag=[?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER)],
+ def='OBJECT IDENTIFIER'},
+ 'UNIQUE',
+ 'MANDATORY'},
+ {typefield,'Type','MANDATORY'}],
+ syntax={'WITH SYNTAX',
+ [{typefieldreference,'Type'},
+ 'IDENTIFIED',
+ 'BY',
+ {valuefieldreference,id}]}}},
{'ABSTRACT-SYNTAX',
- {objectclass,
- [{fixedtypevaluefield,
- id,
- #type{tag=?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER),
- def='OBJECT IDENTIFIER'},
- 'UNIQUE',
- 'MANDATORY'},
- {typefield,'Type','MANDATORY'},
- {fixedtypevaluefield,
- property,
- #type{tag=?TAG_PRIMITIVE(?N_BIT_STRING),
- def={'BIT STRING',[]}},
- undefined,
- {'DEFAULT',
- [0,1,0]}}],
- {'WITH SYNTAX',
- [{typefieldreference,'Type'},
- 'IDENTIFIED',
- 'BY',
- {valuefieldreference,id},
- ['HAS',
- 'PROPERTY',
- {valuefieldreference,property}]]}}}].
-
+ #objectclass{fields=[{fixedtypevaluefield,
+ id,
+ #type{tag=[?TAG_PRIMITIVE(?N_OBJECT_IDENTIFIER)],
+ def='OBJECT IDENTIFIER'},
+ 'UNIQUE',
+ 'MANDATORY'},
+ {typefield,'Type','MANDATORY'},
+ {fixedtypevaluefield,
+ property,
+ #type{tag=[?TAG_PRIMITIVE(?N_BIT_STRING)],
+ def={'BIT STRING',[]}},
+ undefined,
+ {'DEFAULT',
+ [0,1,0]}}],
+ syntax={'WITH SYNTAX',
+ [{typefieldreference,'Type'},
+ 'IDENTIFIED',
+ 'BY',
+ {valuefieldreference,id},
+ ['HAS',
+ 'PROPERTY',
+ {valuefieldreference,property}]]}}}].
new_reference_name(Name) ->
case get(asn1_reference) of
@@ -7037,8 +6123,9 @@ insert_once(S,Tab,Key) ->
skipped
end.
-check_fold(S, [H|T], Check) ->
- Type = asn1_db:dbget(S#state.mname, H),
+check_fold(S0, [H|T], Check) ->
+ Type = asn1_db:dbget(S0#state.mname, H),
+ S = S0#state{error_context=Type},
case Check(S, H, Type) of
ok ->
check_fold(S, T, Check);
@@ -7047,5 +6134,19 @@ check_fold(S, [H|T], Check) ->
end;
check_fold(_, [], Check) when is_function(Check, 3) -> [].
+error_value(Value) when is_integer(Value) -> Value;
+error_value(Value) when is_atom(Value) -> Value;
+error_value(#type{def=Value}) when is_atom(Value) -> Value;
+error_value(#type{def=Value}) -> error_value(Value);
+error_value(RefOrType) ->
+ try name_of_def(RefOrType) of
+ Name -> Name
+ catch _:_ ->
+ case get_datastr_name(RefOrType) of
+ undefined -> RefOrType;
+ Name -> Name
+ end
+ end.
+
name_of_def(#'Externaltypereference'{type=N}) -> N;
name_of_def(#'Externalvaluereference'{value=N}) -> N.
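%%
%% A minimal sketch (hypothetical code, not part of the patch above) of the
%% sofs idiom used by check_duplicate_defs/2: group the {Name,Def} pairs into
%% a family and keep every name that is bound to more than one definition.
duplicate_names(NamedDefs) ->
    Fam = sofs:relation_to_family(sofs:relation(NamedDefs)),
    [{Name,Defs} || {Name,[_,_|_]=Defs} <- sofs:to_external(Fam)].
%% Example: duplicate_names([{a,1},{b,2},{a,3}]) returns [{a,[1,3]}].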
diff --git a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
index 5fadd0495a..820d19b85c 100644
--- a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
+++ b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -234,7 +234,7 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) ->
asn1ct_name:new(rb),
emit([" {'",RecordName,"'}.",nl,nl]);
{LeadingAttrTerm,PostponedDecArgs} ->
- emit([com,nl,nl]),
+ emit([nl]),
case {LeadingAttrTerm,PostponedDecArgs} of
{[],[]} ->
ok;
@@ -413,7 +413,7 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
%% return value as record
emit([" {'",RecordName,"'}.",nl]);
{LeadingAttrTerm,PostponedDecArgs} ->
- emit([com,nl,nl]),
+ emit([nl]),
case {LeadingAttrTerm,PostponedDecArgs} of
{[],[]} ->
ok;
@@ -617,18 +617,20 @@ gen_dec_sequence_call1(Erules,TopType,[#'ComponentType'{name=Cname,typespec=Type
{LA,PostponedDec} =
gen_dec_component(Erules,TopType,Cname,Tags,Type,Num,Prop,
Ext,DecObjInf),
+ emit([com,nl]),
case Rest of
[] ->
{LA ++ LeadingAttrAcc,PostponedDec ++ ArgsAcc};
_ ->
- emit([com,nl]),
asn1ct_name:new(bytes),
gen_dec_sequence_call1(Erules,TopType,Rest,Num+1,Ext,DecObjInf,
LA++LeadingAttrAcc,PostponedDec++ArgsAcc)
end;
gen_dec_sequence_call1(_Erules,_TopType,[],1,_,_,_,_) ->
- no_terms.
+ no_terms;
+gen_dec_sequence_call1(_, _, [], _Num, _, _, LA, PostponedDec) ->
+ {LA, PostponedDec}.
gen_dec_sequence_call2(_Erules,_TopType, {[], [], []}, _Ext,_DecObjInf) ->
no_terms;
@@ -643,7 +645,6 @@ gen_dec_sequence_call2(Erules,TopType,{Root1,EList,Root2},_Ext,DecObjInf) ->
%% TagList is the tags of Root2 elements from the first up to and
%% including the first mandatory element.
TagList = get_root2_taglist(Root2,[]),
- emit({com,nl}),
emit([{curr,tlv}," = ",
{call,ber,skip_ExtensionAdditions,
[{prev,tlv},{asis,TagList}]},com,nl]),
diff --git a/lib/asn1/src/asn1ct_constructed_per.erl b/lib/asn1/src/asn1ct_constructed_per.erl
index a91404ed54..0bc6688a49 100644
--- a/lib/asn1/src/asn1ct_constructed_per.erl
+++ b/lib/asn1/src/asn1ct_constructed_per.erl
@@ -410,12 +410,11 @@ gen_dec_open_type(Erule, Val, {Xmod,Xtype}, LeadingAttr,
#classdef{typespec=ClassDef} = asn1_db:dbget(ClMod, ClType),
#objectclass{fields=ClassFields} = ClassDef,
Extensible = lists:member('EXTENSIONMARK', ObjSet1),
- ObjSet2 = [{Key,fix_object_code(Name, Code, ClassFields)} ||
- {_,Key,Code} <- ObjSet1],
- ObjSet = lists:sort([P || {_,B}=P <- ObjSet2, B =/= none]),
+ Typename = [Name,ClType],
+ ObjSet = index_object_set(Erule, ClType, Name,
+ ObjSet1, ClassFields),
Key = erlang:md5(term_to_binary({decode,ObjSet,RestFieldNames,
Prop,Extensible})),
- Typename = [Name,ClType],
Gen = fun(_Fd, N) ->
dec_objset_optional(N, Prop),
dec_objset(Erule, N, ObjSet, RestFieldNames, Typename),
@@ -467,46 +466,15 @@ dec_objset_2(Erule, Obj, RestFields0, Typename) ->
Imm = asn1ct_gen_per:gen_dec_imm(Erule, Type),
{Term,_} = asn1ct_imm:dec_slim_cg(Imm, 'Bytes'),
emit([com,nl,Term]);
- #typedef{name={constructed,bif},typespec=Def} ->
- InnerType = asn1ct_gen:get_inner(Def#type.def),
- case InnerType of
- 'CHOICE' ->
- asn1ct_name:start(),
- asn1ct_name:new(bytes),
- {'CHOICE',CompList} = Def#type.def,
- Ext = extensible_enc(CompList),
- emit(["{Result,_} = begin",nl]),
- gen_dec_choice(Erule, Typename, CompList, Ext),
- emit([nl,
- "end",com,nl,
- "Result"]);
- 'SET' ->
- Imm0 = gen_dec_constructed_imm(Erule, Typename, Def),
- Imm = opt_imm(Imm0),
- asn1ct_name:start(),
- emit(["{Result,_} = begin",nl]),
- emit_gen_dec_imm(Imm),
- emit([nl,
- "end",com,nl,
- "Result"]);
- 'SET OF' ->
- asn1ct_name:start(),
- do_gen_decode_sof(Erule, Typename, 'SET OF',
- Def, false);
- 'SEQUENCE' ->
- Imm0 = gen_dec_constructed_imm(Erule, Typename, Def),
- Imm = opt_imm(Imm0),
- asn1ct_name:start(),
- emit(["{Result,_} = begin",nl]),
- emit_gen_dec_imm(Imm),
- emit([nl,
- "end",com,nl,
- "Result"]);
- 'SEQUENCE OF' ->
- asn1ct_name:start(),
- do_gen_decode_sof(Erule, Typename, 'SEQUENCE OF',
- Def, false)
- end;
+ #typedef{name={constructed,bif},typespec=Type}=Def ->
+ Prefix = "dec_outlined_",
+ Key = {dec_outlined,Def},
+ Gen = fun(_Fd, Name) ->
+ gen_dec_obj(Erule, Name, Typename, Type)
+ end,
+ Func = asn1ct_func:call_gen(Prefix, Key, Gen),
+ emit(["{Term,_} = ",{asis,Func},"(Bytes)",com,nl,
+ "Term"]);
#typedef{name=Type} ->
emit(["{Result,_} = ",{asis,enc_func("dec_", Type)},"(Bytes),",nl,
"Result"]);
@@ -531,6 +499,12 @@ dec_objset_2(Erule, Obj, RestFields0, Typename) ->
end
end.
+gen_dec_obj(Erules, Name, Typename, Type) ->
+ emit([{asis,Name},"(Bytes) ->",nl]),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ asn1ct_gen:gen_decode_constructed(Erules, Typename,
+ InnerType, Type).
+
gen_encode_choice(Erule, TopType, D) ->
asn1ct_name:start(),
Imm = gen_encode_choice_imm(Erule, TopType, D),
@@ -595,10 +569,10 @@ gen_encode_sof_imm(Erule, Typename, SeqOrSetOf, #type{}=D) ->
gen_decode_sof(Erules, Typename, SeqOrSetOf, #type{}=D) ->
asn1ct_name:start(),
- do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, true),
+ do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D),
emit([".",nl,nl]).
-do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, NeedRest) ->
+do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D) ->
{_SeqOrSetOf,ComponentType} = D#type.def,
SizeConstraint = asn1ct_imm:effective_constraint(bitstring,
D#type.constraint),
@@ -610,12 +584,11 @@ do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D, NeedRest) ->
""
end,
{Num,Buf} = gen_decode_length(SizeConstraint, Erules),
- Key = erlang:md5(term_to_binary({Typename,SeqOrSetOf,
- ComponentType,NeedRest})),
+ Key = erlang:md5(term_to_binary({Typename,SeqOrSetOf,ComponentType})),
Gen = fun(_Fd, Name) ->
gen_decode_sof_components(Erules, Name,
Typename, SeqOrSetOf,
- ComponentType, NeedRest)
+ ComponentType)
end,
F = asn1ct_func:call_gen("dec_components", Key, Gen),
emit([",",nl,
@@ -629,7 +602,7 @@ gen_decode_length(Constraint, Erule) ->
Imm = asn1ct_imm:per_dec_length(Constraint, true, is_aligned(Erule)),
asn1ct_imm:dec_slim_cg(Imm, "Bytes").
-gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont, NeedRest) ->
+gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont) ->
{ObjFun,ObjFun_Var} =
case Cont#type.tablecinf of
[{objfun,_}|_R] ->
@@ -637,14 +610,8 @@ gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont, NeedRest) ->
_ ->
{"",""}
end,
- case NeedRest of
- false ->
- emit([{asis,Name},"(0, _Bytes",ObjFun_Var,", Acc) ->",nl,
- "lists:reverse(Acc);",nl]);
- true ->
- emit([{asis,Name},"(0, Bytes",ObjFun_Var,", Acc) ->",nl,
- "{lists:reverse(Acc),Bytes};",nl])
- end,
+ emit([{asis,Name},"(0, Bytes",ObjFun_Var,", Acc) ->",nl,
+ "{lists:reverse(Acc),Bytes};",nl]),
emit([{asis,Name},"(Num, Bytes",ObjFun,", Acc) ->",nl,
"{Term,Remain} = "]),
Constructed_Suffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,
@@ -1024,11 +991,12 @@ enc_var_type_call(Erule, Name, RestFieldNames,
#classdef{typespec=ClassDef} = asn1_db:dbget(ClMod, ClType),
#objectclass{fields=ClassFields} = ClassDef,
Extensible = lists:member('EXTENSIONMARK', ObjSet1),
- ObjSet2 = [{Key,fix_object_code(Name, Code, ClassFields)} ||
- {_,Key,Code} <- ObjSet1],
- ObjSet = lists:sort([P || {_,B}=P <- ObjSet2, B =/= none]),
+ ObjSet = index_object_set(Erule, ClType, Name,
+ ObjSet1, ClassFields),
Key = erlang:md5(term_to_binary({encode,ObjSet,RestFieldNames,Extensible})),
- Imm = enc_objset_imm(Erule, Name, ObjSet, RestFieldNames, Extensible),
+ TypeName = [ClType,Name],
+ Imm = enc_objset_imm(Erule, TypeName, Name, ObjSet,
+ RestFieldNames, Extensible),
Lambda = {lambda,[{var,"Val"},{var,"Id"}],Imm},
Gen = fun(_Fd, N) ->
Aligned = is_aligned(Erule),
@@ -1039,11 +1007,27 @@ enc_var_type_call(Erule, Name, RestFieldNames,
Prefix = lists:concat(["enc_os_",Name]),
[{call_gen,Prefix,Key,Gen,Lambda,[Val,Fun]}].
-fix_object_code(Name, [{Name,B}|_], _ClassFields) ->
- B;
-fix_object_code(Name, [_|T], ClassFields) ->
- fix_object_code(Name, T, ClassFields);
-fix_object_code(Name, [], ClassFields) ->
+index_object_set(_Erules, _ClType, Name, Set0, ClassFields) ->
+ Set = index_object_set_1(Name, Set0, ClassFields),
+ lists:sort(Set).
+
+index_object_set_1(Name, [{_,Key,Code}|T], ClassFields) ->
+ case index_object_set_2(Name, Code, ClassFields) of
+ none ->
+ index_object_set_1(Name, T, ClassFields);
+ Type ->
+ [{Key,Type}|index_object_set_1(Name, T, ClassFields)]
+ end;
+index_object_set_1(Name, [_|T], ClassFields) ->
+ index_object_set_1(Name, T, ClassFields);
+index_object_set_1(_, [], _) ->
+ [].
+
+index_object_set_2(Name, [{Name,Type}|_], _ClassFields) ->
+ Type;
+index_object_set_2(Name, [_|T], ClassFields) ->
+ index_object_set_2(Name, T, ClassFields);
+index_object_set_2(Name, [], ClassFields) ->
case lists:keyfind(Name, 2, ClassFields) of
{typefield,Name,'OPTIONAL'} ->
none;
@@ -1059,7 +1043,8 @@ fix_object_code(Name, [], ClassFields) ->
end
end.
-enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) ->
+enc_objset_imm(Erule, TypeName, Component, ObjSet,
+ RestFieldNames, Extensible) ->
Aligned = is_aligned(Erule),
E = {error,
fun() ->
@@ -1070,7 +1055,7 @@ enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) ->
end},
[{'cond',
[[{eq,{var,"Id"},Key}|
- enc_obj(Erule, Obj, RestFieldNames, Aligned)] ||
+ enc_obj(Erule, Obj, TypeName, RestFieldNames, Aligned)] ||
{Key,Obj} <- ObjSet] ++
[['_',case Extensible of
false ->
@@ -1086,24 +1071,18 @@ enc_objset_imm(Erule, Component, ObjSet, RestFieldNames, Extensible) ->
end
end]]}].
-enc_obj(Erule, Obj, RestFieldNames0, Aligned) ->
+enc_obj(Erule, Obj, TypeName, RestFieldNames0, Aligned) ->
+ Val = {var,"Val"},
case Obj of
+ #typedef{name={constructed,bif},typespec=Type}=Def ->
+ Prefix = "enc_outlined_",
+ Key = {enc_outlined,Def},
+ Gen = fun(_Fd, Name) ->
+ gen_enc_obj(Erule, Name, TypeName, Type)
+ end,
+ [{call_gen,Prefix,Key,Gen,undefined,[Val]}];
#typedef{name={primitive,bif},typespec=Def} ->
asn1ct_gen_per:gen_encode_prim_imm({var,"Val"}, Def, Aligned);
- #typedef{name={constructed,bif},typespec=Def} ->
- InnerType = asn1ct_gen:get_inner(Def#type.def),
- case InnerType of
- 'CHOICE' ->
- gen_encode_choice_imm(Erule, name, Def);
- 'SET' ->
- gen_encode_constructed_imm(Erule, name, Def);
- 'SET OF' ->
- gen_encode_sof_imm(Erule, name, InnerType, Def);
- 'SEQUENCE' ->
- gen_encode_constructed_imm(Erule, name, Def);
- 'SEQUENCE OF' ->
- gen_encode_sof_imm(Erule, name, InnerType, Def)
- end;
#typedef{name=Type} ->
[{apply,{local,enc_func(Type),Type},[{var,"Val"}]}];
#'Externalvaluereference'{module=Mod,value=Value} ->
@@ -1112,7 +1091,8 @@ enc_obj(Erule, Obj, RestFieldNames0, Aligned) ->
{object,_,Fields} = Def,
[NextField|RestFieldNames] = RestFieldNames0,
{NextField,Typedef} = lists:keyfind(NextField, 1, Fields),
- enc_obj(Erule, Typedef, RestFieldNames, Aligned)
+ enc_obj(Erule, Typedef, TypeName,
+ RestFieldNames, Aligned)
end;
#'Externaltypereference'{module=Mod,type=Type} ->
Func = enc_func(Type),
@@ -1124,6 +1104,11 @@ enc_obj(Erule, Obj, RestFieldNames0, Aligned) ->
end
end.
+gen_enc_obj(Erules, Name, Typename, Type) ->
+ emit([{asis,Name},"(Val) ->",nl]),
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ asn1ct_gen:gen_encode_constructed(Erules, Typename,
+ InnerType, Type).
gen_dec_components_call(Erule, TopType, {Root,ExtList},
DecInfObj, Ext, NumberOfOptionals) ->
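%%
%% A minimal sketch (hypothetical code, not part of the patch above) of what
%% index_object_set/5 computes: a sorted list of {Key,Type} pairs, skipping
%% set entries that are not object triples as well as objects whose wanted
%% field resolves to none.
index_objects(Name, Set, ClassFields) ->
    %% find_field/3 is a placeholder for index_object_set_2/3 above.
    Pairs = [{Key,find_field(Name, Code, ClassFields)}
             || {_,Key,Code} <- Set],
    lists:sort([P || {_,Type}=P <- Pairs, Type =/= none]).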
diff --git a/lib/asn1/src/asn1ct_gen.erl b/lib/asn1/src/asn1ct_gen.erl
index 450d309688..0e41aa1a7a 100644
--- a/lib/asn1/src/asn1ct_gen.erl
+++ b/lib/asn1/src/asn1ct_gen.erl
@@ -531,34 +531,30 @@ gen_part_decode_funcs({primitive,bif},_TypeName,
gen_part_decode_funcs(WhatKind,_TypeName,{_,Directive,_,_}) ->
throw({error,{asn1,{"Not implemented yet",WhatKind," partial incomplete directive:",Directive}}}).
-
-gen_types(Erules,Tname,{RootL1,ExtList,RootL2})
+%% EncDec = 'gen_encode' | 'gen_decode'
+gen_types(Erules, Tname, {RootL1,ExtList,RootL2}, EncDec)
when is_list(RootL1), is_list(RootL2) ->
- gen_types(Erules,Tname,RootL1),
- Rtmod = ct_gen_module(Erules),
- gen_types(Erules,Tname,Rtmod:extaddgroup2sequence(ExtList)),
- gen_types(Erules,Tname,RootL2);
-gen_types(Erules,Tname,{RootList,ExtList}) when is_list(RootList) ->
- gen_types(Erules,Tname,RootList),
+ gen_types(Erules, Tname, RootL1, EncDec),
Rtmod = ct_gen_module(Erules),
- gen_types(Erules,Tname,Rtmod:extaddgroup2sequence(ExtList));
-gen_types(Erules,Tname,[{'EXTENSIONMARK',_,_}|Rest]) ->
- gen_types(Erules,Tname,Rest);
-gen_types(Erules,Tname,[ComponentType|Rest]) ->
+ gen_types(Erules, Tname, Rtmod:extaddgroup2sequence(ExtList), EncDec),
+ gen_types(Erules, Tname, RootL2, EncDec);
+gen_types(Erules, Tname, {RootList,ExtList}, EncDec) when is_list(RootList) ->
+ gen_types(Erules, Tname, RootList, EncDec),
Rtmod = ct_gen_module(Erules),
+ gen_types(Erules, Tname, Rtmod:extaddgroup2sequence(ExtList), EncDec);
+gen_types(Erules, Tname, [{'EXTENSIONMARK',_,_}|T], EncDec) ->
+ gen_types(Erules, Tname, T, EncDec);
+gen_types(Erules, Tname, [ComponentType|T], EncDec) ->
asn1ct_name:clear(),
- Rtmod:gen_encode(Erules,Tname,ComponentType),
- asn1ct_name:clear(),
- Rtmod:gen_decode(Erules,Tname,ComponentType),
- gen_types(Erules,Tname,Rest);
-gen_types(_,_,[]) ->
- true;
-gen_types(Erules,Tname,Type) when is_record(Type,type) ->
Rtmod = ct_gen_module(Erules),
+ Rtmod:EncDec(Erules, Tname, ComponentType),
+ gen_types(Erules, Tname, T, EncDec);
+gen_types(_, _, [], _) ->
+ ok;
+gen_types(Erules, Tname, #type{}=Type, EncDec) ->
asn1ct_name:clear(),
- Rtmod:gen_encode(Erules,Tname,Type),
- asn1ct_name:clear(),
- Rtmod:gen_decode(Erules,Tname,Type).
+ Rtmod = ct_gen_module(Erules),
+ Rtmod:EncDec(Erules, Tname, Type).
%% VARIOUS GENERATOR STUFF
%% *************************************************
@@ -599,25 +595,25 @@ gen_encode_constructed(Erules,Typename,InnerType,D) when is_record(D,type) ->
'SET' ->
Rtmod:gen_encode_set(Erules,Typename,D),
#'SET'{components=Components} = D#type.def,
- gen_types(Erules,Typename,Components);
+ gen_types(Erules, Typename, Components, gen_encode);
'SEQUENCE' ->
Rtmod:gen_encode_sequence(Erules,Typename,D),
#'SEQUENCE'{components=Components} = D#type.def,
- gen_types(Erules,Typename,Components);
+ gen_types(Erules, Typename, Components, gen_encode);
'CHOICE' ->
Rtmod:gen_encode_choice(Erules,Typename,D),
{_,Components} = D#type.def,
- gen_types(Erules,Typename,Components);
+ gen_types(Erules, Typename, Components, gen_encode);
'SEQUENCE OF' ->
Rtmod:gen_encode_sof(Erules,Typename,InnerType,D),
{_,Type} = D#type.def,
NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
- gen_types(Erules,[NameSuffix|Typename],Type);
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_encode);
'SET OF' ->
Rtmod:gen_encode_sof(Erules,Typename,InnerType,D),
{_,Type} = D#type.def,
NameSuffix = asn1ct_gen:constructed_suffix(InnerType,Type#type.def),
- gen_types(Erules,[NameSuffix|Typename],Type);
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_encode);
_ ->
exit({nyi,InnerType})
end;
@@ -630,20 +626,29 @@ gen_decode_constructed(Erules,Typename,InnerType,D) when is_record(D,type) ->
asn1ct:step_in_constructed(), %% updates namelist for exclusive decode
case InnerType of
'SET' ->
- Rtmod:gen_decode_set(Erules,Typename,D);
+ Rtmod:gen_decode_set(Erules,Typename,D),
+ #'SET'{components=Components} = D#type.def,
+ gen_types(Erules, Typename, Components, gen_decode);
'SEQUENCE' ->
- Rtmod:gen_decode_sequence(Erules,Typename,D);
+ Rtmod:gen_decode_sequence(Erules,Typename,D),
+ #'SEQUENCE'{components=Components} = D#type.def,
+ gen_types(Erules, Typename, Components, gen_decode);
'CHOICE' ->
- Rtmod:gen_decode_choice(Erules,Typename,D);
+ Rtmod:gen_decode_choice(Erules,Typename,D),
+ {_,Components} = D#type.def,
+ gen_types(Erules, Typename, Components, gen_decode);
'SEQUENCE OF' ->
- Rtmod:gen_decode_sof(Erules,Typename,InnerType,D);
+ Rtmod:gen_decode_sof(Erules,Typename,InnerType,D),
+ {_,#type{def=Def}=Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType, Def),
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_decode);
'SET OF' ->
- Rtmod:gen_decode_sof(Erules,Typename,InnerType,D);
- _ ->
- exit({nyi,InnerType})
+ Rtmod:gen_decode_sof(Erules,Typename,InnerType,D),
+ {_,#type{def=Def}=Type} = D#type.def,
+ NameSuffix = asn1ct_gen:constructed_suffix(InnerType, Def),
+ gen_types(Erules, [NameSuffix|Typename], Type, gen_decode)
end;
-
gen_decode_constructed(Erules,Typename,InnerType,D) when is_record(D,typedef) ->
gen_decode_constructed(Erules,Typename,InnerType,D#typedef.typespec).
@@ -1228,15 +1233,23 @@ gen_record(TorPtype,Name,Type,Num) when is_record(Type,type) ->
emit({"}).",nl,nl}),
Tr ++ ExtensionList2;
{Rootl1,Extl,Rootl2} ->
+ case Rootl1 =/= [] andalso Extl++Rootl2 =/= [] of
+ true -> emit([com]);
+ false -> ok
+ end,
case Rootl1 of
- [] -> true;
- _ -> emit([",",nl])
+ [_|_] -> emit([nl]);
+ [] -> ok
end,
emit(["%% with extensions",nl]),
gen_record2(Name,'SEQUENCE',Extl,"",ext),
+ case Extl =/= [] andalso Rootl2 =/= [] of
+ true -> emit([com]);
+ false -> ok
+ end,
case Extl of
- [_H|_] when Rootl2 /= [] -> emit([",",nl]);
- _ -> ok
+ [_|_] -> emit([nl]);
+ [] -> ok
end,
emit(["%% end of extensions",nl]),
gen_record2(Name,'SEQUENCE',Rootl2,"",noext),
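%%
%% A minimal sketch (hypothetical code, not part of the patch above) of the
%% dispatch used by gen_types/4: the callback name arrives as an atom
%% (gen_encode or gen_decode) and is invoked with an ordinary dynamic call.
run_gen(Rtmod, EncDec, Erules, Tname, Type)
  when EncDec =:= gen_encode; EncDec =:= gen_decode ->
    Rtmod:EncDec(Erules, Tname, Type).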
diff --git a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
index e51b0898be..37413298a7 100644
--- a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
+++ b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,7 +24,7 @@
-include("asn1_records.hrl").
--export([decode_class/1, decode_type/1]).
+-export([decode_class/1]).
-export([gen_encode/2,gen_encode/3,gen_decode/2,gen_decode/3]).
-export([gen_encode_prim/4]).
-export([gen_dec_prim/3]).
@@ -278,8 +278,7 @@ emit_enc_enumerated_cases(L, Tags) ->
emit_enc_enumerated_cases(L, Tags, noext).
emit_enc_enumerated_cases([{EnumName,EnumVal}|T], Tags, Ext) ->
- Bytes = encode_pos_integer(EnumVal, []),
- Len = length(Bytes),
+ {Bytes,Len} = encode_integer(EnumVal),
emit([{asis,EnumName}," -> ",
{call,ber,encode_tags,[Tags,{asis,Bytes},Len]},";",nl]),
emit_enc_enumerated_cases(T, Tags, Ext);
@@ -288,10 +287,25 @@ emit_enc_enumerated_cases([], _Tags, _Ext) ->
emit([{curr,enumval}," -> exit({error,{asn1, {enumerated_not_in_range,",{curr, enumval},"}}})"]),
emit([nl,"end"]).
-encode_pos_integer(0, [B|_Acc] = L) when B < 128 ->
+encode_integer(Val) ->
+ Bytes =
+ if
+ Val >= 0 ->
+ encode_integer_pos(Val, []);
+ true ->
+ encode_integer_neg(Val, [])
+ end,
+ {Bytes,length(Bytes)}.
+
+encode_integer_pos(0, [B|_Acc]=L) when B < 128 ->
L;
-encode_pos_integer(N, Acc) ->
- encode_pos_integer(N bsr 8, [N band 255|Acc]).
+encode_integer_pos(N, Acc) ->
+ encode_integer_pos((N bsr 8), [N band 16#ff| Acc]).
+
+encode_integer_neg(-1, [B1|_T]=L) when B1 > 127 ->
+ L;
+encode_integer_neg(N, Acc) ->
+ encode_integer_neg(N bsr 8, [N band 16#ff|Acc]).
%%===============================================================================
%%===============================================================================
@@ -1179,23 +1193,25 @@ gen_objset_enc(_,_,{unique,undefined},_,_,_,_,_) ->
gen_objset_enc(Erules, ObjSetName, UniqueName,
[{ObjName,Val,Fields}|T], ClName, ClFields,
NthObj,Acc)->
- emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl]),
CurrMod = get(currmod),
{InternalFunc,NewNthObj}=
case ObjName of
{no_mod,no_name} ->
- gen_inlined_enc_funs(Fields,ClFields,ObjSetName,NthObj);
+ gen_inlined_enc_funs(Fields, ClFields, ObjSetName, Val, NthObj);
{CurrMod,Name} ->
- emit({" fun 'enc_",Name,"'/3"}),
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl,
+ " fun 'enc_",Name,"'/3;",nl]),
{[],NthObj};
{ModuleName,Name} ->
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl]),
emit_ext_fun(enc,ModuleName,Name),
+ emit([";",nl]),
{[],NthObj};
_ ->
- emit({" fun 'enc_",ObjName,"'/3"}),
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl,
+ " fun 'enc_",ObjName,"'/3;",nl]),
{[],NthObj}
end,
- emit({";",nl}),
gen_objset_enc(Erules, ObjSetName, UniqueName, T, ClName, ClFields,
NewNthObj, InternalFunc ++ Acc);
%% See X.681 Annex E for the following case
@@ -1223,13 +1239,14 @@ emit_default_getenc(ObjSetName,UniqueName) ->
%% gen_inlined_enc_funs for each object iterates over all fields of a
%% class, and for each typefield it checks if the object has that
%% field and emits the proper code.
-gen_inlined_enc_funs(Fields, [{typefield,_,_}|_]=T, ObjSetName, NthObj) ->
- emit([indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl,
+gen_inlined_enc_funs(Fields, [{typefield,_,_}|_]=T, ObjSetName, Val, NthObj) ->
+ emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl,
+ indent(3),"fun(Type, Val, _RestPrimFieldName) ->",nl,
indent(6),"case Type of",nl]),
gen_inlined_enc_funs1(Fields, T, ObjSetName, [], NthObj, []);
-gen_inlined_enc_funs(Fields,[_|Rest],ObjSetName,NthObj) ->
- gen_inlined_enc_funs(Fields,Rest,ObjSetName,NthObj);
-gen_inlined_enc_funs(_,[],_,NthObj) ->
+gen_inlined_enc_funs(Fields, [_|Rest], ObjSetName, Val, NthObj) ->
+ gen_inlined_enc_funs(Fields, Rest, ObjSetName, Val, NthObj);
+gen_inlined_enc_funs(_, [], _, _, NthObj) ->
{[],NthObj}.
gen_inlined_enc_funs1(Fields, [{typefield,Name,_}|Rest], ObjSetName,
@@ -1276,7 +1293,7 @@ gen_inlined_enc_funs1(Fields,[_|Rest], ObjSetName, Sep, NthObj, Acc)->
gen_inlined_enc_funs1(Fields, Rest, ObjSetName, Sep, NthObj, Acc);
gen_inlined_enc_funs1(_, [], _, _, NthObj, Acc) ->
emit([nl,indent(6),"end",nl,
- indent(3),"end"]),
+ indent(3),"end;",nl]),
{Acc,NthObj}.
emit_enc_open_type(I) ->
@@ -1358,23 +1375,25 @@ gen_objset_dec(_,_,{unique,undefined},_,_,_,_) ->
ok;
gen_objset_dec(Erules, ObjSName, UniqueName, [{ObjName,Val,Fields}|T],
ClName, ClFields, NthObj)->
- emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl]),
CurrMod = get(currmod),
NewNthObj=
case ObjName of
{no_mod,no_name} ->
- gen_inlined_dec_funs(Fields,ClFields,ObjSName,NthObj);
+ gen_inlined_dec_funs(Fields,ClFields,ObjSName,Val,NthObj);
{CurrMod,Name} ->
- emit([" fun 'dec_",Name,"'/3"]),
+ emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl,
+ " fun 'dec_",Name,"'/3;", nl]),
NthObj;
{ModuleName,Name} ->
+ emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl]),
emit_ext_fun(dec,ModuleName,Name),
+ emit([";",nl]),
NthObj;
_ ->
- emit([" fun 'dec_",ObjName,"'/3"]),
+ emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl,
+ " fun 'dec_",ObjName,"'/3;", nl]),
NthObj
end,
- emit([";",nl]),
gen_objset_dec(Erules, ObjSName, UniqueName, T, ClName,
ClFields, NewNthObj);
gen_objset_dec(_,ObjSetName,_UniqueName,['EXTENSIONMARK'],_ClName,
@@ -1394,10 +1413,15 @@ emit_default_getdec(ObjSetName,UniqueName) ->
emit(["'getdec_",ObjSetName,"'(ErrV) ->",nl]),
emit([indent(2), "fun(C,V,_) -> exit({{component,C},{value,V},{unique_name_and_value,",{asis,UniqueName},", ErrV}}) end"]).
-gen_inlined_dec_funs(Fields, ClFields, ObjSetName, NthObj) ->
+gen_inlined_dec_funs(Fields, [{typefield,_,_}|_]=ClFields, ObjSetName, Val, NthObj) ->
+ emit(["'getdec_",ObjSetName,"'(",{asis,Val},") ->",nl]),
emit([indent(3),"fun(Type, Bytes, _RestPrimFieldName) ->",nl,
indent(6),"case Type of",nl]),
- gen_inlined_dec_funs1(Fields, ClFields, ObjSetName, "", NthObj).
+ gen_inlined_dec_funs1(Fields, ClFields, ObjSetName, "", NthObj);
+gen_inlined_dec_funs(Fields, [_|ClFields], ObjSetName, Val, NthObj) ->
+ gen_inlined_dec_funs(Fields, ClFields, ObjSetName, Val, NthObj);
+gen_inlined_dec_funs(_, _, _, _,NthObj) ->
+ NthObj.
gen_inlined_dec_funs1(Fields, [{typefield,Name,Prop}|Rest],
ObjSetName, Sep0, NthObj) ->
@@ -1439,7 +1463,7 @@ gen_inlined_dec_funs1(Fields, [_|Rest], ObjSetName, Sep, NthObj)->
gen_inlined_dec_funs1(Fields, Rest, ObjSetName, Sep, NthObj);
gen_inlined_dec_funs1(_, [], _, _, NthObj) ->
emit([nl,indent(6),"end",nl,
- indent(3),"end"]),
+ indent(3),"end;",nl]),
NthObj.
emit_dec_open_type(I) ->
@@ -1534,39 +1558,6 @@ decode_class('CONTEXT') ->
decode_class('PRIVATE') ->
?PRIVATE.
-decode_type('BOOLEAN') -> 1;
-decode_type('INTEGER') -> 2;
-decode_type('BIT STRING') -> 3;
-decode_type('OCTET STRING') -> 4;
-decode_type('NULL') -> 5;
-decode_type('OBJECT IDENTIFIER') -> 6;
-decode_type('ObjectDescriptor') -> 7;
-decode_type('EXTERNAL') -> 8;
-decode_type('REAL') -> 9;
-decode_type('ENUMERATED') -> 10;
-decode_type('EMBEDDED_PDV') -> 11;
-decode_type('UTF8String') -> 12;
-decode_type('RELATIVE-OID') -> 13;
-decode_type('SEQUENCE') -> 16;
-decode_type('SEQUENCE OF') -> 16;
-decode_type('SET') -> 17;
-decode_type('SET OF') -> 17;
-decode_type('NumericString') -> 18;
-decode_type('PrintableString') -> 19;
-decode_type('TeletexString') -> 20;
-decode_type('T61String') -> 20;
-decode_type('VideotexString') -> 21;
-decode_type('IA5String') -> 22;
-decode_type('UTCTime') -> 23;
-decode_type('GeneralizedTime') -> 24;
-decode_type('GraphicString') -> 25;
-decode_type('VisibleString') -> 26;
-decode_type('GeneralString') -> 27;
-decode_type('UniversalString') -> 28;
-decode_type('BMPString') -> 30;
-decode_type('CHOICE') -> 'CHOICE'; % choice gets the tag from the actual alternative
-decode_type(Else) -> exit({error,{asn1,{unrecognized_type,Else}}}).
-
mkfuncname(#'Externaltypereference'{module=Mod,type=EType}, DecOrEnc) ->
CurrMod = get(currmod),
case CurrMod of
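%%
%% Worked examples (not from the patch) for the encode_integer/1 helper
%% added above, which returns the minimal big-endian two's-complement
%% octet list together with its length:
%%
%%     encode_integer(0)    -> {[0],1}
%%     encode_integer(127)  -> {[127],1}
%%     encode_integer(128)  -> {[0,128],2}
%%     encode_integer(-1)   -> {[255],1}
%%     encode_integer(-129) -> {[255,127],2}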
diff --git a/lib/asn1/src/asn1ct_imm.erl b/lib/asn1/src/asn1ct_imm.erl
index bdd14871d1..5297d5291c 100644
--- a/lib/asn1/src/asn1ct_imm.erl
+++ b/lib/asn1/src/asn1ct_imm.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2012-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2012-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -499,6 +499,8 @@ per_dec_enumerated_fix_list([], Tail, _) -> Tail.
per_dec_integer_1([{'SingleValue',Value}], _Aligned) ->
{value,Value};
+per_dec_integer_1([{'ValueRange',{'MIN',_}}], Aligned) ->
+ per_dec_unconstrained(Aligned);
per_dec_integer_1([{'ValueRange',{Lb,'MAX'}}], Aligned) when is_integer(Lb) ->
per_decode_semi_constrained(Lb, Aligned);
per_dec_integer_1([{'ValueRange',{Lb,Ub}}], Aligned) when is_integer(Lb),
@@ -1094,6 +1096,9 @@ per_enc_integer_1(Val0, [Constr], Aligned) ->
per_enc_integer_2(Val, {'SingleValue',Sv}, Aligned) when is_integer(Sv) ->
per_enc_constrained(Val, Sv, Sv, Aligned);
+per_enc_integer_2(Val, {'ValueRange',{'MIN',Ub}}, Aligned)
+ when is_integer(Ub) ->
+ {[],{lt,Val,Ub+1},per_enc_unconstrained(Val, Aligned)};
per_enc_integer_2(Val0, {'ValueRange',{Lb,'MAX'}}, Aligned)
when is_integer(Lb) ->
{Prefix,Val} = sub_lb(Val0, Lb),
@@ -1580,7 +1585,7 @@ do_combine_put_bits(_, _, _) ->
throw(impossible).
debit(Budget0, Alternatives) ->
- case Budget0 - log2(Alternatives) of
+ case Budget0 - math:log2(Alternatives) of
Budget when Budget > 0.0 ->
Budget;
_ ->
@@ -1593,8 +1598,6 @@ num_clauses([_|T], N) ->
num_clauses(T, N+1);
num_clauses([], N) -> N.
-log2(N) ->
- math:log(N) / math:log(2.0).
collect_put_bits(Imm) ->
lists:splitwith(fun({put_bits,V,_,_}) when is_integer(V) -> true;
@@ -1919,16 +1922,7 @@ enc_opt(nil, St) ->
enc_opt({seq,H0,T0}, St0) ->
{H,St1} = enc_opt(H0, St0),
{T,St} = enc_opt(T0, St1),
- case {H,T} of
- {none,_} ->
- {T,St};
- {{list,Imm,Data},
- {seq,{call,per,complete,[Data],_},_}} ->
- %% Get rid of any explicit 'align' added by per_enc_open_type/2.
- {{seq,{list,remove_trailing_align(Imm),Data},T},St};
- {_,_} ->
- {{seq,H,T},St}
- end;
+ {enc_opt_seq(H, T),St};
enc_opt({set,_,_}=Imm, St) ->
{Imm,St#ost{t=undefined}};
enc_opt({sub,Src0,Int,Dst}, St0) ->
@@ -1962,6 +1956,28 @@ remove_trailing_align({seq,H,T}) ->
{seq,H,remove_trailing_align(T)};
remove_trailing_align(Imm) -> Imm.
+enc_opt_seq(none, T) ->
+ T;
+enc_opt_seq({list,Imm,Data}, {seq,{call,per,complete,[Data],_},_}=T) ->
+ %% Get rid of any explicit 'align' added by per_enc_open_type/2.
+ {seq,{list,remove_trailing_align(Imm),Data},T};
+enc_opt_seq({call,_,_,_,{var,_}=Dst}=H, T) ->
+ case is_var_unused(Dst, T) of
+ false -> {seq,H,T};
+ true -> T
+ end;
+enc_opt_seq(H, T) ->
+ {seq,H,T}.
+
+is_var_unused(_, align) ->
+ true;
+is_var_unused(V, {call,_,_,Args}) ->
+ not lists:member(V, Args);
+is_var_unused(V, {cons,H,T}) ->
+ is_var_unused(V, H) andalso is_var_unused(V, T);
+is_var_unused(_, _) ->
+ false.
+
bit_size_propagate(Bin, Type, St) ->
case t_range(Type) of
any ->
@@ -2423,7 +2439,8 @@ bit_string_name2pos_fun(NNL, Src) ->
gen_name2pos(Fd, Name, Names) ->
Cs0 = gen_name2pos_cs(Names, Name),
Cs = Cs0 ++ [bit_clause(Name),nil_clause(),invalid_clause()],
- F = {function,1,Name,1,Cs},
+ F0 = {function,1,Name,1,Cs},
+ F = erl_parse:new_anno(F0),
file:write(Fd, [erl_pp:function(F)]).
gen_name2pos_cs([{K,V}|T], Name) ->
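gen_name2pos/3 constructs an abstract-format function with plain integer positions and pretty-prints it; the added erl_parse:new_anno/1 call converts those integers into proper erl_anno annotations before handing the form to erl_pp:function/1. A minimal stand-alone sketch of the same pattern, assuming Fd is an already opened io device and using a placeholder function body:

    pp_placeholder(Fd) ->
        %% Abstract code for "placeholder() -> ok." with bare line numbers.
        F0 = {function,1,placeholder,0,
              [{clause,1,[],[],[{atom,1,ok}]}]},
        F = erl_parse:new_anno(F0),          %% normalize the annotations
        file:write(Fd, [erl_pp:function(F)]).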
diff --git a/lib/asn1/src/asn1ct_parser.yrl b/lib/asn1/src/asn1ct_parser.yrl
deleted file mode 100644
index 083162f191..0000000000
--- a/lib/asn1/src/asn1ct_parser.yrl
+++ /dev/null
@@ -1,1177 +0,0 @@
-%%<copyright>
-%% <year>1997-2008</year>
-%% <holder>Ericsson AB, All Rights Reserved</holder>
-%%</copyright>
-%%<legalnotice>
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% The Initial Developer of the Original Code is Ericsson AB.
-%%</legalnotice>
-%%
-Nonterminals
-ModuleDefinition ModuleIdentifier DefinitiveIdentifier DefinitiveObjIdComponentList
-DefinitiveObjIdComponent TagDefault ExtensionDefault
-ModuleBody Exports SymbolsExported Imports SymbolsImported
-SymbolsFromModuleList SymbolsFromModule GlobalModuleReference AssignedIdentifier SymbolList
-Symbol Reference AssignmentList Assignment
-ExtensionAndException
-ComponentTypeLists
-Externaltypereference Externalvaluereference DefinedType DefinedValue
-AbsoluteReference ItemSpec ItemId ComponentId TypeAssignment
-ValueAssignment
-% ValueSetTypeAssignment
-ValueSet
-Type BuiltinType NamedType ReferencedType
-Value ValueNotNull BuiltinValue ReferencedValue NamedValue
-% BooleanType
-BooleanValue IntegerType NamedNumberList NamedNumber SignedNumber
-% inlined IntegerValue
-EnumeratedType
-% inlined Enumerations
-Enumeration EnumerationItem
-% inlined EnumeratedValue
-% RealType
-RealValue NumericRealValue SpecialRealValue BitStringType
-% inlined BitStringValue
-IdentifierList
-% OctetStringType
-% inlined OctetStringValue
-% NullType NullValue
-SequenceType ComponentTypeList ComponentType
-% SequenceValue SequenceOfValue
-ComponentValueList SequenceOfType
-SAndSOfValue ValueList SetType
-% SetValue SetOfValue
-SetOfType
-ChoiceType
-% AlternativeTypeList made common with ComponentTypeList
-ChoiceValue
-AnyValue
-AnyDefBy
-SelectionType
-TaggedType Tag ClassNumber Class
-% redundant TaggedValue
-% EmbeddedPDVType EmbeddedPDVValue ExternalType ExternalValue ObjectIdentifierType
-ObjectIdentifierValue ObjIdComponentList ObjIdComponent
-% NameForm NumberForm NameAndNumberForm
-CharacterStringType
-RestrictedCharacterStringValue CharacterStringList
-% CharSyms CharsDefn
-Quadruple
-% Group Plane Row Cell
-Tuple
-% TableColumn TableRow
-% UnrestrictedCharacterString
-CharacterStringValue
-% UnrestrictedCharacterStringValue
-ConstrainedType Constraint ConstraintSpec TypeWithConstraint
-ElementSetSpecs ElementSetSpec
-%GeneralConstraint
-UserDefinedConstraint UserDefinedConstraintParameter
-UserDefinedConstraintParameters
-ExceptionSpec
-ExceptionIdentification
-Unions
-UnionMark
-UElems
-Intersections
-IntersectionElements
-IntersectionMark
-IElems
-Elements
-Elems
-SubTypeElements
-Exclusions
-LowerEndpoint
-UpperEndpoint
-LowerEndValue
-UpperEndValue
-TypeConstraints NamedConstraint PresenceConstraint
-
-ParameterizedTypeAssignment
-ParameterList
-Parameters
-Parameter
-ParameterizedType
-
-% X.681
-ObjectClassAssignment ObjectClass ObjectClassDefn
-FieldSpecs FieldSpec OptionalitySpec WithSyntaxSpec
-TokenOrGroupSpecs TokenOrGroupSpec
-SyntaxList OptionalGroup RequiredToken Word
-TypeOptionalitySpec
-ValueOrObjectOptSpec
-VSetOrOSetOptSpec
-ValueOptionalitySpec
-ObjectOptionalitySpec
-ValueSetOptionalitySpec
-ObjectSetOptionalitySpec
-% X.681 chapter 15
-InformationFromObjects
-ValueFromObject
-%ValueSetFromObjects
-TypeFromObject
-%ObjectFromObject
-%ObjectSetFromObjects
-ReferencedObjects
-FieldName
-PrimitiveFieldName
-
-ObjectAssignment
-ObjectSetAssignment
-ObjectSet
-ObjectSetElements
-Object
-ObjectDefn
-DefaultSyntax
-DefinedSyntax
-FieldSettings
-FieldSetting
-DefinedSyntaxTokens
-DefinedSyntaxToken
-Setting
-DefinedObject
-ObjectFromObject
-ObjectSetFromObjects
-ParameterizedObject
-ExternalObjectReference
-DefinedObjectSet
-DefinedObjectClass
-ExternalObjectClassReference
-
-% X.682
-TableConstraint
-ComponentRelationConstraint
-ComponentIdList
-
-% X.683
-ActualParameter
-.
-
-%UsefulType.
-
-Terminals
-'ABSENT' 'ABSTRACT-SYNTAX' 'ALL' 'ANY'
-'APPLICATION' 'AUTOMATIC' 'BEGIN' 'BIT'
-'BOOLEAN' 'BY' 'CHARACTER' 'CHOICE' 'CLASS' 'COMPONENT'
-'COMPONENTS' 'CONSTRAINED' 'DEFAULT' 'DEFINED' 'DEFINITIONS'
-'EMBEDDED' 'END' 'ENUMERATED' 'EXCEPT' 'EXPLICIT'
-'EXPORTS' 'EXTENSIBILITY' 'EXTERNAL' 'FALSE' 'FROM' 'GeneralizedTime'
-'TYPE-IDENTIFIER'
-'IDENTIFIER' 'IMPLICIT' 'IMPLIED' 'IMPORTS'
-'INCLUDES' 'INSTANCE' 'INTEGER' 'INTERSECTION'
-'MAX' 'MIN' 'MINUS-INFINITY' 'NULL'
-'OBJECT' 'ObjectDescriptor' 'OCTET' 'OF' 'OPTIONAL' 'PDV' 'PLUS-INFINITY'
-'PRESENT' 'PRIVATE' 'REAL' 'SEQUENCE' 'SET' 'SIZE'
-'STRING' 'SYNTAX' 'TAGS' 'TRUE' 'UNION'
-'UNIQUE' 'UNIVERSAL' 'UTCTime' 'WITH'
-'{' '}' '(' ')' '.' '::=' ';' ',' '@' '*' '-' '[' ']'
-'!' '..' '...' '|' '<' ':' '^'
-number identifier typereference restrictedcharacterstringtype
-bstring hstring cstring typefieldreference valuefieldreference
-objectclassreference word.
-
-Rootsymbol ModuleDefinition.
-Endsymbol '$end'.
-
-Left 300 'EXCEPT'.
-Left 200 '^'.
-Left 200 'INTERSECTION'.
-Left 100 '|'.
-Left 100 'UNION'.
-
-
-ModuleDefinition -> ModuleIdentifier
- 'DEFINITIONS'
- TagDefault
- ExtensionDefault
- '::='
- 'BEGIN'
- ModuleBody
- 'END' :
- {'ModuleBody',Ex,Im,Types} = '$7',
- {{typereference,Pos,Name},Defid} = '$1',
- #module{
- pos= Pos,
- name= Name,
- defid= Defid,
- tagdefault='$3',
- extensiondefault='$4',
- exports=Ex,
- imports=Im,
- typeorval=Types}.
-% {module, '$1','$3','$6'}.
-% Results always in a record of type module defined in asn_records.hlr
-
-ModuleIdentifier -> typereference DefinitiveIdentifier :
- put(asn1_module,'$1'#typereference.val),
- {'$1','$2'}.
-
-DefinitiveIdentifier -> '{' DefinitiveObjIdComponentList '}' : '$2' .
-DefinitiveIdentifier -> '$empty': [].
-
-DefinitiveObjIdComponentList -> DefinitiveObjIdComponent : ['$1'].
-DefinitiveObjIdComponentList -> DefinitiveObjIdComponent DefinitiveObjIdComponentList : ['$1'|'$2'].
-
-DefinitiveObjIdComponent -> identifier : '$1' . %expanded->
-% DefinitiveObjIdComponent -> NameForm : '$1' .
-DefinitiveObjIdComponent -> number : '$1' . %expanded->
-% DefinitiveObjIdComponent -> DefinitiveNumberForm : 'fix' .
-DefinitiveObjIdComponent -> identifier '(' number ')' : {'$1','$3'} . %expanded->
-% DefinitiveObjIdComponent -> DefinitiveNameAndNumberForm : {'$1','$3'} .
-
-% DefinitiveNumberForm -> number : 'fix' .
-
-% DefinitiveNameAndNumberForm -> identifier '(' DefinitiveNumberForm ')' : 'fix' .
-
-TagDefault -> 'EXPLICIT' 'TAGS' : put(tagdefault,'EXPLICIT'),'EXPLICIT' .
-TagDefault -> 'IMPLICIT' 'TAGS' : put(tagdefault,'IMPLICIT'),'IMPLICIT' .
-TagDefault -> 'AUTOMATIC' 'TAGS' : put(tagdefault,'AUTOMATIC'),'AUTOMATIC' .
-TagDefault -> '$empty': put(tagdefault,'EXPLICIT'),'EXPLICIT'. % because this is the default
-
-ExtensionDefault -> 'EXTENSIBILITY' 'IMPLIED' : 'IMPLIED'.
-ExtensionDefault -> '$empty' : 'false'. % because this is the default
-
-ModuleBody -> Exports Imports AssignmentList : {'ModuleBody','$1','$2','$3'}.
-ModuleBody -> '$empty' : {'ModuleBody',nil,nil,[]}.
-
-Exports -> 'EXPORTS' SymbolList ';' : {exports,'$2'}.
-Exports -> 'EXPORTS' ';' : {exports,[]}.
-Exports -> '$empty' : {exports,all} .
-
-% inlined above SymbolsExported -> SymbolList : '$1'.
-% inlined above SymbolsExported -> '$empty' : [].
-
-Imports -> 'IMPORTS' SymbolsFromModuleList ';' : {imports,'$2'}.
-Imports -> 'IMPORTS' ';' : {imports,[]}.
-Imports -> '$empty' : {imports,[]} .
-
-% inlined above SymbolsImported -> SymbolsFromModuleList : '$1'.
-% inlined above SymbolsImported -> '$empty' : [].
-
-SymbolsFromModuleList -> SymbolsFromModule :['$1'].
-% SymbolsFromModuleList -> SymbolsFromModuleList SymbolsFromModule :$1.%changed
-SymbolsFromModuleList -> SymbolsFromModule SymbolsFromModuleList :['$1'|'$2'].
-
-% expanded SymbolsFromModule -> SymbolList 'FROM' GlobalModuleReference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-SymbolsFromModule -> SymbolList 'FROM' typereference : #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-SymbolsFromModule -> SymbolList 'FROM' typereference '{' ValueList '}': #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference identifier: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference Externalvaluereference: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-%SymbolsFromModule -> SymbolList 'FROM' typereference DefinedValue: #'SymbolsFromModule'{symbols = '$1',module='$3'}.
-
-% inlined GlobalModuleReference -> typereference AssignedIdentifier : {'$1','$2'} .
-
-% inlined above AssignedIdentifier -> '{' ValueList '}' : '$2'.
-% replaced AssignedIdentifier -> '{' DefinedValue ObjIdComponentList '}' :{'$2','$3'}.
-% not necessary , replaced by SAndSOfValue AssignedIdentifier -> ObjectIdentifierValue :'$1'.
-% AssignedIdentifier -> DefinedValue : '$1'.
-% inlined AssignedIdentifier -> '$empty' : undefined.
-
-SymbolList -> Symbol : ['$1'].
-SymbolList -> Symbol ',' SymbolList :['$1'|'$3'].
-
-Symbol -> Reference :'$1'.
-% later Symbol -> ParameterizedReference :'$1'.
-
-Reference -> typereference :'$1'.
-Reference -> identifier:'$1'.
-Reference -> typereference '{' '}':'$1'.
-Reference -> Externaltypereference '{' '}':'$1'.
-
-% later Reference -> objectclassreference :'$1'.
-% later Reference -> objectreference :'$1'.
-% later Reference -> objectsetreference :'$1'.
-
-AssignmentList -> Assignment : ['$1'].
-% modified AssignmentList -> AssignmentList Assignment : '$1'.
-AssignmentList -> Assignment AssignmentList : ['$1'|'$2'].
-
-Assignment -> TypeAssignment : '$1'.
-Assignment -> ValueAssignment : '$1'.
-% later Assignment -> ValueSetTypeAssignment : '$1'.
-Assignment -> ObjectClassAssignment : '$1'.
-% later Assignment -> ObjectAssignment : '$1'.
-% combined with ValueAssignment Assignment -> ObjectAssignment : '$1'.
-Assignment -> ObjectSetAssignment : '$1'.
-Assignment -> ParameterizedTypeAssignment : '$1'.
-%Assignment -> ParameterizedValueAssignment : '$1'.
-%Assignment -> ParameterizedValueSetTypeAssignment : '$1'.
-%Assignment -> ParameterizedObjectClassAssignment : '$1'.
-
-ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' :
-%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5',[]}}.
-ObjectClassAssignment -> typereference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
-%ObjectClassAssignment -> objectclassreference '::=' 'CLASS' '{' FieldSpecs '}' WithSyntaxSpec :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'CLASS','$5','$7'}}.
-
-FieldSpecs -> FieldSpec : ['$1'].
-FieldSpecs -> FieldSpec ',' FieldSpecs : ['$1'|'$3'].
-
-FieldSpec -> typefieldreference TypeOptionalitySpec : {typefield,'$1','$2'}.
-
-FieldSpec -> valuefieldreference Type 'UNIQUE' ValueOrObjectOptSpec :
- {fixedtypevaluefield,'$1','$2','UNIQUE','$4'}.
-FieldSpec -> valuefieldreference Type ValueOrObjectOptSpec :
- {fixedtypevaluefield,'$1','$2',undefined,'$3'}.
-
-FieldSpec -> valuefieldreference typefieldreference ValueOrObjectOptSpec :
- {variabletypevaluefield, '$1','$2','$3'}.
-
-FieldSpec -> typefieldreference typefieldreference VSetOrOSetOptSpec :
- {variabletypevaluesetfield, '$1','$2','$3'}.
-
-FieldSpec -> typefieldreference Type VSetOrOSetOptSpec :
- {fixedtypevaluesetfield, '$1','$2','$3'}.
-
-TypeOptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
-TypeOptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
-TypeOptionalitySpec -> '$empty' : 'MANDATORY'.
-
-ValueOrObjectOptSpec -> ValueOptionalitySpec : '$1'.
-ValueOrObjectOptSpec -> ObjectOptionalitySpec : '$1'.
-ValueOrObjectOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
-ValueOrObjectOptSpec -> '$empty' : 'MANDATORY'.
-
-ValueOptionalitySpec -> 'DEFAULT' Value :
- case '$2' of
- {identifier,_,Id} -> {'DEFAULT',Id};
- _ -> {'DEFAULT','$2'}
- end.
-
-%ObjectOptionalitySpec -> 'DEFAULT' Object :{'DEFAULT','$1'}.
-ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting ',' FieldSettings '}' :
- {'DEFAULT',{object,['$2'|'$4']}}.
-ObjectOptionalitySpec -> 'DEFAULT' '{' FieldSetting '}' :
- {'DEFAULT',{object, ['$2']}}.
-%ObjectOptionalitySpec -> 'DEFAULT' '{' DefinedSyntaxTokens '}' :
-% {'DEFAULT',{object, '$2'}}.
-ObjectOptionalitySpec -> 'DEFAULT' ObjectFromObject :
- {'DEFAULT',{object, '$2'}}.
-
-
-VSetOrOSetOptSpec -> ValueSetOptionalitySpec : '$1'.
-%VSetOrOSetOptSpec -> ObjectSetOptionalitySpec : '$1'.
-VSetOrOSetOptSpec -> 'OPTIONAL' : 'OPTIONAL'.
-VSetOrOSetOptSpec -> '$empty' : 'MANDATORY'.
-
-ValueSetOptionalitySpec -> 'DEFAULT' ValueSet : {'DEFAULT','$1'}.
-
-%ObjectSetOptionalitySpec -> 'DEFAULT' ObjectSet : {'DEFAULT','$1'}.
-
-OptionalitySpec -> 'DEFAULT' Type : {'DEFAULT','$2'}.
-OptionalitySpec -> 'DEFAULT' ValueNotNull :
- case '$2' of
- {identifier,_,Id} -> {'DEFAULT',Id};
- _ -> {'DEFAULT','$2'}
- end.
-OptionalitySpec -> 'OPTIONAL' : 'OPTIONAL'.
-OptionalitySpec -> '$empty' : 'MANDATORY'.
-
-WithSyntaxSpec -> 'WITH' 'SYNTAX' SyntaxList : {'WITH SYNTAX','$3'}.
-
-SyntaxList -> '{' TokenOrGroupSpecs '}' : '$2'.
-SyntaxList -> '{' '}' : [].
-
-TokenOrGroupSpecs -> TokenOrGroupSpec : ['$1'].
-TokenOrGroupSpecs -> TokenOrGroupSpec TokenOrGroupSpecs : ['$1'|'$2'].
-
-TokenOrGroupSpec -> RequiredToken : '$1'.
-TokenOrGroupSpec -> OptionalGroup : '$1'.
-
-OptionalGroup -> '[' TokenOrGroupSpecs ']' : '$2'.
-
-RequiredToken -> typereference : '$1'.
-RequiredToken -> Word : '$1'.
-RequiredToken -> ',' : '$1'.
-RequiredToken -> PrimitiveFieldName : '$1'.
-
-Word -> 'BY' : 'BY'.
-
-ParameterizedTypeAssignment -> typereference ParameterList '::=' Type :
- #ptypedef{pos=element(2,'$1'),name=element(3,'$1'),
- args='$2', typespec='$4'}.
-
-ParameterList -> '{' Parameters '}':'$2'.
-
-Parameters -> Parameter: ['$1'].
-Parameters -> Parameter ',' Parameters: ['$1'|'$3'].
-
-Parameter -> typereference: '$1'.
-Parameter -> Value: '$1'.
-Parameter -> Type ':' typereference: {'$1','$3'}.
-Parameter -> Type ':' Value: {'$1','$3'}.
-Parameter -> '{' typereference '}': {objectset,'$2'}.
-
-
-% Externaltypereference -> modulereference '.' typereference : {'$1','$3'} .
-Externaltypereference -> typereference '.' typereference : #'Externaltypereference'{pos=element(2,'$1'),module=element(3,'$1'),type=element(3,'$3')}.
-
-% Externalvaluereference -> modulereference '.' valuereference : {'$1','$3'} .
-% inlined Externalvaluereference -> typereference '.' identifier : #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),value=element(3,'$3')}.
-
-
-DefinedType -> Externaltypereference : '$1' .
-DefinedType -> typereference :
- #'Externaltypereference'{pos='$1'#typereference.pos,
- module= get(asn1_module),
- type= '$1'#typereference.val} .
-DefinedType -> typereference ParameterList : {pt,'$1','$2'}.
-DefinedType -> Externaltypereference ParameterList : {pt,'$1','$2'}.
-
-% ActualParameterList -> '{' ActualParameters '}' : '$1'.
-
-% ActualParameters -> ActualParameter : ['$1'].
-% ActualParameters -> ActualParameter ',' ActualParameters : ['$1'|'$3'].
-
-ActualParameter -> Type : '$1'.
-ActualParameter -> ValueNotNull : '$1'.
-ActualParameter -> ValueSet : '$1'.
-% later DefinedType -> ParameterizedType : '$1' .
-% later DefinedType -> ParameterizedValueSetType : '$1' .
-
-% inlined DefinedValue -> Externalvaluereference :'$1'.
-% inlined DefinedValue -> identifier :'$1'.
-% later DefinedValue -> ParameterizedValue :'$1'.
-
-% not referenced yet AbsoluteReference -> '@' GlobalModuleReference '.' ItemSpec :{'$2','$4'}.
-
-% not referenced yet ItemSpec -> typereference :'$1'.
-% not referenced yet ItemSpec -> ItemId '.' ComponentId : {'$1','$3'}.
-
-% not referenced yet ItemId -> ItemSpec : '$1'.
-
-% not referenced yet ComponentId -> identifier :'$1'.
-% not referenced yet ComponentId -> number :'$1'.
-% not referenced yet ComponentId -> '*' :'$1'.
-
-TypeAssignment -> typereference '::=' Type :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec='$3'}.
-
-ValueAssignment -> identifier Type '::=' Value :
- #valuedef{pos=element(2,'$1'),name=element(3,'$1'),type='$2',value='$4'}.
-
-% later ValueSetTypeAssignment -> typereference Type '::=' ValueSet :{'ValueSetTypeAssignment','$1','$2','$4'}.
-
-
-ValueSet -> '{' ElementSetSpec '}' : {valueset,'$2'}.
-
-% record(type,{tag,def,constraint}).
-Type -> BuiltinType :#type{def='$1'}.
-Type -> 'NULL' :#type{def='NULL'}.
-Type -> TaggedType:'$1'.
-Type -> ReferencedType:#type{def='$1'}. % change notag later
-Type -> ConstrainedType:'$1'.
-
-%ANY is here for compatibility with the old ASN.1 standard from 1988
-BuiltinType -> 'ANY' AnyDefBy:
- case '$2' of
- [] -> 'ANY';
- _ -> {'ANY DEFINED BY','$2'}
- end.
-BuiltinType -> BitStringType :'$1'.
-BuiltinType -> 'BOOLEAN' :element(1,'$1').
-BuiltinType -> CharacterStringType :'$1'.
-BuiltinType -> ChoiceType :'$1'.
-BuiltinType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
-BuiltinType -> EnumeratedType :'$1'.
-BuiltinType -> 'EXTERNAL' :element(1,'$1').
-% later BuiltinType -> InstanceOfType :'$1'.
-BuiltinType -> IntegerType :'$1'.
-% BuiltinType -> 'NULL' :element(1,'$1').
-% later BuiltinType -> ObjectClassFieldType :'$1'.
-BuiltinType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
-BuiltinType -> 'OCTET' 'STRING' :'OCTET STRING'.
-BuiltinType -> 'REAL' :element(1,'$1').
-BuiltinType -> SequenceType :'$1'.
-BuiltinType -> SequenceOfType :'$1'.
-BuiltinType -> SetType :'$1'.
-BuiltinType -> SetOfType :'$1'.
-% The so called Useful types
-BuiltinType -> 'GeneralizedTime': 'GeneralizedTime'.
-BuiltinType -> 'UTCTime' :'UTCTime'.
-BuiltinType -> 'ObjectDescriptor' : 'ObjectDescriptor'.
-
-% moved BuiltinType -> TaggedType :'$1'.
-
-
-AnyDefBy -> 'DEFINED' 'BY' identifier: '$3'.
-AnyDefBy -> '$empty': [].
-
-NamedType -> identifier Type :
-%{_,Pos,Val} = '$1',
-%{'NamedType',Pos,{Val,'$2'}}.
-V1 = '$1',
-{'NamedType',V1#identifier.pos,{V1#identifier.val,'$2'}}.
-NamedType -> SelectionType :'$1'.
-
-ReferencedType -> DefinedType : '$1'.
-% redundant ReferencedType -> UsefulType : 'fix'.
-ReferencedType -> SelectionType : '$1'.
-ReferencedType -> TypeFromObject : '$1'.
-% later ReferencedType -> ValueSetFromObjects : 'fix'.
-
-% too many conflicts Value -> AnyValue :'$1'.
-Value -> ValueNotNull : '$1'.
-Value -> 'NULL' :element(1,'$1').
-
-ValueNotNull -> BuiltinValue :'$1'.
-% inlined Value -> DefinedValue :'$1'. % DefinedValue , identifier
-% inlined Externalvaluereference -> Externalvaluereference :'$1'.
-ValueNotNull -> typereference '.' identifier :
- #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
- value=element(3,'$3')}.
-ValueNotNull -> identifier :'$1'.
-
-
-%tmp Value -> NamedNumber: '$1'. % not a value but part of ObjIdC
-% redundant BuiltinValue -> BitStringValue :'$1'.
-BuiltinValue -> BooleanValue :'$1'.
-BuiltinValue -> CharacterStringValue :'$1'.
-BuiltinValue -> ChoiceValue :'$1'.
-% BuiltinValue -> EmbeddedPDVValue :'$1'. ==SequenceValue
-% BuiltinValue -> EnumeratedValue :'$1'. identifier
-% BuiltinValue -> ExternalValue :'$1'. ==SequenceValue
-% later BuiltinValue -> InstanceOfValue :'$1'.
-BuiltinValue -> SignedNumber :'$1'.
-% BuiltinValue -> 'NULL' :'$1'.
-% later BuiltinValue -> ObjectClassFieldValue :'$1'.
-% replaced by SAndSOfValue BuiltinValue -> ObjectIdentifierValue :'$1'.
-BuiltinValue -> bstring :element(3,'$1').
-BuiltinValue -> hstring :element(3,'$1').
-% conflict BuiltinValue -> RealValue :'$1'.
-BuiltinValue -> SAndSOfValue :'$1'.
-% replaced BuiltinValue -> SequenceOfValue :'$1'.
-% replaced BuiltinValue -> SequenceValue :'$1'.
-% replaced BuiltinValue -> SetValue :'$1'.
-% replaced BuiltinValue -> SetOfValue :'$1'.
-% conflict redundant BuiltinValue -> TaggedValue :'$1'.
-
-% inlined ReferencedValue -> DefinedValue:'$1'.
-% ReferencedValue -> Externalvaluereference:'$1'.
-% ReferencedValue -> identifier :'$1'.
-% later ReferencedValue -> ValueFromObject:'$1'.
-
-% inlined BooleanType -> BOOLEAN :'BOOLEAN'.
-
-% too many conflicts AnyValue -> Type ':' Value : {'ANYVALUE',{'$1','$3'}}.
-
-BooleanValue -> TRUE :true.
-BooleanValue -> FALSE :false.
-
-IntegerType -> 'INTEGER' : 'INTEGER'.
-IntegerType -> 'INTEGER' '{' NamedNumberList '}' : {'INTEGER','$3'}.
-
-NamedNumberList -> NamedNumber :['$1'].
-% modified NamedNumberList -> NamedNumberList ',' NamedNumber :'fix'.
-NamedNumberList -> NamedNumber ',' NamedNumberList :['$1'|'$3'].
-
-NamedNumber -> identifier '(' SignedNumber ')' : {'NamedNumber',element(3,'$1'),'$3'}.
-NamedNumber -> identifier '(' typereference '.' identifier ')' : {'NamedNumber',element(3,'$1'),{'ExternalValue',element(3,'$3'),element(3,'$5')}}.
-NamedNumber -> identifier '(' identifier ')' : {'NamedNumber',element(3,'$1'),element(3,'$3')}.
-
-%NamedValue -> identifier Value :
-% {'NamedValue',element(2,'$1'),element(3,'$1'),'$2'}.
-
-
-SignedNumber -> number : element(3,'$1').
-SignedNumber -> '-' number : - element(3,'$1').
-
-% inlined IntegerValue -> SignedNumber :'$1'.
-% conflict moved to Value IntegerValue -> identifier:'$1'.
-
-EnumeratedType -> ENUMERATED '{' Enumeration '}' :{'ENUMERATED','$3'}.
-
-% inlined Enumerations -> Enumeration :{'$1','false',[]}.
-% inlined Enumerations -> Enumeration ',' '...' : {'$1','true',[]}.
-% inlined Enumerations -> Enumeration ',' '...' ',' Enumeration : {'$1','true','$5'}.
-
-Enumeration -> EnumerationItem :['$1'].
-% modified Enumeration -> EnumerationItem ',' Enumeration :'fix'.
-Enumeration -> EnumerationItem ',' Enumeration :['$1'|'$3'].
-
-EnumerationItem -> identifier:element(3,'$1').
-EnumerationItem -> NamedNumber :'$1'.
-EnumerationItem -> '...' :'EXTENSIONMARK'.
-
-% conflict moved to Value EnumeratedValue -> identifier:'$1'.
-
-% inlined RealType -> REAL:'REAL'.
-
-RealValue -> NumericRealValue :'$1'.
-RealValue -> SpecialRealValue:'$1'.
-
-% ?? NumericRealValue -> number:'$1'. % number MUST BE '0'
-NumericRealValue -> SAndSOfValue : '$1'. % Value of the associated sequence type
-
-SpecialRealValue -> 'PLUS-INFINITY' :'$1'.
-SpecialRealValue -> 'MINUS-INFINITY' :'$1'.
-
-BitStringType -> 'BIT' 'STRING' :{'BIT STRING',[]}.
-BitStringType -> 'BIT' 'STRING' '{' NamedNumberList '}' :{'BIT STRING','$4'}.
-% NamedBitList replaced by NamedNumberList to reduce the grammar
-% Must check later that all "numbers" are positive
-
-% inlined BitStringValue -> bstring:'$1'.
-% inlined BitStringValue -> hstring:'$1'.
-% redundant use SequenceValue BitStringValue -> '{' IdentifierList '}' :$2.
-% redundant use SequenceValue BitStringValue -> '{' '}' :'fix'.
-
-IdentifierList -> identifier :[element(3,'$1')].
-% modified IdentifierList -> IdentifierList ',' identifier :'$1'.
-IdentifierList -> identifier ',' IdentifierList :[element(3,'$1')|'$3'].
-
-% inlined OctetStringType -> 'OCTET' 'STRING' :'OCTET STRING'.
-
-% inlined OctetStringValue -> bstring:'$1'.
-% inlined OctetStringValue -> hstring:'$1'.
-
-% inlined NullType -> 'NULL':'NULL'.
-
-% inlined NullValue -> NULL:'NULL'.
-
-% result is {'SEQUENCE',Optionals,Extensionmark,Componenttypelist}.
-SequenceType -> SEQUENCE '{' ComponentTypeList '}' :{'SEQUENCE','$3'}.
-% SequenceType -> SEQUENCE '{' ComponentTypeLists '}' :{'SEQUENCE','$3'}.
-% SequenceType -> SEQUENCE '{' ExtensionAndException '}' :{'SEQUENCE','$3'}.
-SequenceType -> SEQUENCE '{' '}' :{'SEQUENCE',[]}.
-
-% result is {RootComponentList,ExtensionAndException,AdditionalComponentTypeList}.
-%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException :{'$1','$3',[]}.
-%ComponentTypeLists -> ComponentTypeList :{'$1','false',[]}.
-%ComponentTypeLists -> ComponentTypeList ',' ExtensionAndException
-% ',' ComponentTypeList :{'$1','$3', '$5'}.
-%ComponentTypeLists -> ExtensionAndException ',' ComponentTypeList :{[],'$1','$3'}.
-
-ComponentTypeList -> ComponentType :['$1'].
-% modified below ComponentTypeList -> ComponentTypeList ',' ComponentType :'$1'.
-ComponentTypeList -> ComponentType ',' ComponentTypeList :['$1'|'$3'].
-
-% -record('ComponentType',{pos,name,type,attrib}).
-ComponentType -> '...' ExceptionSpec :{'EXTENSIONMARK',element(2,'$1'),'$2'}.
-ComponentType -> NamedType :
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop=mandatory}.
-ComponentType -> NamedType 'OPTIONAL' :
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop='OPTIONAL'}.
-ComponentType -> NamedType 'DEFAULT' Value:
- {'NamedType',Pos,{Name,Type}} = '$1',
- #'ComponentType'{pos=Pos,name=Name,typespec=Type,prop={'DEFAULT','$3'}}.
-ComponentType -> 'COMPONENTS' 'OF' Type :{'COMPONENTS OF','$3'}.
-
-% redundant ExtensionAndException -> '...' : extensionmark.
-% ExtensionAndException -> '...' ExceptionSpec : {extensionmark,'$2'}.
-
-% replaced SequenceValue -> '{' ComponentValueList '}':'$2'.
-% replaced SequenceValue -> '{' '}':[].
-
-ValueList -> Value :['$1'].
-ValueList -> NamedNumber :['$1'].
-% modified ValueList -> ValueList ',' Value :'$1'.
-ValueList -> Value ',' ValueList :['$1'|'$3'].
-ValueList -> Value ',' '...' :['$1' |[]].
-ValueList -> Value ValueList : ['$1',space|'$2'].
-ValueList -> NamedNumber ValueList: ['$1',space|'$2'].
-
-%ComponentValueList -> identifier ObjIdComponent:[{'NamedValue','$1','$2'}].
-%ComponentValueList -> NamedValue :['$1'].
-%ComponentValueList -> NamedValue ',' ComponentValueList:['$1'|'$3'].
-%ComponentValueList -> identifier ObjIdComponent ',' ComponentValueList :[{'NamedValue', '$1','$2'}|'$4'].
-
-SequenceOfType -> SEQUENCE OF Type : {'SEQUENCE OF','$3'}.
-
-% replaced SequenceOfValue with SAndSOfValue
-
-SAndSOfValue -> '{' ValueList '}' :'$2'.
-%SAndSOfValue -> '{' ComponentValueList '}' :'$2'.
-SAndSOfValue -> '{' '}' :[].
-
-% save for later SetType ->
-% result is {'SET',Optionals,Extensionmark,Componenttypelist}.
-SetType -> SET '{' ComponentTypeList '}' :{'SET','$3'}.
-% SetType -> SET '{' ExtensionAndException '}' :{'SET','$3'}.
-SetType -> SET '{' '}' :{'SET',[]}.
-
-% replaced SetValue with SAndSOfValue
-
-SetOfType -> SET OF Type : {'SET OF','$3'}.
-
-% replaced SetOfValue with SAndSOfValue
-
-ChoiceType -> 'CHOICE' '{' ComponentTypeList '}' :{'CHOICE','$3'}.
-% AlternativeTypeList is replaced by ComponentTypeList
-ChoiceValue -> identifier ':' Value : {'ChoiceValue',element(3,'$1'),'$3'}.
-% save for later SelectionType ->
-
-TaggedType -> Tag Type : '$2'#type{tag=['$1'#tag{type={default,get(tagdefault)}}]}.
-TaggedType -> Tag IMPLICIT Type :'$3'#type{tag=['$1'#tag{type='IMPLICIT'}]}.
-TaggedType -> Tag EXPLICIT Type :'$3'#type{tag=['$1'#tag{type='EXPLICIT'}]}.
-
-Tag -> '[' Class ClassNumber ']': #tag{class='$2',number='$3'}.
-Tag -> '[' Class typereference '.' identifier ']':
- #tag{class='$2',number=#'Externalvaluereference'{pos=element(2,'$3'),module=element(3,'$3'),
- value=element(3,'$5')}}.
-Tag -> '[' Class number ']': #tag{class='$2',number=element(3,'$3')}.
-Tag -> '[' Class identifier ']': #tag{class='$2',number=element(3,'$3')}.
-
-ClassNumber -> number :element(3,'$1').
-% inlined above ClassNumber -> typereference '.' identifier :{'Externalvaluereference',element(3,'$1'),element(3,'$3')}.
-ClassNumber -> identifier :element(3,'$1').
-
-Class -> 'UNIVERSAL' :element(1,'$1').
-Class -> 'APPLICATION' :element(1,'$1').
-Class -> 'PRIVATE' :element(1,'$1').
-Class -> '$empty' :'CONTEXT'.
-
-% conflict redundant TaggedValue -> Value:'$1'.
-
-% inlined EmbeddedPDVType -> 'EMBEDDED' 'PDV' :'EMBEDDED PDV'.
-
-% inlined EmbeddedPDVValue -> SequenceValue:'$1'.
-
-% inlined ExternalType -> 'EXTERNAL' :'EXTERNAL'.
-
-% inlined ExternalValue -> SequenceValue :'$1'.
-
-% inlined ObjectIdentifierType -> 'OBJECT' 'IDENTIFIER' :'OBJECT IDENTIFIER'.
-
-ObjectIdentifierValue -> '{' ObjIdComponentList '}' :'$2'.
-% inlined ObjectIdentifierValue -> SequenceAndSequenceOfValue :'$1'.
-% ObjectIdentifierValue -> '{' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue','$2','$3'}.
-% ObjectIdentifierValue -> '{' typereference '.' identifier ObjIdComponentList '}' :{'ObjectIdentifierValue',{'$2','$4'},'$5'}.
-
-ObjIdComponentList -> Value:'$1'.
-ObjIdComponentList -> Value ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> DefinedValue:'$1'.
-%ObjIdComponentList -> number:'$1'.
-%ObjIdComponentList -> DefinedValue ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> number ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
-%ObjIdComponentList -> ObjIdComponent ObjIdComponentList :['$1'|'$2'].
-
-% redundant ObjIdComponent -> NameForm :'$1'. % expanded
-% replaced by 2 ObjIdComponent -> NumberForm :'$1'.
-% ObjIdComponent -> number :'$1'.
-% ObjIdComponent -> DefinedValue :'$1'. % means DefinedValue
-% ObjIdComponent -> NameAndNumberForm :'$1'.
-% ObjIdComponent -> NamedNumber :'$1'.
-% NamedBit replaced by NamedNumber to reduce grammar
-% must check later that "number" is positive
-
-% NameForm -> identifier:'$1'.
-
-% inlined NumberForm -> number :'$1'.
-% inlined NumberForm -> DefinedValue :'$1'.
-
-% replaced by NamedBit NameAndNumberForm -> identifier '(' NumberForm ')'.
-% NameAndNumberForm -> NamedBit:'$1'.
-
-
-CharacterStringType -> restrictedcharacterstringtype :element(3,'$1').
-CharacterStringType -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
-
-RestrictedCharacterStringValue -> cstring :element(3, '$1').
-% modified below RestrictedCharacterStringValue -> CharacterStringList :'$1'.
-% conflict vs BuiltinValue RestrictedCharacterStringValue -> SequenceAndSequenceOfValue :'$1'.
-RestrictedCharacterStringValue -> Quadruple :'$1'.
-RestrictedCharacterStringValue -> Tuple :'$1'.
-
-% redundant CharacterStringList -> '{' ValueList '}' :'$2'. % modified
-
-% redundant CharSyms -> CharsDefn :'$1'.
-% redundant CharSyms -> CharSyms ',' CharsDefn :['$1'|'$3'].
-
-% redundant CharsDefn -> cstring :'$1'.
-% temporary replaced see below CharsDefn -> DefinedValue :'$1'.
-% redundant CharsDefn -> Value :'$1'.
-
-Quadruple -> '{' number ',' number ',' number ',' number '}' :{'Quadruple','$2','$4','$6','$8'}.
-% {Group,Plane,Row,Cell}
-
-Tuple -> '{' number ',' number '}' :{'Tuple', '$2','$4'}.
-% {TableColumn,TableRow}
-
-% inlined UnrestrictedCharacterString -> 'CHARACTER' 'STRING' :'CHARACTER STRING'.
-
-CharacterStringValue -> RestrictedCharacterStringValue :'$1'.
-% conflict vs BuiltinValue CharacterStringValue -> SequenceValue :'$1'. % UnrestrictedCharacterStringValue
-
-% inlined UsefulType -> typereference :'$1'.
-
-SelectionType -> identifier '<' Type : {'SelectionType',element(3,'$1'),'$3'}.
-
-ConstrainedType -> Type Constraint :
- '$1'#type{constraint=merge_constraints(['$2'])}.
-ConstrainedType -> Type Constraint Constraint :
- '$1'#type{constraint=merge_constraints(['$2','$3'])}.
-ConstrainedType -> Type Constraint Constraint Constraint:
- '$1'#type{constraint=merge_constraints(['$2','$3','$4'])}.
-ConstrainedType -> Type Constraint Constraint Constraint Constraint:
- '$1'#type{constraint=merge_constraints(['$2','$3','$4','$5'])}.
-%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
-%ConstrainedType -> Type Constraint :'$1'#type{constraint='$2'}.
-ConstrainedType -> TypeWithConstraint :'$1'.
-
-TypeWithConstraint -> 'SET' Constraint 'OF' Type :
- #type{def = {'SET OF','$4'},constraint=merge_constraints(['$2'])}.
-TypeWithConstraint -> 'SET' 'SIZE' Constraint 'OF' Type :
- #type{def = {'SET OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
-TypeWithConstraint -> 'SEQUENCE' Constraint 'OF' Type :
- #type{def = {'SEQUENCE OF','$4'},constraint =
- merge_constraints(['$2'])}.
-TypeWithConstraint -> 'SEQUENCE' 'SIZE' Constraint 'OF' Type :
- #type{def = {'SEQUENCE OF','$5'},constraint = merge_constraints([#constraint{c={'SizeConstraint','$3'#constraint.c}}])}.
-
-
-Constraint -> '(' ConstraintSpec ExceptionSpec ')' :
- #constraint{c='$2',e='$3'}.
-
-% inlined Constraint -> SubTypeConstraint :'$1'.
-ConstraintSpec -> ElementSetSpecs :'$1'.
-ConstraintSpec -> UserDefinedConstraint :'$1'.
-ConstraintSpec -> TableConstraint :'$1'.
-
-TableConstraint -> ComponentRelationConstraint : '$1'.
-TableConstraint -> ObjectSet : '$1'.
-%TableConstraint -> '{' typereference '}' :tableconstraint.
-
-ComponentRelationConstraint -> '{' typereference '}' '{' '@' ComponentIdList '}' : componentrelation.
-ComponentRelationConstraint -> '{' typereference '}' '{' '@' '.' ComponentIdList '}' : componentrelation.
-
-ComponentIdList -> identifier: ['$1'].
-ComponentIdList -> identifier '.' ComponentIdList: ['$1'| '$3'].
-
-
-% later ConstraintSpec -> GeneralConstraint :'$1'.
-
-% from X.682
-UserDefinedConstraint -> 'CONSTRAINED' 'BY' '{' '}' : {constrained_by,[]}.
-UserDefinedConstraint -> 'CONSTRAINED' 'BY'
- '{' UserDefinedConstraintParameters '}' : {constrained_by,'$4'}.
-
-UserDefinedConstraintParameters -> UserDefinedConstraintParameter : ['$1'].
-UserDefinedConstraintParameters ->
- UserDefinedConstraintParameter ','
- UserDefinedConstraintParameters: ['$1'|'$3'].
-
-UserDefinedConstraintParameter -> Type '.' ActualParameter : {'$1','$3'}.
-UserDefinedConstraintParameter -> ActualParameter : '$1'.
-
-
-
-ExceptionSpec -> '!' ExceptionIdentification : '$1'.
-ExceptionSpec -> '$empty' : undefined.
-
-ExceptionIdentification -> SignedNumber : '$1'.
-% inlined ExceptionIdentification -> DefinedValue : '$1'.
-ExceptionIdentification -> typereference '.' identifier :
- #'Externalvaluereference'{pos=element(2,'$1'),module=element(3,'$1'),
- value=element(3,'$1')}.
-ExceptionIdentification -> identifier :'$1'.
-ExceptionIdentification -> Type ':' Value : {'$1','$3'}.
-
-% inlined SubTypeConstraint -> ElementSetSpec
-
-ElementSetSpecs -> ElementSetSpec : '$1'.
-ElementSetSpecs -> ElementSetSpec ',' '...': {'$1',[]}.
-ElementSetSpecs -> '...' ',' ElementSetSpec : {[],'$3'}.
-ElementSetSpecs -> ElementSetSpec ',' '...' ',' ElementSetSpec : {'$1','$5'}.
-
-ElementSetSpec -> Unions : '$1'.
-ElementSetSpec -> 'ALL' Exclusions : {'ALL','$2'}.
-
-Unions -> Intersections : '$1'.
-Unions -> UElems UnionMark IntersectionElements :
- case {'$1','$3'} of
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {'SingleValue',ordsets:union(to_set(V1),to_set(V2))}
- end.
-
-UElems -> Unions :'$1'.
-
-Intersections -> IntersectionElements :'$1'.
-Intersections -> IElems IntersectionMark IntersectionElements :
- case {'$1','$3'} of
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {'SingleValue',ordsets:intersection(to_set(V1),to_set(V2))};
- {V1,V2} when list(V1) ->
- V1 ++ [V2];
- {V1,V2} ->
- [V1,V2]
- end.
-%Intersections -> IElems '^' IntersectionElements :{'INTERSECTION','$1','$3'}.
-%Intersections -> IElems 'INTERSECTION' IntersectionElements :{'INTERSECTION','$1','$3'}.
-
-IElems -> Intersections :'$1'.
-
-IntersectionElements -> Elements :'$1'.
-IntersectionElements -> Elems Exclusions :{'$1','$2'}.
-
-Elems -> Elements :'$1'.
-
-Exclusions -> 'EXCEPT' Elements :{'EXCEPT','$2'}.
-
-IntersectionMark -> 'INTERSECTION':'$1'.
-IntersectionMark -> '^':'$1'.
-UnionMark -> 'UNION':'$1'.
-UnionMark -> '|':'$1'.
-
-
-Elements -> SubTypeElements : '$1'.
-%Elements -> ObjectSetElements : '$1'.
-Elements -> '(' ElementSetSpec ')' : '$2'.
-Elements -> ReferencedType : '$1'.
-
-SubTypeElements -> ValueList : {'SingleValue','$1'}. % NOTE it must be a Value
-% The rule above was modified only because of conflicts
-SubTypeElements -> 'INCLUDES' Type : {'ContainedSubType','$2'}.
-%not lalr1 if this is activated SubTypeElements -> Type : {'TypeConstraint','$1'}.
-SubTypeElements -> LowerEndpoint '..' UpperEndpoint : {'ValueRange',{'$1','$3'}}.
-SubTypeElements -> 'FROM' Constraint : {'PermittedAlphabet','$2'#constraint.c}.
-SubTypeElements -> 'SIZE' Constraint: {'SizeConstraint','$2'#constraint.c}.
-% later will introduce conflicts related to NULL SubTypeElements -> Type : {'TypeConstraint','$1'}.
-SubTypeElements -> 'WITH' 'COMPONENT' Constraint:{'WITH COMPONENT','$3'}.
-SubTypeElements -> 'WITH' 'COMPONENTS' '{' TypeConstraints '}':{'WITH COMPONENTS',{'FullSpecification','$4'}}.
-SubTypeElements -> 'WITH' 'COMPONENTS' '{' '...' ',' TypeConstraints '}' :{'WITH COMPONENTS',{'PartialSpecification','$3'}}.
-
-% inlined above InnerTypeConstraints ::=
-% inlined above SingleTypeConstraint::= Constraint
-% inlined above MultipleTypeConstraints ::= FullSpecification | PartialSpecification
-% inlined above FullSpecification ::= "{" TypeConstraints "}"
-% inlined above PartialSpecification ::= "{" "..." "," TypeConstraints "}"
-% TypeConstraints -> identifier : [{'NamedConstraint',element(3,'$1'),undefined,undefined}]. % is this really meaningful or allowed
-TypeConstraints -> NamedConstraint : ['$1'].
-TypeConstraints -> NamedConstraint ',' TypeConstraints : ['$1'|'$3'].
-TypeConstraints -> identifier : ['$1'].
-TypeConstraints -> identifier ',' TypeConstraints : ['$1'|'$3'].
-
-NamedConstraint -> identifier Constraint PresenceConstraint :{'NamedConstraint',element(3,'$1'),'$2','$3'}.
-NamedConstraint -> identifier Constraint :{'NamedConstraint',element(3,'$1'),'$2',undefined}.
-NamedConstraint -> identifier PresenceConstraint :{'NamedConstraint',element(3,'$1'),undefined,'$2'}.
-
-PresenceConstraint -> 'PRESENT' : 'PRESENT'.
-PresenceConstraint -> 'ABSENT' : 'ABSENT'.
-PresenceConstraint -> 'OPTIONAL' : 'OPTIONAL'.
-
-
-
-LowerEndpoint -> LowerEndValue :'$1'.
-%LowerEndpoint -> LowerEndValue '<':{gt,'$1'}.
-LowerEndpoint -> LowerEndValue '<':('$1'+1).
-
-UpperEndpoint -> UpperEndValue :'$1'.
-%UpperEndpoint -> '<' UpperEndValue :{lt,'$2'}.
-UpperEndpoint -> '<' UpperEndValue :('$2'-1).
-
-LowerEndValue -> Value :'$1'.
-LowerEndValue -> 'MIN' :'MIN'.
-
-UpperEndValue -> Value :'$1'.
-UpperEndValue -> 'MAX' :'MAX'.
-
-
-% X.681
-
-
-% X.681 chap 15
-
-%TypeFromObject -> ReferencedObjects '.' FieldName : {'$1','$3'}.
-TypeFromObject -> typereference '.' FieldName : {'$1','$3'}.
-
-ReferencedObjects -> typereference : '$1'.
-%ReferencedObjects -> ParameterizedObject
-%ReferencedObjects -> DefinedObjectSet
-%ReferencedObjects -> ParameterizedObjectSet
-
-FieldName -> typefieldreference : ['$1'].
-FieldName -> valuefieldreference : ['$1'].
-FieldName -> FieldName '.' FieldName : ['$1' | '$3'].
-
-PrimitiveFieldName -> typefieldreference : '$1'.
-PrimitiveFieldName -> valuefieldreference : '$1'.
-
-%ObjectSetAssignment -> typereference DefinedObjectClass '::=' ObjectSet: null.
-ObjectSetAssignment -> typereference typereference '::=' ObjectSet :
- #typedef{pos=element(2,'$1'),name=element(3,'$1'),typespec={'ObjectSet',element(3,'$2'), '$4'}}.
-ObjectSetAssignment -> typereference typereference '.' typereference '::=' ObjectSet.
-
-ObjectSet -> '{' ElementSetSpecs '}' : '$2'.
-ObjectSet -> '{' '...' '}' : ['EXTENSIONMARK'].
-
-%ObjectSetElements -> Object.
-% ObjectSetElements -> identifier : '$1'.
-%ObjectSetElements -> DefinedObjectSet.
-%ObjectSetElements -> ObjectSetFromObjects.
-%ObjectSetElements -> ParameterizedObjectSet.
-
-%ObjectAssignment -> identifier DefinedObjectClass '::=' Object.
-ObjectAssignment -> ValueAssignment.
-%ObjectAssignment -> identifier typereference '::=' Object.
-%ObjectAssignment -> identifier typereference '.' typereference '::=' Object.
-
-%Object -> DefinedObject: '$1'.
-%Object -> ExternalObjectReference: '$1'.%Object -> DefinedObject: '$1'.
-Object -> typereference '.' identifier: '$1'.%Object -> DefinedObject: '$1'.
-Object -> identifier: '$1'.%Object -> DefinedObject: '$1'.
-
-%Object -> ObjectDefn -> DefaultSyntax: '$1'.
-Object -> '{' FieldSetting ',' FieldSettings '}' : ['$2'|'$4'].
-Object -> '{' FieldSetting '}' :['$2'].
-
-%% For User-friendly notation
-%% Object -> ObjectDefn -> DefinedSyntax
-Object -> '{' '}'.
-Object -> '{' DefinedSyntaxTokens '}'.
-
-% later Object -> ParameterizedObject: '$1'. look in x.683
-
-%DefinedObject -> ExternalObjectReference: '$1'.
-%DefinedObject -> identifier: '$1'.
-
-DefinedObjectClass -> typereference.
-%DefinedObjectClass -> objectclassreference.
-DefinedObjectClass -> ExternalObjectClassReference.
-%DefinedObjectClass -> typereference '.' objectclassreference.
-%%DefinedObjectClass -> UsefulObjectClassReference.
-
-ExternalObjectReference -> typereference '.' identifier.
-ExternalObjectClassReference -> typereference '.' typereference.
-%%ExternalObjectClassReference -> typereference '.' objectclassreference.
-
-ObjectDefn -> DefaultSyntax: '$1'.
-%ObjectDefn -> DefinedSyntax: '$1'.
-
-ObjectFromObject -> ReferencedObjects '.' FieldName : {'ObjectFromObject','$1','$3'}.
-
-% later look in x.683 ParameterizedObject ->
-
-%DefaultSyntax -> '{' '}'.
-%DefaultSyntax -> '{' FieldSettings '}': '$2'.
-DefaultSyntax -> '{' FieldSetting ',' FieldSettings '}': '$2'.
-DefaultSyntax -> '{' FieldSetting '}': '$2'.
-
-FieldSetting -> PrimitiveFieldName Setting: {'$1','$2'}.
-
-FieldSettings -> FieldSetting ',' FieldSettings: ['$1'|'$3'].
-FieldSettings -> FieldSetting ',' FieldSettings: ['$1'|'$3'].
-FieldSettings -> FieldSetting: '$1'.
-
-%DefinedSyntax -> '{' '}'.
-DefinedSyntax -> '{' DefinedSyntaxTokens '}': '$2'.
-
-DefinedSyntaxTokens -> DefinedSyntaxToken: '$1'.
-DefinedSyntaxTokens -> DefinedSyntaxToken DefinedSyntaxTokens: ['$1'|'$2'].
-
-% expanded DefinedSyntaxToken -> Literal: '$1'.
-%DefinedSyntaxToken -> typereference: '$1'.
-DefinedSyntaxToken -> word: '$1'.
-DefinedSyntaxToken -> ',': '$1'.
-DefinedSyntaxToken -> Setting: '$1'.
-%DefinedSyntaxToken -> '$empty': nil .
-
-% Setting ::= Type|Value|ValueSet|Object|ObjectSet
-Setting -> Type: '$1'.
-%Setting -> Value: '$1'.
-%Setting -> ValueNotNull: '$1'.
-Setting -> BuiltinValue: '$1'.
-Setting -> ValueSet: '$1'.
-%Setting -> Object: '$1'.
-%Setting -> ExternalObjectReference.
-Setting -> typereference '.' identifier.
-Setting -> identifier.
-Setting -> ObjectDefn.
-
-Setting -> ObjectSet: '$1'.
-
-
-Erlang code.
-%%-author('[email protected]').
--copyright('Copyright (c) 1991-99 Ericsson Telecom AB').
--vsn('$Revision: /main/release/1 $').
--include("asn1_records.hrl").
-
-to_set(V) when list(V) ->
- ordsets:list_to_set(V);
-to_set(V) ->
- ordsets:list_to_set([V]).
-
-merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint
- {merge_constraints(Rlist,[],[]),
- merge_constraints(ExtList,[],[])};
-
-merge_constraints(Clist) ->
- merge_constraints(Clist, [], []).
-
-merge_constraints([Ch|Ct],Cacc, Eacc) ->
- NewEacc = case Ch#constraint.e of
- undefined -> Eacc;
- E -> [E|Eacc]
- end,
- merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc);
-
-merge_constraints([],Cacc,[]) ->
- lists:flatten(Cacc);
-merge_constraints([],Cacc,Eacc) ->
- lists:flatten(Cacc) ++ [{'Errors',Eacc}].
-
-fixup_constraint(C) ->
- case C of
- {'SingleValue',V} when list(V) ->
- [C,
- {'ValueRange',{lists:min(V),lists:max(V)}}];
- {'PermittedAlphabet',{'SingleValue',V}} when list(V) ->
- V2 = {'SingleValue',
- ordsets:list_to_set(lists:flatten(V))},
- {'PermittedAlphabet',V2};
- {'PermittedAlphabet',{'SingleValue',V}} ->
- V2 = {'SingleValue',[V]},
- {'PermittedAlphabet',V2};
- {'SizeConstraint',Sc} ->
- {'SizeConstraint',fixup_size_constraint(Sc)};
-
- List when list(List) ->
- [fixup_constraint(Xc)||Xc <- List];
- Other ->
- Other
- end.
-
-fixup_size_constraint({'ValueRange',{Lb,Ub}}) ->
- {Lb,Ub};
-fixup_size_constraint({{'ValueRange',R},[]}) ->
- {R,[]};
-fixup_size_constraint({[],{'ValueRange',R}}) ->
- {[],R};
-fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) ->
- {R1,R2};
-fixup_size_constraint({'SingleValue',[Sv]}) ->
- fixup_size_constraint({'SingleValue',Sv});
-fixup_size_constraint({'SingleValue',L}) when list(L) ->
- ordsets:list_to_set(L);
-fixup_size_constraint({'SingleValue',L}) ->
- {L,L};
-fixup_size_constraint({C1,C2}) ->
- {fixup_size_constraint(C1), fixup_size_constraint(C2)}.
-
-
-
-
-
-
-
-
-
-
-
-
-
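With the yecc grammar deleted, the hand-written parser in asn1ct_parser2.erl (changed below) appears to be the only ASN.1 parser left in the application. For reference, a .yrl grammar of this kind would ordinarily be compiled into a parser module roughly like this (hypothetical build steps, not part of this change):

    %% yecc generates asn1ct_parser.erl from the grammar, which is then
    %% compiled like any other module.
    yecc:file("asn1ct_parser.yrl"),
    compile:file("asn1ct_parser").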
diff --git a/lib/asn1/src/asn1ct_parser2.erl b/lib/asn1/src/asn1ct_parser2.erl
index 3891fce8d3..488e4af4e0 100644
--- a/lib/asn1/src/asn1ct_parser2.erl
+++ b/lib/asn1/src/asn1ct_parser2.erl
@@ -20,7 +20,7 @@
%%
-module(asn1ct_parser2).
--export([parse/1]).
+-export([parse/2,format_error/1]).
-include("asn1_records.hrl").
%% Only used internally within this module.
@@ -28,26 +28,34 @@
-record(constraint, {c,e}).
-record(identifier, {pos,val}).
-%% parse all types in module
-parse(Tokens) ->
- case catch parse_ModuleDefinition(Tokens) of
- {'EXIT',Reason} ->
- {error,{{undefined,get(asn1_module),
- [internal,error,'when',parsing,module,definition,Reason]},
- hd(Tokens)}};
- {asn1_error,Reason} ->
- {error,{Reason,hd(Tokens)}};
- {ModuleDefinition,Rest1} ->
- {Types,Rest2} = parse_AssignmentList(Rest1),
- clean_process_dictionary(),
- case Rest2 of
- [{'END',_}|_Rest3] ->
- {ok,ModuleDefinition#module{typeorval = Types}};
- _ ->
- {error,{{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'END']},
- hd(Rest2)}}
- end
+parse(File0, Tokens0) ->
+ try do_parse(Tokens0) of
+ {ok,#module{}}=Result ->
+ Result
+ catch
+ throw:{asn1_error,Fun} when is_function(Fun, 0) ->
+ handle_parse_error(File0, Fun());
+ throw:{asn1_error,{parse_error,Tokens}} ->
+ handle_parse_error(File0, Tokens)
+ after
+ clean_process_dictionary()
+ end.
+
+handle_parse_error(File0, [Token|_]) ->
+ File = filename:basename(File0),
+ Line = get_line(Token),
+ Error = {structured_error,{File,Line},?MODULE,
+ {syntax_error,get_token(Token)}},
+ {error,[Error]}.
+
+do_parse(Tokens0) ->
+ {ModuleDefinition,Tokens1} = parse_ModuleDefinition(Tokens0),
+ {Types,Tokens2} = parse_AssignmentList(Tokens1),
+ case Tokens2 of
+ [{'END',_}|_Rest3] ->
+ {ok,ModuleDefinition#module{typeorval=Types}};
+ _ ->
+ parse_error(Tokens2)
end.
clean_process_dictionary() ->
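parse/1 becomes parse/2: the extra argument is the source file name, used to build structured errors keyed by file and line, and the process-dictionary cleanup now runs in an after clause whatever the outcome. A hedged usage sketch; only asn1ct_parser2:parse/2 and the shape of its return values are taken from the change above, the wrapper itself is illustrative:

    parse_tokens(SourceFile, Tokens) ->
        case asn1ct_parser2:parse(SourceFile, Tokens) of
            {ok, ModuleRec} ->                 %% a #module{} record
                {ok, ModuleRec};
            {error, Errors} ->                 %% [{structured_error,_,_,_}]
                {error, Errors}
        end.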
@@ -57,6 +65,11 @@ clean_process_dictionary() ->
_ = erase(extensiondefault),
ok.
+format_error({syntax_error,Token}) when is_atom(Token) ->
+ io_lib:format("syntax error before: '~s'", [Token]);
+format_error({syntax_error,Token}) ->
+ io_lib:format("syntax error before: '~p'", [Token]).
+
parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) ->
put(asn1_module,ModuleIdentifier),
{_DefinitiveIdentifier,Rest02} =
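format_error/1 follows the usual convention for structured errors: whatever description tuple is stored in the structured_error term is later passed back to this module to be rendered as text. A minimal sketch of how such an error could be printed (the printing function itself is illustrative):

    print_error({structured_error, {File, Line}, Mod, Desc}) ->
        io:format("~ts:~p: ~s~n", [File, Line, Mod:format_error(Desc)]).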
@@ -70,9 +83,7 @@ parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) ->
[{'DEFINITIONS',_}|Rest03] ->
Rest03;
_ ->
- throw({asn1_error,{get_line(hd(Rest02)),get(asn1_module),
- [got,get_token(hd(Rest02)),
- expected,'DEFINITIONS']}})
+ parse_error(Rest02)
end,
{TagDefault,Rest2} =
case Rest of
@@ -104,12 +115,11 @@ parse_ModuleDefinition([{typereference,L1,ModuleIdentifier}|Rest0]) ->
extensiondefault = ExtensionDefault,
exports = Exports,
imports = {imports, Imports}}, Rest6};
- _ -> throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,"::= BEGIN"]}})
+ _ ->
+ parse_error(Rest3)
end;
parse_ModuleDefinition(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typereference]}}).
+ parse_error(Tokens).
parse_Exports([{'EXPORTS',_L1},{';',_L2}|Rest]) ->
{{exports,[]},Rest};
@@ -122,8 +132,7 @@ parse_Exports([{'EXPORTS',_L1}|Rest]) ->
[{';',_}|Rest3] ->
{{exports,SymbolList},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,';']}})
+ parse_error(Rest2)
end;
parse_Exports(Rest) ->
{{exports,all},Rest}.
@@ -137,29 +146,25 @@ parse_SymbolList(Tokens,Acc) ->
[{',',_L1}|Rest2] ->
parse_SymbolList(Rest2,[Symbol|Acc]);
Rest2 ->
- {lists:reverse([Symbol|Acc]),Rest2}
+ {lists:reverse(Acc, [Symbol]),Rest2}
end.
parse_Symbol(Tokens) ->
parse_Reference(Tokens).
parse_Reference([{typereference,L1,TrefName},{'{',_L2},{'}',_L3}|Rest]) ->
-% {Tref,Rest};
{tref2Exttref(L1,TrefName),Rest};
parse_Reference([Tref1 = {typereference,_,_},{'.',_},Tref2 = {typereference,_,_},
{'{',_L2},{'}',_L3}|Rest]) ->
-% {{Tref1,Tref2},Rest};
{{tref2Exttref(Tref1),tref2Exttref(Tref2)},Rest};
parse_Reference([Tref = {typereference,_L1,_TrefName}|Rest]) ->
{tref2Exttref(Tref),Rest};
-parse_Reference([Vref = {identifier,_L1,_VName},{'{',_L2},{'}',_L3}|Rest]) ->
+parse_Reference([#identifier{}=Vref,{'{',_L2},{'}',_L3}|Rest]) ->
{identifier2Extvalueref(Vref),Rest};
-parse_Reference([Vref = {identifier,_L1,_VName}|Rest]) ->
+parse_Reference([#identifier{}=Vref|Rest]) ->
{identifier2Extvalueref(Vref),Rest};
parse_Reference(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,identifier]]}}).
+ parse_error(Tokens).
parse_Imports([{'IMPORTS',_L1},{';',_L2}|Rest]) ->
{{imports,[]},Rest};
@@ -168,9 +173,8 @@ parse_Imports([{'IMPORTS',_L1}|Rest]) ->
case Rest2 of
[{';',_L2}|Rest3] ->
{{imports,SymbolsFromModuleList},Rest3};
- Rest3 ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,';']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_Imports(Tokens) ->
{{imports,[]},Tokens}.
@@ -180,11 +184,12 @@ parse_SymbolsFromModuleList(Tokens) ->
parse_SymbolsFromModuleList(Tokens,Acc) ->
{SymbolsFromModule,Rest} = parse_SymbolsFromModule(Tokens),
- case (catch parse_SymbolsFromModule(Rest)) of
+ try parse_SymbolsFromModule(Rest) of
{Sl,_Rest2} when is_record(Sl,'SymbolsFromModule') ->
- parse_SymbolsFromModuleList(Rest,[SymbolsFromModule|Acc]);
- _ ->
- {lists:reverse([SymbolsFromModule|Acc]),Rest}
+ parse_SymbolsFromModuleList(Rest, [SymbolsFromModule|Acc])
+ catch
+ throw:{asn1_error,_} ->
+ {lists:reverse(Acc, [SymbolsFromModule]),Rest}
end.
parse_SymbolsFromModule(Tokens) ->
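Several of the old case (catch Expr) of constructs are replaced by try ... of ... catch so that only the parser's own thrown {asn1_error,_} terms are treated as a recoverable failure, while genuine crashes propagate instead of being swallowed. The same shape in isolation, with illustrative names:

    try_alternative(ParseFun, Tokens, Fallback) ->
        try ParseFun(Tokens) of
            {_, Rest} = Result when is_list(Rest) ->
                Result                 %% parsed; Rest is the remaining tokens
        catch
            throw:{asn1_error, _} ->
                Fallback               %% this alternative did not match
        end.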
@@ -198,169 +203,154 @@ parse_SymbolsFromModule(Tokens) ->
end,
{SymbolList,Rest} = parse_SymbolList(Tokens),
case Rest of
- [{'FROM',_L1},Tref = {typereference,_,Name},Ref={identifier,_L2,_Id},C={',',_}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|
+ [#identifier{},{',',_}|_]=Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
- module=tref2Exttref(Tref)},[Ref,C|Rest2]};
+ module=tref2Exttref(Tref)},Rest2};
%% This a special case when there is only one Symbol imported
%% from the next module. No other way to distinguish Ref from
%% a part of the GlobalModuleReference of Name.
- [{'FROM',_L1},Tref = {typereference,_,Name},Ref = {identifier,_L2,_Id},From = {'FROM',_}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|
+ [#identifier{},{'FROM',_}|_]=Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
- module=tref2Exttref(Tref)},[Ref,From|Rest2]};
- [{'FROM',_L1},Tref = {typereference,_,Name},{identifier,_L2,_Id}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ module=tref2Exttref(Tref)},Rest2};
+ [{'FROM',_L1},{typereference,_,Name}=Tref,#identifier{}|Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
module=tref2Exttref(Tref)},Rest2};
- [{'FROM',_L1},Tref = {typereference,_,Name},Brace = {'{',_}|Rest2] ->
- {_ObjIdVal,Rest3} = parse_ObjectIdentifierValue([Brace|Rest2]), % value not used yet, fix me
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|[{'{',_}|_]=Rest2] ->
+ {_ObjIdVal,Rest3} = parse_ObjectIdentifierValue(Rest2), % value not used yet, fix me
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
module=tref2Exttref(Tref)},Rest3};
- [{'FROM',_L1},Tref = {typereference,_,Name}|Rest2] ->
- NewSymbolList = lists:map(SetRefModuleName(Name),SymbolList),
+ [{'FROM',_L1},{typereference,_,Name}=Tref|Rest2] ->
+ NewSymbolList = lists:map(SetRefModuleName(Name), SymbolList),
{#'SymbolsFromModule'{symbols=NewSymbolList,
module=tref2Exttref(Tref)},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,
- ['FROM typerefernece identifier ,',
- 'FROM typereference identifier',
- 'FROM typereference {',
- 'FROM typereference']]}})
+ parse_error(Rest)
end.
parse_ObjectIdentifierValue([{'{',_}|Rest]) ->
parse_ObjectIdentifierValue(Rest,[]).
-parse_ObjectIdentifierValue([{number,_,Num}|Rest],Acc) ->
+parse_ObjectIdentifierValue([{number,_,Num}|Rest], Acc) ->
parse_ObjectIdentifierValue(Rest,[Num|Acc]);
-parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {number,_,Num}, {')',_}|Rest],Acc) ->
+parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},{number,_,Num},{')',_}|Rest], Acc) ->
parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Num}|Acc]);
-parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {identifier,_,Id2}, {')',_}|Rest],Acc) ->
+parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},#identifier{val=Id2},{')',_}|Rest], Acc) ->
parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,Id2}|Acc]);
-parse_ObjectIdentifierValue([{identifier,_,Id},{'(',_}, {typereference,_,Tref},{'.',_},{identifier,_,Id2}, {')',_}|Rest],Acc) ->
- parse_ObjectIdentifierValue(Rest,[{'NamedNumber',Id,{'ExternalValue',Tref,Id2}}|Acc]);
-parse_ObjectIdentifierValue([Id = {identifier,_,_}|Rest],Acc) ->
- parse_ObjectIdentifierValue(Rest,[identifier2Extvalueref(Id)|Acc]);
-parse_ObjectIdentifierValue([{'}',_}|Rest],Acc) ->
+parse_ObjectIdentifierValue([#identifier{val=Id},{'(',_},{typereference,_,Tref},{'.',_},#identifier{val=Id2}, {')',_}|Rest], Acc) ->
+ parse_ObjectIdentifierValue(Rest, [{'NamedNumber',Id,{'ExternalValue',Tref,Id2}}|Acc]);
+parse_ObjectIdentifierValue([#identifier{}=Id|Rest], Acc) ->
+ parse_ObjectIdentifierValue(Rest, [identifier2Extvalueref(Id)|Acc]);
+parse_ObjectIdentifierValue([{'}',_}|Rest], Acc) ->
{lists:reverse(Acc),Rest};
-parse_ObjectIdentifierValue([H|_T],_Acc) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,
- ['{ some of the following }',number,'identifier ( number )',
- 'identifier ( identifier )',
- 'identifier ( typereference.identifier)',identifier]]}}).
+parse_ObjectIdentifierValue(Tokens, _Acc) ->
+ parse_error(Tokens).
-parse_AssignmentList(Tokens = [{'END',_}|_Rest]) ->
- {[],Tokens};
-parse_AssignmentList(Tokens = [{'$end',_}|_Rest]) ->
- {[],Tokens};
parse_AssignmentList(Tokens) ->
- parse_AssignmentList(Tokens,[]).
+ parse_AssignmentList(Tokens, []).
-parse_AssignmentList(Tokens= [{'END',_}|_Rest],Acc) ->
+parse_AssignmentList([{'END',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_AssignmentList(Tokens= [{'$end',_}|_Rest],Acc) ->
+parse_AssignmentList([{'$end',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_AssignmentList(Tokens,Acc) ->
- case (catch parse_Assignment(Tokens)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,R} ->
-% [H|T] = Tokens,
- throw({error,{R,hd(Tokens)}});
- {Assignment,Rest} ->
- parse_AssignmentList(Rest,[Assignment|Acc])
- end.
-
-parse_Assignment(Tokens) ->
- Flist = [fun parse_TypeAssignment/1,
- fun parse_ValueAssignment/1,
- fun parse_ObjectClassAssignment/1,
- fun parse_ObjectAssignment/1,
- fun parse_ObjectSetAssignment/1,
- fun parse_ParameterizedAssignment/1,
+parse_AssignmentList(Tokens0, Acc) ->
+ {Assignment,Tokens} = parse_Assignment(Tokens0),
+ parse_AssignmentList(Tokens, [Assignment|Acc]).
+
+parse_Assignment([{typereference,L1,Name},{'::=',_}|Tokens0]) ->
+ %% 1) Type ::= TypeDefinition
+ %% 2) CLASS-NAME ::= CLASS {...}
+ Flist = [{type,fun parse_Type/1},
+ {class,fun parse_ObjectClass/1}],
+ case parse_or_tag(Tokens0, Flist) of
+ {{type,Type},Tokens} ->
+ %% TypeAssignment
+ {#typedef{pos=L1,name=Name,typespec=Type},Tokens};
+ {{class,Type},Tokens} ->
+ %% ObjectClassAssignment
+ {#classdef{pos=L1,name=Name,module=resolve_module(Type),
+ typespec=Type},Tokens}
+ end;
+parse_Assignment([{typereference,_,_},{'{',_}|_]=Tokens) ->
+ %% 1) Type{...} ::= ...
+ %% 2) ValueSet{...} Type ::= ...
+ %% ObjectSet{...} CLASS-NAME ::= CLASS {...}
+ %% 3) CLASS-NAME{...} ::= CLASS {...}
+ %% A parameterized value set and a parameterized object set
+ %% cannot be distinguished from each other without type information.
+ Flist = [fun parse_ParameterizedTypeAssignment/1,
+ fun parse_ParameterizedValueSetTypeAssignment/1,
+ fun parse_ParameterizedObjectClassAssignment/1],
+ parse_or(Tokens, Flist);
+parse_Assignment([{typereference,_,_}|_]=Tokens) ->
+ %% 1) ObjectSet CLASS-NAME ::= ...
+ %% 2) ValueSet Type ::= ...
+ Flist = [fun parse_ObjectSetAssignment/1,
fun parse_ValueSetTypeAssignment/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {asn1_assignment_error,Reason} ->
- throw({asn1_error,Reason});
- Result ->
- Result
- end.
-
+ parse_or(Tokens, Flist);
+parse_Assignment([#identifier{},{'{',_}|_]=Tokens) ->
+ %% 1) value{...} Type ::= ...
+ %% 2) object{...} CLASS-NAME ::= ...
+ Flist = [fun parse_ParameterizedValueAssignment/1,
+ fun parse_ParameterizedObjectAssignment/1],
+ parse_or(Tokens, Flist);
+parse_Assignment([#identifier{}|_]=Tokens) ->
+ %% 1) value Type ::= ...
+ %% 2) object CLASS-NAME ::= ...
+ Flist = [fun parse_ValueAssignment/1,
+ fun parse_ObjectAssignment/1],
+ parse_or(Tokens, Flist);
+parse_Assignment(Tokens) ->
+ parse_error(Tokens).
parse_or(Tokens,Flist) ->
parse_or(Tokens,Flist,[]).
-parse_or(_Tokens,[],ErrList) ->
- case ErrList of
- [] ->
- throw({asn1_error,{parse_or,ErrList}});
- L when is_list(L) ->
- %% chose to throw 1) the error with the highest line no,
- %% 2) the last error which is not a asn1_assignment_error or
- %% 3) the last error.
- throw(prioritize_error(ErrList))
+parse_or(Tokens, [Fun|Funs], ErrList) when is_function(Fun, 1) ->
+ try Fun(Tokens) of
+ {_,Rest}=Result when is_list(Rest) ->
+ Result
+ catch
+ throw:{asn1_error,Error} ->
+ parse_or(Tokens, Funs, [Error|ErrList])
end;
-parse_or(Tokens,[Fun|Frest],ErrList) ->
- case (catch Fun(Tokens)) of
- Exit = {'EXIT',_Reason} ->
- parse_or(Tokens,Frest,[Exit|ErrList]);
- AsnErr = {asn1_error,_} ->
- parse_or(Tokens,Frest,[AsnErr|ErrList]);
- AsnAssErr = {asn1_assignment_error,_} ->
- parse_or(Tokens,Frest,[AsnAssErr|ErrList]);
- Result = {_,L} when is_list(L) ->
- Result;
- Error ->
- parse_or(Tokens,Frest,[Error|ErrList])
- end.
-
-parse_or_tag(Tokens,Flist) ->
- parse_or_tag(Tokens,Flist,[]).
-
-parse_or_tag(_Tokens,[],ErrList) ->
- case ErrList of
- [] ->
- throw({asn1_error,{parse_or_tag,ErrList}});
- L when is_list(L) ->
- %% chose to throw 1) the error with the highest line no,
- %% 2) the last error which is not a asn1_assignment_error or
- %% 3) the last error.
- throw(prioritize_error(ErrList))
+parse_or(_Tokens, [], ErrList) ->
+ throw({asn1_error,fun() -> prioritize_error(ErrList) end}).
+
+parse_or_tag(Tokens, Flist) ->
+ parse_or_tag(Tokens, Flist, []).
+
+parse_or_tag(Tokens, [{Tag,Fun}|Funs], ErrList) when is_function(Fun, 1) ->
+ try Fun(Tokens) of
+ {Parsed,Rest} when is_list(Rest) ->
+ {{Tag,Parsed},Rest}
+ catch
+ throw:{asn1_error,Error} ->
+ parse_or_tag(Tokens, Funs, [Error|ErrList])
end;
-parse_or_tag(Tokens,[{Tag,Fun}|Frest],ErrList) when is_function(Fun) ->
- case (catch Fun(Tokens)) of
- Exit = {'EXIT',_Reason} ->
- parse_or_tag(Tokens,Frest,[Exit|ErrList]);
- AsnErr = {asn1_error,_} ->
- parse_or_tag(Tokens,Frest,[AsnErr|ErrList]);
- AsnAssErr = {asn1_assignment_error,_} ->
- parse_or_tag(Tokens,Frest,[AsnAssErr|ErrList]);
- {ParseRes,Rest} when is_list(Rest) ->
- {{Tag,ParseRes},Rest};
- Error ->
- parse_or_tag(Tokens,Frest,[Error|ErrList])
- end.
+parse_or_tag(_Tokens, [], ErrList) ->
+ throw({asn1_error,fun() -> prioritize_error(ErrList) end}).
+
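+%% Pick the most informative of the collected errors: the parsing
+%% alternative that consumed the most tokens before failing (that is,
+%% the one with the shortest remaining token list) wins.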
+prioritize_error(Errors0) ->
+ Errors1 = prioritize_error_1(Errors0),
+ Errors2 = [{length(L),L} || L <- Errors1],
+ Errors = lists:sort(Errors2),
+ [Res|_] = [L || {_,L} <- Errors],
+ Res.
+
+prioritize_error_1([F|T]) when is_function(F, 0) ->
+ [F()|prioritize_error_1(T)];
+prioritize_error_1([{parse_error,Tokens}|T]) ->
+ [Tokens|prioritize_error_1(T)];
+prioritize_error_1([]) ->
+ [].
-parse_TypeAssignment([{typereference,L1,Tref},{'::=',_}|Rest]) ->
- {Type,Rest2} = parse_Type(Rest),
- {#typedef{pos=L1,name=Tref,typespec=Type},Rest2};
-parse_TypeAssignment([H1,H2|_Rest]) ->
- throw({asn1_assignment_error,{get_line(H1),get(asn1_module),
- [got,[get_token(H1),get_token(H2)], expected,
- typereference,'::=']}});
-parse_TypeAssignment([H|_T]) ->
- throw({asn1_assignment_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,
- typereference]}}).
%% parse_Type(Tokens) -> Ret
%%
@@ -370,9 +360,8 @@ parse_TypeAssignment([H|_T]) ->
%%
parse_Type(Tokens) ->
{Tag,Rest3} = case Tokens of
- [Lbr= {'[',_}|Rest] ->
- parse_Tag([Lbr|Rest]);
- Rest-> {[],Rest}
+ [{'[',_}|_] -> parse_Tag(Tokens);
+ _ -> {[],Tokens}
end,
{Tag2,Rest4} = case Rest3 of
[{'IMPLICIT',_}|Rest31] when is_record(Tag,tag)->
@@ -384,31 +373,17 @@ parse_Type(Tokens) ->
Rest31 ->
{Tag,Rest31}
end,
- Flist = [fun parse_BuiltinType/1,fun parse_ReferencedType/1,fun parse_TypeWithConstraint/1],
- {Type,Rest5} = case (catch parse_or(Rest4,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_Reason} ->
- throw(AsnErr);
- Result ->
- Result
- end,
- case hd(Rest5) of
- {'(',_} ->
+ Flist = [fun parse_BuiltinType/1,
+ fun parse_ReferencedType/1,
+ fun parse_TypeWithConstraint/1],
+ {Type,Rest5} = parse_or(Rest4, Flist),
+ case Rest5 of
+ [{'(',_}|_] ->
{Constraints,Rest6} = parse_Constraints(Rest5),
- if is_record(Type,type) ->
- {Type#type{constraint=merge_constraints(Constraints),
- tag=Tag2},Rest6};
- true ->
- {#type{def=Type,constraint=merge_constraints(Constraints),
- tag=Tag2},Rest6}
- end;
- _ ->
- if is_record(Type,type) ->
- {Type#type{tag=Tag2},Rest5};
- true ->
- {#type{def=Type,tag=Tag2},Rest5}
- end
+ {Type#type{tag=Tag2,
+ constraint=merge_constraints(Constraints)},Rest6};
+ [_|_] ->
+ {Type#type{tag=Tag2},Rest5}
end.
parse_BuiltinType([{'BIT',_},{'STRING',_}|Rest]) ->
@@ -419,11 +394,10 @@ parse_BuiltinType([{'BIT',_},{'STRING',_}|Rest]) ->
[{'}',_}|Rest4] ->
{#type{def={'BIT STRING',NamedNumberList}},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,'}']}})
+ parse_error(Rest3)
end;
_ ->
- {{'BIT STRING',[]},Rest}
+ {#type{def={'BIT STRING',[]}},Rest}
end;
parse_BuiltinType([{'BOOLEAN',_}|Rest]) ->
{#type{def='BOOLEAN'},Rest};
@@ -435,41 +409,33 @@ parse_BuiltinType([{'CHARACTER',_},{'STRING',_}|Rest]) ->
{#type{def='CHARACTER STRING'},Rest};
parse_BuiltinType([{'CHOICE',_},{'{',_}|Rest]) ->
- {AlternativeTypeLists,Rest2} = parse_AlternativeTypeLists(Rest),
- AlternativeTypeLists1 =
- lists:filter(fun(#'ExtensionAdditionGroup'{}) -> false;
- ('ExtensionAdditionGroupEnd') -> false;
- (_) -> true
- end,AlternativeTypeLists),
+ {L0,Rest2} = parse_AlternativeTypeLists(Rest),
case Rest2 of
[{'}',_}|Rest3] ->
- AlternativeTypeLists2 =
- case {[Ext||Ext = #'EXTENSIONMARK'{} <- AlternativeTypeLists1],
- get(extensiondefault)} of
- {[],'IMPLIED'} -> AlternativeTypeLists1 ++ [#'EXTENSIONMARK'{}];
- _ -> AlternativeTypeLists1
+ NeedExt = not lists:keymember('EXTENSIONMARK', 1, L0) andalso
+ get(extensiondefault) =:= 'IMPLIED',
+ L = case NeedExt of
+ true ->
+ L0 ++ [#'EXTENSIONMARK'{}];
+ false ->
+ L0
end,
-
- {#type{def={'CHOICE',AlternativeTypeLists2}},Rest3};
+ {#type{def={'CHOICE',L}},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
parse_BuiltinType([{'EMBEDDED',_},{'PDV',_}|Rest]) ->
{#type{def='EMBEDDED PDV'},Rest};
parse_BuiltinType([{'ENUMERATED',_},{'{',_}|Rest]) ->
- {Enumerations,Rest2} = parse_Enumerations(Rest,get(extensiondefault)),
+ {Enumerations,Rest2} = parse_Enumerations(Rest),
case Rest2 of
[{'}',_}|Rest3] ->
{#type{def={'ENUMERATED',Enumerations}},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
parse_BuiltinType([{'EXTERNAL',_}|Rest]) ->
{#type{def='EXTERNAL'},Rest};
-
-% InstanceOfType
parse_BuiltinType([{'INSTANCE',_},{'OF',_}|Rest]) ->
{DefinedObjectClass,Rest2} = parse_DefinedObjectClass(Rest),
case Rest2 of
@@ -480,9 +446,6 @@ parse_BuiltinType([{'INSTANCE',_},{'OF',_}|Rest]) ->
_ ->
{#type{def={'INSTANCE OF',DefinedObjectClass,[]}},Rest2}
end;
-
-% parse_BuiltinType(Tokens) ->
-
parse_BuiltinType([{'INTEGER',_}|Rest]) ->
case Rest of
[{'{',_}|Rest2] ->
@@ -491,17 +454,13 @@ parse_BuiltinType([{'INTEGER',_}|Rest]) ->
[{'}',_}|Rest4] ->
{#type{def={'INTEGER',NamedNumberList}},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,'}']}})
+ parse_error(Rest3)
end;
_ ->
{#type{def='INTEGER'},Rest}
end;
parse_BuiltinType([{'NULL',_}|Rest]) ->
{#type{def='NULL'},Rest};
-
-% ObjectClassFieldType fix me later
-
parse_BuiltinType([{'OBJECT',_},{'IDENTIFIER',_}|Rest]) ->
{#type{def='OBJECT IDENTIFIER'},Rest};
parse_BuiltinType([{'OCTET',_},{'STRING',_}|Rest]) ->
@@ -529,18 +488,14 @@ parse_BuiltinType([{'SEQUENCE',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
parse_ComponentTypeLists2(Rest2,[#'EXTENSIONMARK'{pos=Line}]),
case Rest3 of
[{'}',_}|Rest4] ->
- {#type{def=#'SEQUENCE'{components=ComponentTypeLists}},Rest4};
+ {#type{def=#'SEQUENCE'{components=ComponentTypeLists}},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,'}']}})
+ parse_error(Rest3)
end
-% _ -> % Seq case 4,17-19,23-26 will fail here
-% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
-% [got,get_token(hd(Rest2)),expected,'}']}})
end;
parse_BuiltinType([{'SEQUENCE',_},{'{',_}|Rest]) ->
{ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest),
- case Rest2 of
+ case Rest2 of
[{'}',_}|Rest3] ->
ComponentTypeLists2 =
case {[Ext||Ext = #'EXTENSIONMARK'{} <- ComponentTypeLists],
@@ -551,25 +506,19 @@ parse_BuiltinType([{'SEQUENCE',_},{'{',_}|Rest]) ->
{#type{def=#'SEQUENCE'{components = ComponentTypeLists2}},
Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
-
-parse_BuiltinType([{'SEQUENCE',_},{'OF',_},Id={identifier,_,_},Lt={'<',_}|Rest]) ->
-%% TODO: take care of the identifier for something useful
- {Type,Rest2} = parse_SelectionType([Id,Lt|Rest]),
- {#type{def={'SEQUENCE OF',#type{def=Type,tag=[]}}},Rest2};
-
-parse_BuiltinType([{'SEQUENCE',_},{'OF',_},{identifier,_,_} |Rest]) ->
+parse_BuiltinType([{'SEQUENCE',_},{'OF',_}|
+ [#identifier{},{'<',_}|_]=Tokens0]) ->
+ {Type,Tokens} = parse_SelectionType(Tokens0),
+ {#type{def={'SEQUENCE OF',Type}},Tokens};
+parse_BuiltinType([{'SEQUENCE',_},{'OF',_},#identifier{} |Rest]) ->
%% TODO: take care of the identifier for something useful
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SEQUENCE OF',Type}},Rest2};
-
parse_BuiltinType([{'SEQUENCE',_},{'OF',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SEQUENCE OF',Type}},Rest2};
-
-
parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'}',_}|Rest]) ->
{#type{def=#'SET'{components=[#'EXTENSIONMARK'{pos = Line}]}},Rest};
parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
@@ -581,12 +530,18 @@ parse_BuiltinType([{'SET',_},{'{',_},{'...',Line},{'!',_}|Rest]) ->
val = ExceptionIdentification}]}},
Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+	    {ComponentTypeLists,Rest3} =
+ parse_ComponentTypeLists2(Rest2,[#'EXTENSIONMARK'{pos=Line}]),
+ case Rest3 of
+ [{'}',_}|Rest4] ->
+ {#type{def=#'SET'{components=ComponentTypeLists}},Rest4};
+ _ ->
+ parse_error(Rest3)
+ end
end;
parse_BuiltinType([{'SET',_},{'{',_}|Rest]) ->
{ComponentTypeLists,Rest2} = parse_ComponentTypeLists(Rest),
- case Rest2 of
+ case Rest2 of
[{'}',_}|Rest3] ->
ComponentTypeLists2 =
case {[Ext||Ext = #'EXTENSIONMARK'{} <- ComponentTypeLists],
@@ -597,184 +552,128 @@ parse_BuiltinType([{'SET',_},{'{',_}|Rest]) ->
{#type{def=#'SET'{components = ComponentTypeLists2}},
Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
-
-parse_BuiltinType([{'SET',_},{'OF',_},Id={identifier,_,_},Lt={'<',_}|Rest]) ->
-%% TODO: take care of the identifier for something useful
- {Type,Rest2} = parse_SelectionType([Id,Lt|Rest]),
- {#type{def={'SET OF',#type{def=Type,tag=[]}}},Rest2};
-
-
-parse_BuiltinType([{'SET',_},{'OF',_},{identifier,_,_}|Rest]) ->
+parse_BuiltinType([{'SET',_},{'OF',_}|
+ [#identifier{},{'<',_}|_]=Tokens0]) ->
+ {Type,Tokens} = parse_SelectionType(Tokens0),
+ {#type{def={'SET OF',Type}},Tokens};
+parse_BuiltinType([{'SET',_},{'OF',_},#identifier{}|Rest]) ->
%%TODO: take care of the identifier for something useful
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SET OF',Type}},Rest2};
-
parse_BuiltinType([{'SET',_},{'OF',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{#type{def={'SET OF',Type}},Rest2};
-
-%% The so called Useful types
parse_BuiltinType([{'GeneralizedTime',_}|Rest]) ->
{#type{def='GeneralizedTime'},Rest};
parse_BuiltinType([{'UTCTime',_}|Rest]) ->
{#type{def='UTCTime'},Rest};
parse_BuiltinType([{'ObjectDescriptor',_}|Rest]) ->
{#type{def='ObjectDescriptor'},Rest};
-
-%% For compatibility with old standard
-parse_BuiltinType([{'ANY',_},{'DEFINED',_},{'BY',_},{identifier,_,Id}|Rest]) ->
+parse_BuiltinType([{'ANY',_},{'DEFINED',_},{'BY',_},#identifier{val=Id}|Rest]) ->
+ %% For compatibility with the old standard.
{#type{def={'ANY_DEFINED_BY',Id}},Rest};
parse_BuiltinType([{'ANY',_}|Rest]) ->
+ %% For compatibility with the old standard.
{#type{def='ANY'},Rest};
-
parse_BuiltinType(Tokens) ->
parse_ObjectClassFieldType(Tokens).
-% throw({asn1_error,unhandled_type}).
-parse_TypeWithConstraint([{'SEQUENCE',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SEQUENCE',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,
- {get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SEQUENCE OF',Type},
constraint = merge_constraints([Constraint])},Rest5};
-parse_TypeWithConstraint([{'SEQUENCE',_},{'SIZE',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SEQUENCE',_},{'SIZE',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
#constraint{c=C} = Constraint,
- Constraint2 = Constraint#constraint{c={'SizeConstraint',C}},
+ Constraint2 = Constraint#constraint{c={element_set,{'SizeConstraint',C},
+ none}},
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SEQUENCE OF',Type}, constraint = merge_constraints([Constraint2])},Rest5};
-parse_TypeWithConstraint([{'SET',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SET',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,
- {get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SET OF',Type},
constraint = merge_constraints([Constraint])},Rest5};
-parse_TypeWithConstraint([{'SET',_},{'SIZE',_},Lpar = {'(',_}|Rest]) ->
- {Constraint,Rest2} = parse_Constraint([Lpar|Rest]),
+parse_TypeWithConstraint([{'SET',_},{'SIZE',_}|[{'(',_}|_]=Rest0]) ->
+ {Constraint,Rest2} = parse_Constraint(Rest0),
#constraint{c=C} = Constraint,
- Constraint2 = Constraint#constraint{c={'SizeConstraint',C}},
+ Constraint2 = Constraint#constraint{c={element_set,
+ {'SizeConstraint',C},none}},
Rest4 = case Rest2 of
- [{'OF',_}, {identifier,_,_Id}|Rest3] ->
+ [{'OF',_},#identifier{}|Rest3] ->
%%% TODO: make some use of the identifier, maybe useful in the XML mapping
Rest3;
[{'OF',_}|Rest3] ->
Rest3;
_ ->
- throw({asn1_error,
- {get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'OF']}})
+ parse_error(Rest2)
end,
{Type,Rest5} = parse_Type(Rest4),
{#type{def = {'SET OF',Type},
constraint = merge_constraints([Constraint2])},Rest5};
parse_TypeWithConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['SEQUENCE','SEQUENCE SIZE','SET','SET SIZE'],
- followed,by,a,constraint]}}).
+ parse_error(Tokens).
%% --------------------------
parse_ReferencedType(Tokens) ->
- Flist = [fun parse_DefinedType/1,
+ Flist = [fun parse_ParameterizedType/1,
+ fun parse_DefinedType/1,
fun parse_SelectionType/1,
- fun parse_TypeFromObject/1,
- fun parse_ValueSetFromObjects/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ fun parse_TypeFromObject/1],
+ parse_or(Tokens, Flist).
-parse_DefinedType(Tokens=[{typereference,_,_},{'{',_}|_Rest]) ->
- parse_ParameterizedType(Tokens);
-parse_DefinedType(Tokens=[{typereference,L1,TypeName},
- T2={typereference,_,_},T3={'{',_}|Rest]) ->
- case (catch parse_ParameterizedType(Tokens)) of
- {'EXIT',_Reason} ->
- Rest2 = [T2,T3|Rest],
- {#type{def = #'Externaltypereference'{pos=L1,
- module=resolve_module(TypeName),
- type=TypeName}},Rest2};
- {asn1_error,_} ->
- Rest2 = [T2,T3|Rest],
- {#type{def = #'Externaltypereference'{pos=L1,
- module=resolve_module(TypeName),
- type=TypeName}},Rest2};
- Result ->
- Result
- end;
-parse_DefinedType(Tokens=[{typereference,_L1,_Module},{'.',_},
- {typereference,_,_TypeName},{'{',_}|_Rest]) ->
- parse_ParameterizedType(Tokens);
-parse_DefinedType([{typereference,L1,Module},{'.',_},{typereference,_,TypeName}|Rest]) ->
- {#type{def = #'Externaltypereference'{pos=L1,module=Module,type=TypeName}},Rest};
-parse_DefinedType([{typereference,L1,TypeName}|Rest]) ->
- case is_pre_defined_class(TypeName) of
- false ->
- {#type{def = #'Externaltypereference'{pos=L1,module=resolve_module(TypeName),
- type=TypeName}},Rest};
- _ ->
- throw({asn1_error,
- {L1,get(asn1_module),
- [got,TypeName,expected,
- [typereference,'typereference.typereference',
- 'typereference typereference']]}})
- end;
+parse_DefinedType([{typereference,L1,Module},
+ {'.',_},
+ {typereference,_,TypeName}|Tokens]) ->
+ {#type{def = #'Externaltypereference'{pos=L1,module=Module,
+ type=TypeName}},Tokens};
+parse_DefinedType([{typereference,_,_}=Tr|Tokens]) ->
+ {#type{def=tref2Exttref(Tr)},Tokens};
parse_DefinedType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,'typereference.typereference',
- 'typereference typereference']]}}).
+ parse_error(Tokens).
-parse_SelectionType([{identifier,_,Name},{'<',_}|Rest]) ->
+parse_SelectionType([#identifier{val=Name},{'<',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
- {{'SelectionType',Name,Type},Rest2};
+ {#type{def={'SelectionType',Name,Type}},Rest2};
parse_SelectionType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'identifier <']}}).
+ parse_error(Tokens).
resolve_module(Type) ->
@@ -787,30 +686,13 @@ resolve_module(_Type, Current, undefined) ->
resolve_module(Type, Current, Imports) ->
case [Mod || #'SymbolsFromModule'{symbols = S, module = Mod} <- Imports,
#'Externaltypereference'{type = T} <- S,
- Type == T] of
+ Type =:= T] of
[#'Externaltypereference'{type = Mod}|_] -> Mod;
%% This allows the same symbol to be imported several times
%% which ought to be checked elsewhere and flagged as an error
[] -> Current
end.
-%% --------------------------
-
-
-%% This should probably be removed very soon
-% parse_ConstrainedType(Tokens) ->
-% case (catch parse_TypeWithConstraint(Tokens)) of
-% {'EXIT',Reason} ->
-% {Type,Rest} = parse_Type(Tokens),
-% {Constraint,Rest2} = parse_Constraint(Rest),
-% {Type#type{constraint=Constraint},Rest2};
-% {asn1_error,Reason2} ->
-% {Type,Rest} = parse_Type(Tokens),
-% {Constraint,Rest2} = parse_Constraint(Rest),
-% {Type#type{constraint=Constraint},Rest2};
-% Result ->
-% Result
-% end.
parse_Constraints(Tokens) ->
parse_Constraints(Tokens,[]).
@@ -819,9 +701,9 @@ parse_Constraints(Tokens,Acc) ->
{Constraint,Rest} = parse_Constraint(Tokens),
case Rest of
[{'(',_}|_Rest2] ->
- parse_Constraints(Rest,[Constraint|Acc]);
+ parse_Constraints(Rest, [Constraint|Acc]);
_ ->
- {lists:reverse([Constraint|Acc]),Rest}
+ {lists:reverse(Acc, [Constraint]),Rest}
end.
parse_Constraint([{'(',_}|Rest]) ->
@@ -830,46 +712,27 @@ parse_Constraint([{'(',_}|Rest]) ->
case Rest3 of
[{')',_}|Rest4] ->
{#constraint{c=Constraint,e=Exception},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,')']}})
- end;
-parse_Constraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'(']}}).
+ [_|_] ->
+ parse_error(Rest3)
+ end.
parse_ConstraintSpec(Tokens) ->
Flist = [fun parse_GeneralConstraint/1,
fun parse_SubtypeConstraint/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason2} ->
- throw({asn1_error,Reason2});
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ExceptionSpec([LPar={')',_}|Rest]) ->
{undefined,[LPar|Rest]};
parse_ExceptionSpec([{'!',_}|Rest]) ->
parse_ExceptionIdentification(Rest);
parse_ExceptionSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,[')','!']]}}).
+ parse_error(Tokens).
parse_ExceptionIdentification(Tokens) ->
Flist = [fun parse_SignedNumber/1,
fun parse_DefinedValue/1,
fun parse_TypeColonValue/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason2} ->
- throw({asn1_error,Reason2});
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_TypeColonValue(Tokens) ->
{Type,Rest} = parse_Type(Tokens),
@@ -877,32 +740,28 @@ parse_TypeColonValue(Tokens) ->
[{':',_}|Rest2] ->
{Value,Rest3} = parse_Value(Rest2),
{{Type,Value},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ [_|_] ->
+ parse_error(Rest)
end.
parse_SubtypeConstraint(Tokens) ->
parse_ElementSetSpecs(Tokens).
-parse_ElementSetSpecs([{'...',_}|Rest]) ->
- {Elements,Rest2} = parse_ElementSetSpec(Rest),
- {{[],Elements},Rest2};
parse_ElementSetSpecs(Tokens) ->
{RootElems,Rest} = parse_ElementSetSpec(Tokens),
case Rest of
[{',',_},{'...',_},{',',_}|Rest2] ->
{AdditionalElems,Rest3} = parse_ElementSetSpec(Rest2),
- {{RootElems,AdditionalElems},Rest3};
+ {{element_set,RootElems,AdditionalElems},Rest3};
[{',',_},{'...',_}|Rest2] ->
- {{RootElems,[]},Rest2};
+ {{element_set,RootElems,empty},Rest2};
_ ->
- {RootElems,Rest}
+ {{element_set,RootElems,none},Rest}
end.
parse_ElementSetSpec([{'ALL',_},{'EXCEPT',_}|Rest]) ->
{Exclusions,Rest2} = parse_Elements(Rest),
- {{'ALL',{'EXCEPT',Exclusions}},Rest2};
+ {{'ALL-EXCEPT',Exclusions},Rest2};
parse_ElementSetSpec(Tokens) ->
parse_Unions(Tokens).
@@ -918,14 +777,8 @@ parse_Unions(Tokens) ->
case {InterSec,Unions} of
{InterSec,[]} ->
{InterSec,Rest2};
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest2};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [union|V2],Rest2};
{V1,V2} ->
- {[V1,union,V2],Rest2}
-% Other ->
-% throw(Other)
+ {{union,V1,V2},Rest2}
end.
parse_UnionsRec([{'|',_}|Rest]) ->
@@ -934,12 +787,8 @@ parse_UnionsRec([{'|',_}|Rest]) ->
case {InterSec,URec} of
{V1,[]} ->
{V1,Rest3};
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',ordsets:union(to_set(V1),to_set(V2))},Rest3};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [union|V2],Rest3};
{V1,V2} ->
- {[V1,union,V2],Rest3}
+ {{union,V1,V2},Rest3}
end;
parse_UnionsRec([{'UNION',Info}|Rest]) ->
parse_UnionsRec([{'|',Info}|Rest]);
@@ -952,13 +801,8 @@ parse_Intersections(Tokens) ->
case {InterSec,IRec} of
{V1,[]} ->
{V1,Rest2};
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',
- ordsets:intersection(to_set(V1),to_set(V2))},Rest2};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [intersection|V2],Rest2};
{V1,V2} ->
- {[V1,intersection,V2],Rest2}
+ {{intersection,V1,V2},Rest2}
end.
%% parse_IElemsRec(Tokens) -> Result
@@ -967,15 +811,10 @@ parse_IElemsRec([{'^',_}|Rest]) ->
{InterSec,Rest2} = parse_IntersectionElements(Rest),
{IRec,Rest3} = parse_IElemsRec(Rest2),
case {InterSec,IRec} of
- {{'SingleValue',V1},{'SingleValue',V2}} ->
- {{'SingleValue',
- ordsets:intersection(to_set(V1),to_set(V2))},Rest3};
{V1,[]} ->
- {V1,Rest3};
- {V1,V2} when is_list(V2) ->
- {[V1] ++ [intersection|V2],Rest3};
+ {V1,Rest2};
{V1,V2} ->
- {[V1,intersection,V2],Rest3}
+ {{intersection,V1,V2},Rest3}
end;
parse_IElemsRec([{'INTERSECTION',Info}|Rest]) ->
parse_IElemsRec([{'^',Info}|Rest]);
@@ -992,7 +831,7 @@ parse_IntersectionElements(Tokens) ->
case Rest of
[{'EXCEPT',_}|Rest2] ->
{Exclusion,Rest3} = parse_Elements(Rest2),
- {{InterSec,{'EXCEPT',Exclusion}},Rest3};
+ {{'EXCEPT',InterSec,Exclusion},Rest3};
Rest ->
{InterSec,Rest}
end.
@@ -1006,102 +845,73 @@ parse_Elements([{'(',_}|Rest]) ->
case Rest2 of
[{')',_}|Rest3] ->
{Elems,Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,')']}})
+ [_|_] ->
+ parse_error(Rest2)
end;
parse_Elements(Tokens) ->
Flist = [fun parse_ObjectSetElements/1,
fun parse_SubtypeElements/1,
-% fun parse_Value/1,
-% fun parse_Type/1,
fun parse_Object/1,
fun parse_DefinedObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- Err = {asn1_error,_} ->
- throw(Err);
- Result = {Val,_} when is_record(Val,type) ->
- Result;
-
- Result ->
- Result
- end.
-
-
+ parse_or(Tokens, Flist).
%% --------------------------
-parse_DefinedObjectClass([{typereference,_,_ModName},{'.',_},Tr={typereference,_,_ObjClName}|Rest]) ->
-%% {{objectclassname,ModName,ObjClName},Rest};
-% {{objectclassname,tref2Exttref(Tr)},Rest};
- {tref2Exttref(Tr),Rest};
+parse_DefinedObjectClass([{typereference,_,ModName},{'.',_},
+ {typereference,Pos,Name}|Tokens]) ->
+ Ext = #'Externaltypereference'{pos=Pos,
+ module=ModName,
+ type=Name},
+ {Ext,Tokens};
parse_DefinedObjectClass([Tr={typereference,_,_ObjClName}|Rest]) ->
-% {{objectclassname,tref2Exttref(Tr)},Rest};
{tref2Exttref(Tr),Rest};
parse_DefinedObjectClass(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['typereference . typereference',
- typereference,
- 'TYPE-IDENTIFIER',
- 'ABSTRACT-SYNTAX']]}}).
-
-parse_ObjectClassAssignment([{typereference,L1,ObjClName},{'::=',_}|Rest]) ->
- {Type,Rest2} = parse_ObjectClass(Rest),
- {#classdef{pos=L1,name=ObjClName,module=resolve_module(Type),
- typespec=Type},Rest2};
-parse_ObjectClassAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- 'typereference ::=']}}).
+ parse_error(Tokens).
parse_ObjectClass(Tokens) ->
- Flist = [fun parse_DefinedObjectClass/1,
- fun parse_ObjectClassDefn/1,
- fun parse_ParameterizedObjectClass/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason2} ->
- throw({asn1_error,Reason2});
- Result ->
- Result
- end.
+ Flist = [fun parse_ObjectClassDefn/1,
+ fun parse_DefinedObjectClass/1],
+ parse_or(Tokens, Flist).
parse_ObjectClassDefn([{'CLASS',_},{'{',_}|Rest]) ->
{Type,Rest2} = parse_FieldSpec(Rest),
{WithSyntaxSpec,Rest3} = parse_WithSyntaxSpec(Rest2),
{#objectclass{fields=Type,syntax=WithSyntaxSpec},Rest3};
parse_ObjectClassDefn(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'CLASS {']}}).
+ parse_error(Tokens).
parse_FieldSpec(Tokens) ->
parse_FieldSpec(Tokens,[]).
-parse_FieldSpec(Tokens,Acc) ->
- Flist = [fun parse_FixedTypeValueFieldSpec/1,
- fun parse_VariableTypeValueFieldSpec/1,
- fun parse_ObjectFieldSpec/1,
- fun parse_FixedTypeValueSetFieldSpec/1,
- fun parse_VariableTypeValueSetFieldSpec/1,
- fun parse_TypeFieldSpec/1,
- fun parse_ObjectSetFieldSpec/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
+parse_FieldSpec(Tokens0, Acc) ->
+ Fl = case Tokens0 of
+ [{valuefieldreference,_,_}|_] ->
+ %% 1) &field Type
+ %% &object CLASS-NAME
+ %% 2) &field &FieldName
+ %% A fixed type field cannot be distinguished from
+ %% an object field without type information.
+ [fun parse_FixedTypeValueFieldSpec/1,
+ fun parse_VariableTypeValueFieldSpec/1];
+ [{typefieldreference,_,_}|_] ->
+ %% 1) &Set Type
+ %% &ObjectSet CLASS-NAME
+ %% 2) &Set &FieldName
+ %% 3) &Type
+		 %% A value set and an object set cannot be distinguished
+ %% without type information.
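+		 %% For example, "&TypeSet INTEGER" is a fixed type value
+		 %% set field, while "&Objects OTHER-CLASS" is an object
+		 %% set field; syntactically both are "&Ucase Reference".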
+ [fun parse_FixedTypeValueSetFieldSpec/1,
+ fun parse_VariableTypeValueSetFieldSpec/1,
+ fun parse_TypeFieldSpec/1];
+ [_|_] ->
+ parse_error(Tokens0)
+ end,
+ case parse_or(Tokens0, Fl) of
{Type,[{'}',_}|Rest]} ->
- {lists:reverse([Type|Acc]),Rest};
+ {lists:reverse(Acc, [Type]),Rest};
{Type,[{',',_}|Rest2]} ->
- parse_FieldSpec(Rest2,[Type|Acc]);
- {_,[H|_T]} ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ parse_FieldSpec(Rest2, [Type|Acc])
end.
parse_PrimitiveFieldName([{typefieldreference,_,FieldName}|Rest]) ->
@@ -1109,27 +919,19 @@ parse_PrimitiveFieldName([{typefieldreference,_,FieldName}|Rest]) ->
parse_PrimitiveFieldName([{valuefieldreference,_,FieldName}|Rest]) ->
{{valuefieldreference,FieldName},Rest};
parse_PrimitiveFieldName(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typefieldreference,valuefieldreference]]}}).
+ parse_error(Tokens).
parse_FieldName(Tokens) ->
{Field,Rest} = parse_PrimitiveFieldName(Tokens),
parse_FieldName(Rest,[Field]).
-parse_FieldName([{'.',_}|Rest],Acc) ->
- case (catch parse_PrimitiveFieldName(Rest)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {FieldName,Rest2} ->
- parse_FieldName(Rest2,[FieldName|Acc])
- end;
-parse_FieldName(Tokens,Acc) ->
+parse_FieldName([{'.',_}|Rest0],Acc) ->
+ {FieldName,Rest1} = parse_PrimitiveFieldName(Rest0),
+ parse_FieldName(Rest1, [FieldName|Acc]);
+parse_FieldName(Tokens, Acc) ->
{lists:reverse(Acc),Tokens}.
-parse_FixedTypeValueFieldSpec([{valuefieldreference,L1,VFieldName}|Rest]) ->
+parse_FixedTypeValueFieldSpec([{valuefieldreference,_,VFieldName}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{Unique,Rest3} =
case Rest2 of
@@ -1139,109 +941,61 @@ parse_FixedTypeValueFieldSpec([{valuefieldreference,L1,VFieldName}|Rest]) ->
{undefined,Rest2}
end,
{OptionalitySpec,Rest5} = parse_ValueOptionalitySpec(Rest3),
- case {Unique,Rest5} of
- {'UNIQUE',[{Del,_}|_]} when Del =:= ','; Del =:= '}' ->
- case OptionalitySpec of
- {'DEFAULT',_} ->
- throw({asn1_error,
- {L1,get(asn1_module),
- ['UNIQUE and DEFAULT in same field',VFieldName]}});
- _ ->
- {{fixedtypevaluefield,VFieldName,Type,Unique,OptionalitySpec},Rest5}
- end;
- {_,[{Del,_}|_]} when Del =:= ','; Del =:= '}' ->
- {{object_or_fixedtypevalue_field,VFieldName,Type,Unique,OptionalitySpec},Rest5};
- _ ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,get_token(hd(Rest5)),expected,[',','}']]}})
- end;
-parse_FixedTypeValueFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+ case is_end_delimiter(Rest5) of
+ false -> parse_error(Rest5);
+ true -> ok
+ end,
+ Tag = case Unique of
+ 'UNIQUE' -> fixedtypevaluefield;
+ _ -> object_or_fixedtypevalue_field
+ end,
+ {{Tag,VFieldName,Type,Unique,OptionalitySpec},Rest5}.
+
+parse_VariableTypeValueFieldSpec([{valuefieldreference,_,VFieldName}|Rest0]) ->
+ {FieldRef,Rest1} = parse_FieldName(Rest0),
+ {OptionalitySpec,Rest} = parse_ValueOptionalitySpec(Rest1),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{variabletypevaluefield,VFieldName,FieldRef,OptionalitySpec},
+ Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_VariableTypeValueFieldSpec([{valuefieldreference,L,VFieldName}|Rest]) ->
- {FieldRef,Rest2} = parse_FieldName(Rest),
- {OptionalitySpec,Rest3} = parse_ValueOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{variabletypevaluefield,VFieldName,FieldRef,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_VariableTypeValueFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+parse_TypeFieldSpec([{typefieldreference,_,Name}|Rest0]) ->
+ {OptionalitySpec,Rest} = parse_TypeOptionalitySpec(Rest0),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{typefield,Name,OptionalitySpec},Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_ObjectFieldSpec([{valuefieldreference,L,VFieldName}|Rest]) ->
- {Class,Rest2} = parse_DefinedObjectClass(Rest),
- {OptionalitySpec,Rest3} = parse_ObjectOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{objectfield,VFieldName,Class,undefined,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_ObjectFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,valuefieldreference]}}).
+parse_FixedTypeValueSetFieldSpec([{typefieldreference,_,Name}|Rest0]) ->
+ {Type,Rest1} = parse_Type(Rest0),
+ {OptionalitySpec,Rest} = parse_ValueSetOptionalitySpec(Rest1),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{objectset_or_fixedtypevalueset_field,Name,Type,
+ OptionalitySpec},Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_TypeFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {OptionalitySpec,Rest2} = parse_TypeOptionalitySpec(Rest),
- case Rest2 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{typefield,TFieldName,OptionalitySpec},Rest2};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest2)),expected,[',','}']]}})
- end;
-parse_TypeFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+parse_VariableTypeValueSetFieldSpec([{typefieldreference,_,Name}|Rest0]) ->
+ {FieldRef,Rest1} = parse_FieldName(Rest0),
+ {OptionalitySpec,Rest} = parse_ValueSetOptionalitySpec(Rest1),
+ case is_end_delimiter(Rest) of
+ true ->
+ {{variabletypevaluesetfield,Name,FieldRef,OptionalitySpec},
+ Rest};
+ false ->
+ parse_error(Rest)
+ end.
-parse_FixedTypeValueSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {Type,Rest2} = parse_Type(Rest),
- {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{objectset_or_fixedtypevalueset_field,TFieldName,Type,
- OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_FixedTypeValueSetFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
-
-parse_VariableTypeValueSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {FieldRef,Rest2} = parse_FieldName(Rest),
- {OptionalitySpec,Rest3} = parse_ValueSetOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{variabletypevaluesetfield,TFieldName,FieldRef,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_VariableTypeValueSetFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
-
-parse_ObjectSetFieldSpec([{typefieldreference,L,TFieldName}|Rest]) ->
- {Class,Rest2} = parse_DefinedObjectClass(Rest),
- {OptionalitySpec,Rest3} = parse_ObjectSetOptionalitySpec(Rest2),
- case Rest3 of
- [{Del,_}|_] when Del =:= ','; Del =:= '}' ->
- {{objectsetfield,TFieldName,Class,OptionalitySpec},Rest3};
- _ ->
- throw({asn1_error,{L,get(asn1_module),
- [got,get_token(hd(Rest3)),expected,[',','}']]}})
- end;
-parse_ObjectSetFieldSpec(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,typefieldreference]}}).
+is_end_delimiter([{',',_}|_]) -> true;
+is_end_delimiter([{'}',_}|_]) -> true;
+is_end_delimiter([_|_]) -> false.
parse_ValueOptionalitySpec(Tokens)->
case Tokens of
@@ -1252,15 +1006,6 @@ parse_ValueOptionalitySpec(Tokens)->
_ -> {'MANDATORY',Tokens}
end.
-parse_ObjectOptionalitySpec(Tokens) ->
- case Tokens of
- [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
- [{'DEFAULT',_}|Rest] ->
- {Object,Rest2} = parse_Object(Rest),
- {{'DEFAULT',Object},Rest2};
- _ -> {'MANDATORY',Tokens}
- end.
-
parse_TypeOptionalitySpec(Tokens) ->
case Tokens of
[{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
@@ -1279,65 +1024,44 @@ parse_ValueSetOptionalitySpec(Tokens) ->
_ -> {'MANDATORY',Tokens}
end.
-parse_ObjectSetOptionalitySpec(Tokens) ->
- case Tokens of
- [{'OPTIONAL',_}|Rest] -> {'OPTIONAL',Rest};
- [{'DEFAULT',_}|Rest] ->
- {ObjectSet,Rest2} = parse_ObjectSet(Rest),
- {{'DEFAULT',ObjectSet},Rest2};
- _ -> {'MANDATORY',Tokens}
- end.
-
parse_WithSyntaxSpec([{'WITH',_},{'SYNTAX',_}|Rest]) ->
{SyntaxList,Rest2} = parse_SyntaxList(Rest),
{{'WITH SYNTAX',SyntaxList},Rest2};
parse_WithSyntaxSpec(Tokens) ->
{[],Tokens}.
-parse_SyntaxList([{'{',_},{'}',_}|Rest]) ->
- {[],Rest};
parse_SyntaxList([{'{',_}|Rest]) ->
parse_SyntaxList(Rest,[]);
parse_SyntaxList(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,['{}','{']]}}).
+ parse_error(Tokens).
-parse_SyntaxList(Tokens,Acc) ->
+parse_SyntaxList(Tokens, Acc) ->
{SyntaxList,Rest} = parse_TokenOrGroupSpec(Tokens),
case Rest of
[{'}',_}|Rest2] ->
- {lists:reverse([SyntaxList|Acc]),Rest2};
+ {lists:reverse(Acc, [SyntaxList]),Rest2};
_ ->
- parse_SyntaxList(Rest,[SyntaxList|Acc])
+ parse_SyntaxList(Rest, [SyntaxList|Acc])
end.
parse_TokenOrGroupSpec(Tokens) ->
Flist = [fun parse_RequiredToken/1,
fun parse_OptionalGroup/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
-parse_RequiredToken([{typereference,L1,WordName}|Rest]) ->
+parse_RequiredToken([{typereference,_,WordName}|Rest]=Tokens) ->
case is_word(WordName) of
false ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,WordName,expected,a,'Word']}});
+ parse_error(Tokens);
true ->
{WordName,Rest}
end;
parse_RequiredToken([{',',L1}|Rest]) ->
{{',',L1},Rest};
-parse_RequiredToken([{WordName,L1}|Rest]) ->
+parse_RequiredToken([{WordName,_}|Rest]=Tokens) ->
case is_word(WordName) of
false ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,WordName,expected,a,'Word']}});
+ parse_error(Tokens);
true ->
{WordName,Rest}
end;
@@ -1347,7 +1071,9 @@ parse_RequiredToken(Tokens) ->
parse_OptionalGroup([{'[',_}|Rest]) ->
{Spec,Rest2} = parse_TokenOrGroupSpec(Rest),
{SpecList,Rest3} = parse_OptionalGroup(Rest2,[Spec]),
- {SpecList,Rest3}.
+ {SpecList,Rest3};
+parse_OptionalGroup(Tokens) ->
+ parse_error(Tokens).
parse_OptionalGroup([{']',_}|Rest],Acc) ->
{lists:reverse(Acc),Rest};
@@ -1355,82 +1081,55 @@ parse_OptionalGroup(Tokens,Acc) ->
{Spec,Rest} = parse_TokenOrGroupSpec(Tokens),
parse_OptionalGroup(Rest,[Spec|Acc]).
-parse_DefinedObject([Id={identifier,_,_ObjName}|Rest]) ->
+parse_DefinedObject([#identifier{}=Id|Rest]) ->
{{object,identifier2Extvalueref(Id)},Rest};
-parse_DefinedObject([{typereference,L1,ModName},{'.',_},{identifier,_,ObjName}|Rest]) ->
+parse_DefinedObject([{typereference,L1,ModName},{'.',_},#identifier{val=ObjName}|Rest]) ->
{{object, #'Externaltypereference'{pos=L1,module=ModName,type=ObjName}},Rest};
parse_DefinedObject(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [identifier,'typereference.identifier']]}}).
+ parse_error(Tokens).
-parse_ObjectAssignment([{identifier,L1,ObjName}|Rest]) ->
+parse_ObjectAssignment([#identifier{pos=L1,val=ObjName}|Rest]) ->
{Class,Rest2} = parse_DefinedObjectClass(Rest),
case Rest2 of
[{'::=',_}|Rest3] ->
{Object,Rest4} = parse_Object(Rest3),
{#typedef{pos=L1,name=ObjName,
typespec=#'Object'{classname=Class,def=Object}},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}});
- Other ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,Other,expected,'::=']}})
- end;
-parse_ObjectAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
-
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_Object(Tokens) -> Ret
%% Tokens = [Tok]
%% Tok = tuple()
%% Ret = {object,_} | {object, _, _}
parse_Object(Tokens) ->
- Flist=[fun parse_ObjectDefn/1,
- fun parse_ObjectFromObject/1,
- fun parse_ParameterizedObject/1,
- fun parse_DefinedObject/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ %% The ObjectFromObject production is not included here,
+    %% since it will have been caught by the ValueFromObject production
+ %% before we reach this point.
+ Flist = [fun parse_ObjectDefn/1,
+ fun parse_DefinedObject/1],
+ parse_or(Tokens, Flist).
parse_ObjectDefn(Tokens) ->
Flist=[fun parse_DefaultSyntax/1,
fun parse_DefinedSyntax/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
-parse_DefaultSyntax([{'{',_},{'}',_}|Rest]) ->
- {{object,defaultsyntax,[]},Rest};
parse_DefaultSyntax([{'{',_}|Rest]) ->
parse_DefaultSyntax(Rest,[]);
parse_DefaultSyntax(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,['{}','{']]}}).
+ parse_error(Tokens).
-parse_DefaultSyntax(Tokens,Acc) ->
+parse_DefaultSyntax(Tokens, Acc) ->
{Setting,Rest} = parse_FieldSetting(Tokens),
case Rest of
[{',',_}|Rest2] ->
parse_DefaultSyntax(Rest2,[Setting|Acc]);
[{'}',_}|Rest3] ->
- {{object,defaultsyntax,lists:reverse([Setting|Acc])},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,[',','}']]}})
+ {{object,defaultsyntax,lists:reverse(Acc, [Setting])},Rest3};
+ _ ->
+ parse_error(Rest)
end.
parse_FieldSetting(Tokens) ->
@@ -1439,7 +1138,9 @@ parse_FieldSetting(Tokens) ->
{{PrimFieldName,Setting},Rest2}.
parse_DefinedSyntax([{'{',_}|Rest]) ->
- parse_DefinedSyntax(Rest,[]).
+ parse_DefinedSyntax(Rest, []);
+parse_DefinedSyntax(Tokens) ->
+ parse_error(Tokens).
parse_DefinedSyntax(Tokens,Acc) ->
case Tokens of
@@ -1455,95 +1156,70 @@ parse_DefinedSyntax(Tokens,Acc) ->
%% Literal ::= word | ','
%% Setting ::= Type | Value | ValueSet | Object | ObjectSet
%% word equals typereference, but no lower cases
-parse_DefinedSyntaxToken([{',',L1}|Rest]) ->
- {{',',L1},Rest};
+parse_DefinedSyntaxToken([{',',_}=Comma|Rest]) ->
+ {Comma,Rest};
%% ObjectClassFieldType or a defined type with a constraint.
%% Should also be able to parse a parameterized type. It may be
%% impossible to distinguish between a parameterized type and a Literal
%% followed by an object set.
-parse_DefinedSyntaxToken(Tokens=[{typereference,L1,_Name},{T,_}|_Rest])
- when T == '.'; T == '(' ->
- case catch parse_Setting(Tokens) of
- {asn1_error,_} ->
- throw({asn1_error,{L1,get(asn1_module),
- [got,hd(Tokens), expected,['Word',setting]]}});
- {'EXIT',Reason} ->
- exit(Reason);
- Result ->
- Result
- end;
-parse_DefinedSyntaxToken(Tokens=[TRef={typereference,L1,Name}|Rest]) ->
+parse_DefinedSyntaxToken([{typereference,_,_Name},{T,_}|_]=Tokens)
+ when T =:= '.'; T =:= '(' ->
+ parse_Setting(Tokens);
+parse_DefinedSyntaxToken([{typereference,L1,Name}=TRef|Rest]=Tokens) ->
case is_word(Name) of
false ->
case lookahead_definedsyntax(Rest) of
word_or_setting ->
{{setting,L1,tref2Exttref(TRef)},Rest};
- _ ->
+ setting ->
parse_Setting(Tokens)
end;
true ->
- %% {{word_or_setting,L1,Name},Rest}
{{word_or_setting,L1,tref2Exttref(TRef)},Rest}
end;
parse_DefinedSyntaxToken(Tokens) ->
- case catch parse_Setting(Tokens) of
- {asn1_error,_} ->
- parse_Word(Tokens);
- {'EXIT',Reason} ->
- exit(Reason);
- Result ->
+ try parse_Setting(Tokens) of
+ {_,_}=Result ->
Result
+ catch
+ throw:{asn1_error,_} ->
+ parse_Word(Tokens)
end.
lookahead_definedsyntax([{typereference,_,Name}|_Rest]) ->
- case is_word(Name) of
+ case is_word(Name) of
true -> word_or_setting;
- _ -> setting
+ false -> setting
end;
lookahead_definedsyntax([{'}',_}|_Rest]) ->
word_or_setting;
lookahead_definedsyntax(_) ->
setting.
-parse_Word([{Name,Pos}|Rest]) ->
+parse_Word([{Name,Pos}|Rest]=Tokens) ->
case is_word(Name) of
false ->
- throw({asn1_error,{Pos,get(asn1_module),
- [got,Name, expected,a,'Word']}});
+ parse_error(Tokens);
true ->
{{word_or_setting,Pos,tref2Exttref(Pos,Name)},Rest}
- end.
+ end;
+parse_Word(Tokens) ->
+ parse_error(Tokens).
parse_Setting(Tokens) ->
Flist = [{type_tag,fun parse_Type/1},
{value_tag,fun parse_Value/1},
{object_tag,fun parse_Object/1},
{objectset_tag,fun parse_ObjectSet/1}],
- case (catch parse_or_tag(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result = {{value_tag,_},_} ->
+ case parse_or_tag(Tokens, Flist) of
+ {{value_tag,_},_}=Result ->
+ %% Keep the value_tag.
Result;
{{Tag,Setting},Rest} when is_atom(Tag) ->
+ %% Remove all other tags.
{Setting,Rest}
end.
-%% parse_Setting(Tokens) ->
-%% Flist = [fun parse_Type/1,
-%% fun parse_Value/1,
-%% fun parse_Object/1,
-%% fun parse_ObjectSet/1],
-%% case (catch parse_or(Tokens,Flist)) of
-%% {'EXIT',Reason} ->
-%% exit(Reason);
-%% AsnErr = {asn1_error,_} ->
-%% throw(AsnErr);
-%% Result ->
-%% Result
-%% end.
-
parse_DefinedObjectSet([{typereference,L1,ModuleName},{'.',_},
{typereference,L2,ObjSetName}|Rest]) ->
{{objectset,L1,#'Externaltypereference'{pos=L2,module=ModuleName,
@@ -1552,9 +1228,7 @@ parse_DefinedObjectSet([{typereference,L1,ObjSetName}|Rest]) ->
{{objectset,L1,#'Externaltypereference'{pos=L1,module=resolve_module(ObjSetName),
type=ObjSetName}},Rest};
parse_DefinedObjectSet(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,'typereference.typereference']]}}).
+ parse_error(Tokens).
parse_ObjectSetAssignment([{typereference,L1,ObjSetName}|Rest]) ->
{Class,Rest2} = parse_DefinedObjectClass(Rest),
@@ -1564,16 +1238,9 @@ parse_ObjectSetAssignment([{typereference,L1,ObjSetName}|Rest]) ->
{#typedef{pos=L1,name=ObjSetName,
typespec=#'ObjectSet'{class=Class,
set=ObjectSet}},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
-%%% Other ->
-%%% throw(Other)
- end;
-parse_ObjectSetAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_ObjectSet(Tokens) -> {Ret,Rest}
%% Tokens = [Tok]
@@ -1590,26 +1257,20 @@ parse_ObjectSet([{'{',_}|Rest]) ->
case Rest2 of
[{'}',_}|Rest3] ->
{ObjSetSpec,Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_ObjectSet(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
-parse_ObjectSetSpec([{'...',_}|Rest]) ->
- case Rest of
- [{',',_}|Rest2] ->
- {Elements,Rest3}=parse_ElementSetSpecs(Rest2),
- {{[],Elements},Rest3};
- _ ->
- {['EXTENSIONMARK'],Rest}
- end;
+parse_ObjectSetSpec([{'...',_},{',',_}|Tokens0]) ->
+ {Elements,Tokens} = parse_ElementSetSpec(Tokens0),
+ {{element_set,empty,Elements},Tokens};
+parse_ObjectSetSpec([{'...',_}|Tokens]) ->
+ {{element_set,empty,empty},Tokens};
parse_ObjectSetSpec(Tokens) ->
parse_ElementSetSpecs(Tokens).
-% moved fun parse_Object/1 and fun parse_DefinedObjectSet/1 to parse_Elements
%% parse_ObjectSetElements(Tokens) -> {Result,Rest}
%% Result ::= {'ObjectSetFromObjects',Objects,Name} | {pos,ObjectSet,Params}
%% Objects ::= ReferencedObjects
@@ -1619,18 +1280,9 @@ parse_ObjectSetSpec(Tokens) ->
%% ObjectSet ::= {objectset,integer(),#'Externaltypereference'{}}
%% Params ::= list() (see parse_ActualParameterList/1)
parse_ObjectSetElements(Tokens) ->
- Flist = [%fun parse_Object/1,
- %fun parse_DefinedObjectSet/1,
- fun parse_ObjectSetFromObjects/1,
+ Flist = [fun parse_ObjectSetFromObjects/1,
fun parse_ParameterizedObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ObjectClassFieldType(Tokens) ->
{Class,Rest} = parse_DefinedObjectClass(Tokens),
@@ -1641,25 +1293,10 @@ parse_ObjectClassFieldType(Tokens) ->
classname=Class,
class=Class,fieldname=FieldName},
{#type{def=OCFT},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw(Other)
+ _ ->
+ parse_error(Rest)
end.
-%parse_ObjectClassFieldValue(Tokens) ->
-% Flist = [fun parse_OpenTypeFieldVal/1,
-% fun parse_FixedTypeFieldVal/1],
-% case (catch parse_or(Tokens,Flist)) of
-% {'EXIT',Reason} ->
-% throw(Reason);
-% AsnErr = {asn1_error,_} ->
-% throw(AsnErr);
-% Result ->
-% Result
-% end.
-
parse_ObjectClassFieldValue(Tokens) ->
parse_OpenTypeFieldVal(Tokens).
@@ -1669,28 +1306,10 @@ parse_OpenTypeFieldVal(Tokens) ->
[{':',_}|Rest2] ->
{Value,Rest3} = parse_Value(Rest2),
{{opentypefieldvalue,Type,Value},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ _ ->
+ parse_error(Rest)
end.
-% parse_FixedTypeFieldVal(Tokens) ->
-% parse_Value(Tokens).
-
-% parse_InformationFromObjects(Tokens) ->
-% Flist = [fun parse_ValueFromObject/1,
-% fun parse_ValueSetFromObjects/1,
-% fun parse_TypeFromObject/1,
-% fun parse_ObjectFromObject/1],
-% case (catch parse_or(Tokens,Flist)) of
-% {'EXIT',Reason} ->
-% throw(Reason);
-% AsnErr = {asn1_error,_} ->
-% throw(AsnErr);
-% Result ->
-% Result
-% end.
-
%% parse_ReferencedObjects(Tokens) -> {Result,Rest}
%% Result ::= DefObject | DefObjSet |
%% {po,DefObject,Params} | {pos,DefObjSet,Params} |
@@ -1702,18 +1321,11 @@ parse_OpenTypeFieldVal(Tokens) ->
parse_ReferencedObjects(Tokens) ->
Flist = [fun parse_DefinedObject/1,
fun parse_DefinedObjectSet/1,
- fun parse_ParameterizedObject/1,
fun parse_ParameterizedObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ValueFromObject(Tokens) ->
+ %% This production also matches ObjectFromObject.
{Objects,Rest} = parse_ReferencedObjects(Tokens),
case Rest of
[{'.',_}|Rest2] ->
@@ -1722,35 +1334,10 @@ parse_ValueFromObject(Tokens) ->
{valuefieldreference,_} ->
{{'ValueFromObject',Objects,Name},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,typefieldreference,expected,
- valuefieldreference]}})
+ parse_error(Rest2)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw({asn1_error,{got,Other,expected,'.'}})
- end.
-
-parse_ValueSetFromObjects(Tokens) ->
- {Objects,Rest} = parse_ReferencedObjects(Tokens),
- case Rest of
- [{'.',_}|Rest2] ->
- {Name,Rest3} = parse_FieldName(Rest2),
- case lists:last(Name) of
- {typefieldreference,_FieldName} ->
- {{'ValueSetFromObjects',Objects,Name},Rest3};
- _ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,
- typefieldreference]}})
- end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw({asn1_error,{got,Other,expected,'.'}})
+ _ ->
+ parse_error(Rest)
end.
parse_TypeFromObject(Tokens) ->
@@ -1760,28 +1347,12 @@ parse_TypeFromObject(Tokens) ->
{Name,Rest3} = parse_FieldName(Rest2),
case lists:last(Name) of
{typefieldreference,_FieldName} ->
- {{'TypeFromObject',Objects,Name},Rest3};
+ {#type{def={'TypeFromObject',Objects,Name}},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,
- typefieldreference]}})
+ parse_error(Rest2)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
-%%% Other ->
-%%% throw({asn1_error,{got,Other,expected,'.'}})
- end.
-
-parse_ObjectFromObject(Tokens) ->
- {Objects,Rest} = parse_ReferencedObjects(Tokens),
- case Rest of
- [{'.',_}|Rest2] ->
- {Name,Rest3} = parse_FieldName(Rest2),
- {{'ObjectFromObject',Objects,Name},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
+ _ ->
+ parse_error(Rest)
end.
%% parse_ObjectSetFromObjects(Tokens) -> {Result,Rest}
@@ -1799,23 +1370,12 @@ parse_ObjectSetFromObjects(Tokens) ->
{typefieldreference,_FieldName} ->
{{'ObjectSetFromObjects',Objects,Name},Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,
- typefieldreference]}})
+ parse_error(Rest2)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'.']}})
+ _ ->
+ parse_error(Rest)
end.
-% parse_InstanceOfType([{'INSTANCE',_},{'OF',_}|Rest]) ->
-% {Class,Rest2} = parse_DefinedObjectClass(Rest),
-% {{'InstanceOfType',Class},Rest2}.
-
-% parse_InstanceOfValue(Tokens) ->
-% parse_Value(Tokens).
-
-
%% X.682 constraint specification
@@ -1823,14 +1383,7 @@ parse_GeneralConstraint(Tokens) ->
Flist = [fun parse_UserDefinedConstraint/1,
fun parse_TableConstraint/1,
fun parse_ContentsConstraint/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_UserDefinedConstraint([{'CONSTRAINED',_},{'BY',_},{'{',_},{'}',_}|Rest])->
{{constrained_by,[]},Rest};
@@ -1841,32 +1394,23 @@ parse_UserDefinedConstraint([{'CONSTRAINED',_},
case Rest2 of
[{'}',_}|Rest3] ->
{{constrained_by,Param},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_UserDefinedConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['CONSTRAINED BY {}','CONSTRAINED BY {']]}}).
+ parse_error(Tokens).
parse_UserDefinedConstraintParameter(Tokens) ->
- parse_UserDefinedConstraintParameter(Tokens,[]).
-parse_UserDefinedConstraintParameter(Tokens,Acc) ->
+ parse_UserDefinedConstraintParameter(Tokens, []).
+
+parse_UserDefinedConstraintParameter(Tokens0, Acc) ->
Flist = [fun parse_GovernorAndActualParameter/1,
fun parse_ActualParameter/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {Result,Rest} ->
- case Rest of
- [{',',_}|_Rest2] ->
- parse_UserDefinedConstraintParameter(Tokens,[Result|Acc]);
- _ ->
- {lists:reverse([Result|Acc]),Rest}
- end
+ case parse_or(Tokens0, Flist) of
+ {Result,[{',',_}|Tokens]} ->
+ parse_UserDefinedConstraintParameter(Tokens, [Result|Acc]);
+ {Result,Tokens} ->
+ {lists:reverse(Acc, [Result]),Tokens}
end.
parse_GovernorAndActualParameter(Tokens) ->
@@ -1875,26 +1419,18 @@ parse_GovernorAndActualParameter(Tokens) ->
[{':',_}|Rest2] ->
{Params,Rest3} = parse_ActualParameter(Rest2),
{{'Governor_Params',Governor,Params},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ _ ->
+ parse_error(Rest)
end.
parse_TableConstraint(Tokens) ->
Flist = [fun parse_ComponentRelationConstraint/1,
fun parse_SimpleTableConstraint/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_SimpleTableConstraint(Tokens) ->
{ObjectSet,Rest} = parse_ObjectSet(Tokens),
- {{simpletable,ObjectSet},Rest}.
+ {{element_set,{simpletable,ObjectSet},none},Rest}.
parse_ComponentRelationConstraint([{'{',_}|Rest]) ->
{ObjectSet,Rest2} = parse_DefinedObjectSet(Rest),
@@ -1903,21 +1439,18 @@ parse_ComponentRelationConstraint([{'{',_}|Rest]) ->
{AtNot,Rest4} = parse_AtNotationList(Rest3,[]),
case Rest4 of
[{'}',_}|Rest5] ->
- {{componentrelation,ObjectSet,AtNot},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ Ret = {element_set,
+ {componentrelation,ObjectSet,AtNot},
+ none},
+ {Ret,Rest5};
+ _ ->
+ parse_error(Rest4)
end;
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,
- 'ComponentRelationConstraint',ended,with,'}']}})
-%%% Other ->
-%%% throw(Other)
+ _ ->
+ parse_error(Rest2)
end;
parse_ComponentRelationConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
parse_AtNotationList(Tokens,Acc) ->
{AtNot,Rest} = parse_AtNotation(Tokens),
@@ -1925,7 +1458,7 @@ parse_AtNotationList(Tokens,Acc) ->
[{',',_}|Rest2] ->
parse_AtNotationList(Rest2,[AtNot|Acc]);
_ ->
- {lists:reverse([AtNot|Acc]),Rest}
+ {lists:reverse(Acc, [AtNot]),Rest}
end.
parse_AtNotation([{'@',_},{'.',_}|Rest]) ->
@@ -1935,20 +1468,17 @@ parse_AtNotation([{'@',_}|Rest]) ->
{CIdList,Rest2} = parse_ComponentIdList(Rest),
{{outermost,CIdList},Rest2};
parse_AtNotation(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,['@','@.']]}}).
+ parse_error(Tokens).
parse_ComponentIdList(Tokens) ->
parse_ComponentIdList(Tokens,[]).
-parse_ComponentIdList([Id = {identifier,_,_},{'.',_}|Rest],Acc) ->
+parse_ComponentIdList([#identifier{}=Id,{'.',_}|Rest], Acc) ->
parse_ComponentIdList(Rest,[identifier2Extvalueref(Id)|Acc]);
-parse_ComponentIdList([Id = {identifier,_,_}|Rest],Acc) ->
- {lists:reverse([identifier2Extvalueref(Id)|Acc]),Rest};
+parse_ComponentIdList([#identifier{}=Id|Rest], Acc) ->
+ {lists:reverse(Acc, [identifier2Extvalueref(Id)]),Rest};
parse_ComponentIdList(Tokens,_) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [identifier,'identifier.']]}}).
+ parse_error(Tokens).
parse_ContentsConstraint([{'CONTAINING',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
@@ -1963,24 +1493,14 @@ parse_ContentsConstraint([{'ENCODED',_},{'BY',_}|Rest]) ->
{Value,Rest2} = parse_Value(Rest),
{{contentsconstraint,[],Value},Rest2};
parse_ContentsConstraint(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- 'CONTAINING','or','ENCODED BY']}}).
-
+ parse_error(Tokens).
% X.683 Parameterization of ASN.1 specifications
parse_Governor(Tokens) ->
Flist = [fun parse_Type/1,
fun parse_DefinedObjectClass/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ActualParameter(Tokens) ->
Flist = [fun parse_Type/1,
@@ -1989,32 +1509,7 @@ parse_ActualParameter(Tokens) ->
fun parse_DefinedObjectClass/1,
fun parse_Object/1,
fun parse_ObjectSet/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
-
-parse_ParameterizedAssignment(Tokens) ->
- Flist = [fun parse_ParameterizedTypeAssignment/1,
- fun parse_ParameterizedValueAssignment/1,
- fun parse_ParameterizedValueSetTypeAssignment/1,
- fun parse_ParameterizedObjectClassAssignment/1,
- fun parse_ParameterizedObjectAssignment/1,
- fun parse_ParameterizedObjectSetAssignment/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- AsnAssErr = {asn1_assignment_error,_} ->
- throw(AsnAssErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
%% parse_ParameterizedTypeAssignment(Tokens) -> Result
%% Result = {#ptypedef{},Rest} | throw()
@@ -2025,18 +1520,13 @@ parse_ParameterizedTypeAssignment([{typereference,L1,Name}|Rest]) ->
{Type,Rest4} = parse_Type(Rest3),
{#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Type},
Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedTypeAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_ParameterizedValueAssignment(Tokens) -> Result
%% Result = {#pvaluedef{},Rest} | throw()
-parse_ParameterizedValueAssignment([{identifier,L1,Name}|Rest]) ->
+parse_ParameterizedValueAssignment([#identifier{pos=L1,val=Name}|Rest]) ->
{ParameterList,Rest2} = parse_ParameterList(Rest),
{Type,Rest3} = parse_Type(Rest2),
case Rest3 of
@@ -2044,13 +1534,9 @@ parse_ParameterizedValueAssignment([{identifier,L1,Name}|Rest]) ->
{Value,Rest5} = parse_Value(Rest4),
{#pvaluedef{pos=L1,name=Name,args=ParameterList,type=Type,
value=Value},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedValueAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ _ ->
+ parse_error(Rest3)
+ end.
%% parse_ParameterizedValueSetTypeAssignment(Tokens) -> Result
%% Result = {#pvaluesetdef{},Rest} | throw()
@@ -2062,14 +1548,9 @@ parse_ParameterizedValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
{ValueSet,Rest5} = parse_ValueSet(Rest4),
{#pvaluesetdef{pos=L1,name=Name,args=ParameterList,
type=Type,valueset=ValueSet},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedValueSetTypeAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest3)
+ end.
%% parse_ParameterizedObjectClassAssignment(Tokens) -> Result
%% Result = {#ptypedef{},Rest} | throw()
@@ -2080,18 +1561,13 @@ parse_ParameterizedObjectClassAssignment([{typereference,L1,Name}|Rest]) ->
{Class,Rest4} = parse_ObjectClass(Rest3),
{#ptypedef{pos=L1,name=Name,args=ParameterList,typespec=Class},
Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ParameterizedObjectClassAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
%% parse_ParameterizedObjectAssignment(Tokens) -> Result
%% Result = {#pobjectdef{},Rest} | throw()
-parse_ParameterizedObjectAssignment([{identifier,L1,Name}|Rest]) ->
+parse_ParameterizedObjectAssignment([#identifier{pos=L1,val=Name}|Rest]) ->
{ParameterList,Rest2} = parse_ParameterList(Rest),
{Class,Rest3} = parse_DefinedObjectClass(Rest2),
case Rest3 of
@@ -2099,36 +1575,9 @@ parse_ParameterizedObjectAssignment([{identifier,L1,Name}|Rest]) ->
{Object,Rest5} = parse_Object(Rest4),
{#pobjectdef{pos=L1,name=Name,args=ParameterList,
class=Class,def=Object},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
-%%% Other ->
-%%% throw(Other)
- end;
-parse_ParameterizedObjectAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
-
-%% parse_ParameterizedObjectSetAssignment(Tokens) -> Result
-%% Result = {#pobjectsetdef{},Rest} | throw{}
-parse_ParameterizedObjectSetAssignment([{typereference,L1,Name}|Rest]) ->
- {ParameterList,Rest2} = parse_ParameterList(Rest),
- {Class,Rest3} = parse_DefinedObjectClass(Rest2),
- case Rest3 of
- [{'::=',_}|Rest4] ->
- {ObjectSet,Rest5} = parse_ObjectSet(Rest4),
- {#pobjectsetdef{pos=L1,name=Name,args=ParameterList,
- class=Class,def=ObjectSet},Rest5};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
-%%% Other ->
-%%% throw(Other)
- end;
-parse_ParameterizedObjectSetAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest3)
+ end.
%% parse_ParameterList(Tokens) -> Result
%% Result = [Parameter]
@@ -2137,35 +1586,24 @@ parse_ParameterizedObjectSetAssignment(Tokens) ->
%% Type = #type{}
%% DefinedObjectClass = #'Externaltypereference'{}
%% Reference = #'Externaltypereference'{} | #'Externalvaluereference'{}
-parse_ParameterList([{'{',_}|Rest]) ->
- parse_ParameterList(Rest,[]);
-parse_ParameterList(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+parse_ParameterList([{'{',_}|Tokens]) ->
+ parse_ParameterList(Tokens, []).
parse_ParameterList(Tokens,Acc) ->
{Parameter,Rest} = parse_Parameter(Tokens),
case Rest of
[{',',_}|Rest2] ->
- parse_ParameterList(Rest2,[Parameter|Acc]);
+ parse_ParameterList(Rest2, [Parameter|Acc]);
[{'}',_}|Rest3] ->
- {lists:reverse([Parameter|Acc]),Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,[',','}']]}})
+ {lists:reverse(Acc, [Parameter]),Rest3};
+ _ ->
+ parse_error(Rest)
end.
parse_Parameter(Tokens) ->
Flist = [fun parse_ParamGovAndRef/1,
fun parse_Reference/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_ParamGovAndRef(Tokens) ->
{ParamGov,Rest} = parse_ParamGovernor(Tokens),
@@ -2173,86 +1611,54 @@ parse_ParamGovAndRef(Tokens) ->
[{':',_}|Rest2] ->
{Ref,Rest3} = parse_Reference(Rest2),
{{ParamGov,Ref},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,':']}})
+ _ ->
+ parse_error(Rest)
end.
parse_ParamGovernor(Tokens) ->
Flist = [fun parse_Governor/1,
fun parse_Reference/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
-
-% parse_ParameterizedReference(Tokens) ->
-% {Ref,Rest} = parse_Reference(Tokens),
-% case Rest of
-% [{'{',_},{'}',_}|Rest2] ->
-% {{ptref,Ref},Rest2};
-% _ ->
-% {{ptref,Ref},Rest}
-% end.
+ parse_or(Tokens, Flist).
parse_SimpleDefinedType([{typereference,L1,ModuleName},{'.',_},
{typereference,_,TypeName}|Rest]) ->
{#'Externaltypereference'{pos=L1,module=ModuleName,
type=TypeName},Rest};
parse_SimpleDefinedType([Tref={typereference,_,_}|Rest]) ->
-% {#'Externaltypereference'{pos=L2,module=get(asn1_module),
-% type=TypeName},Rest};
{tref2Exttref(Tref),Rest};
parse_SimpleDefinedType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [typereference,'typereference.typereference']]}}).
+ parse_error(Tokens).
parse_SimpleDefinedValue([{typereference,L1,ModuleName},{'.',_},
- {identifier,_,Value}|Rest]) ->
+ #identifier{val=Value}|Rest]) ->
{{simpledefinedvalue,#'Externalvaluereference'{pos=L1,module=ModuleName,
value=Value}},Rest};
-parse_SimpleDefinedValue([Id={identifier,_,_Value}|Rest]) ->
+parse_SimpleDefinedValue([#identifier{}=Id|Rest]) ->
{{simpledefinedvalue,identifier2Extvalueref(Id)},Rest};
parse_SimpleDefinedValue(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- ['typereference.identifier',identifier]]}}).
+ parse_error(Tokens).
parse_ParameterizedType(Tokens) ->
+ %% May also be a parameterized class.
{Type,Rest} = parse_SimpleDefinedType(Tokens),
{Params,Rest2} = parse_ActualParameterList(Rest),
- {{pt,Type,Params},Rest2}.
+ {#type{def={pt,Type,Params}},Rest2}.
parse_ParameterizedValue(Tokens) ->
+ %% May also be a parameterized object.
{Value,Rest} = parse_SimpleDefinedValue(Tokens),
{Params,Rest2} = parse_ActualParameterList(Rest),
{{pv,Value,Params},Rest2}.
-parse_ParameterizedObjectClass(Tokens) ->
- {Type,Rest} = parse_DefinedObjectClass(Tokens),
- {Params,Rest2} = parse_ActualParameterList(Rest),
- {{poc,Type,Params},Rest2}.
-
parse_ParameterizedObjectSet(Tokens) ->
{ObjectSet,Rest} = parse_DefinedObjectSet(Tokens),
{Params,Rest2} = parse_ActualParameterList(Rest),
{{pos,ObjectSet,Params},Rest2}.
-parse_ParameterizedObject(Tokens) ->
- {Object,Rest} = parse_DefinedObject(Tokens),
- {Params,Rest2} = parse_ActualParameterList(Rest),
- {{po,Object,Params},Rest2}.
-
parse_ActualParameterList([{'{',_}|Rest]) ->
parse_ActualParameterList(Rest,[]);
parse_ActualParameterList(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
parse_ActualParameterList(Tokens,Acc) ->
{Parameter,Rest} = parse_ActualParameter(Tokens),
@@ -2260,43 +1666,22 @@ parse_ActualParameterList(Tokens,Acc) ->
[{',',_}|Rest2] ->
parse_ActualParameterList(Rest2,[Parameter|Acc]);
[{'}',_}|Rest3] ->
- {lists:reverse([Parameter|Acc]),Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,[',','}']]}})
-%%% Other ->
-%%% throw(Other)
+ {lists:reverse(Acc, [Parameter]),Rest3};
+ _ ->
+ parse_error(Rest)
end.
-
-
-
-
-
-
-%-------------------------
-
+%% Test whether Token is allowed in a syntax list.
is_word(Token) ->
- case not_allowed_word(Token) of
+ List = atom_to_list(Token),
+ case not_allowed_word(List) of
true -> false;
- _ ->
- if
- is_atom(Token) ->
- Item = atom_to_list(Token),
- is_word(Item);
- is_list(Token), length(Token) == 1 ->
- check_one_char_word(Token);
- is_list(Token) ->
- [A|Rest] = Token,
- case check_first(A) of
- true ->
- check_rest(Rest);
- _ ->
- false
- end
- end
+ false -> is_word_1(List)
end.
+is_word_1([H|T]) ->
+ check_first(H) andalso check_rest(T).
+
not_allowed_word(Name) ->
lists:member(Name,["BIT",
"BOOLEAN",
@@ -2321,257 +1706,123 @@ not_allowed_word(Name) ->
"TRUE",
"UNION"]).
-check_one_char_word([A]) when $A =< A, $Z >= A ->
- true;
-check_one_char_word([_]) ->
- false. %% unknown item in SyntaxList
+check_first(C) ->
+ $A =< C andalso C =< $Z.
-check_first(A) when $A =< A, $Z >= A ->
- true;
-check_first(_) ->
- false. %% unknown item in SyntaxList
-
-check_rest([R,R|_Rs]) when $- == R ->
- false; %% two consecutive hyphens are not allowed in a word
-check_rest([R]) when $- == R ->
- false; %% word cannot end with hyphen
-check_rest([R|Rs]) when $A=<R, $Z>=R; $-==R ->
+check_rest([R|Rs]) when $A =< R, R =< $Z; R =:= $- ->
check_rest(Rs);
check_rest([]) ->
true;
check_rest(_) ->
false.
+%%%
+%%% Parse alternative type lists for CHOICE.
+%%%
+
+parse_AlternativeTypeLists(Tokens0) ->
+ {Root,Tokens1} = parse_AlternativeTypeList(Tokens0),
+ case Tokens1 of
+ [{',',_}|Tokens2] ->
+ {ExtMarker,Tokens3} = parse_ExtensionAndException(Tokens2),
+ {ExtAlts,Tokens4} = parse_ExtensionAdditionAlternatives(Tokens3),
+ {_,Tokens} = parse_OptionalExtensionMarker(Tokens4, []),
+ {Root++ExtMarker++ExtAlts,Tokens};
+ Tokens ->
+ {Root,Tokens}
+ end.
+
+parse_ExtensionAndException([{'...',L}|Tokens0]) ->
+ {[#'EXTENSIONMARK'{pos=L}],
+ case Tokens0 of
+ [{'!',_}|Tokens1] ->
+ {_,Tokens} = parse_ExceptionIdentification(Tokens1),
+ Tokens;
+ _ ->
+ Tokens0
+ end}.
+
+parse_AlternativeTypeList([#identifier{}|_]=Tokens0) ->
+ {AltType,Tokens} = parse_NamedType(Tokens0),
+ parse_AlternativeTypeList_1(Tokens, [AltType]);
+parse_AlternativeTypeList(Tokens) ->
+ parse_error(Tokens).
+
+parse_AlternativeTypeList_1([{',',_}|[#identifier{}|_]=Tokens0], Acc) ->
+ {AltType,Tokens} = parse_NamedType(Tokens0),
+ parse_AlternativeTypeList_1(Tokens, [AltType|Acc]);
+parse_AlternativeTypeList_1(Tokens, Acc) ->
+ {lists:reverse(Acc),Tokens}.
-to_set(V) when is_list(V) ->
- ordsets:from_list(V);
-to_set(V) ->
- ordsets:from_list([V]).
-
-parse_AlternativeTypeLists(Tokens) ->
- parse_AlternativeTypeLists(Tokens,[]).
-
-parse_AlternativeTypeLists(Tokens = [{identifier,_,_}|_Rest0],Clist) ->
- {CompList,Rest1} = parse_AlternativeTypeList(Tokens,[]),
- parse_AlternativeTypeLists(Rest1,Clist++CompList);
-parse_AlternativeTypeLists([{'...',L1},{'!',_}|Rest02],Clist0) ->
- {_,Rest03} = parse_ExceptionIdentification(Rest02),
- %% Exception info is currently thrown away
- parse_AlternativeTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-parse_AlternativeTypeLists([{',',L1},{'...',_},{'!',_}|Rest02],Clist0) when Clist0 =/= []->
- {_,Rest03} = parse_ExceptionIdentification(Rest02),
- %% Exception info is currently thrown away
- parse_AlternativeTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-
-parse_AlternativeTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []->
- parse_AlternativeTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-parse_AlternativeTypeLists([{'...',L1}|Rest02],Clist0) ->
- parse_AlternativeTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-parse_AlternativeTypeLists(Tokens = [{'}',_L1}|_Rest02],Clist0) ->
- {Clist0,Tokens}.
-
-parse_AlternativeTypeLists2(Tokens,Clist) ->
- {ExtAdd,Rest} = parse_ExtensionAdditionAlternatives(Tokens,Clist),
- {Clist2,Rest2} = parse_OptionalExtensionMarker(Rest,lists:flatten(ExtAdd)),
- case Rest2 of
- [{',',_}|Rest3] ->
- {CompList,Rest4} = parse_AlternativeTypeList(Rest3,[]),
- {Clist2 ++ CompList,Rest4};
- _ ->
- {Clist2,Rest2}
- end.
-
-
-
-parse_AlternativeTypeList([{',',_},Id = {identifier,_,_}|Rest],Acc) when Acc =/= [] ->
- {AlternativeType,Rest2} = parse_NamedType([Id|Rest]),
- parse_AlternativeTypeList(Rest2,[AlternativeType|Acc]);
-parse_AlternativeTypeList(Tokens = [{'}',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_AlternativeTypeList(Tokens = [{']',_},{']',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_AlternativeTypeList(Tokens = [{',',_},{'...',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_AlternativeTypeList(Tokens,[]) ->
- {AlternativeType,Rest} = parse_NamedType(Tokens),
- parse_AlternativeTypeList(Rest,[AlternativeType]);
-parse_AlternativeTypeList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
-
-parse_ExtensionAdditionAlternatives(Tokens =[{',',_}|_],Clist) ->
- {ExtAddList,Rest2} = parse_ExtensionAdditionAlternativesList(Tokens,[]),
- {Clist++lists:flatten(ExtAddList),Rest2};
-parse_ExtensionAdditionAlternatives(Tokens,Clist) ->
- %% Empty
- {Clist,Tokens}.
+parse_ExtensionAdditionAlternatives([{',',_}|_]=Tokens0) ->
+ parse_ExtensionAdditionAlternativesList(Tokens0, []);
+parse_ExtensionAdditionAlternatives(Tokens) ->
+ {[],Tokens}.
-parse_ExtensionAdditionAlternativesList([{',',_},Id = {identifier,_,_}|Rest],Acc) ->
- {AlternativeType,Rest2} = parse_NamedType([Id|Rest]),
- parse_ExtensionAdditionAlternativesList(Rest2,[AlternativeType|Acc]);
-parse_ExtensionAdditionAlternativesList([{',',_},C1 = {'[',_},C2 = {'[',_}|Rest],Acc) ->
- {ExtAddGroup,Rest2} = parse_ExtensionAdditionAlternativesGroup([C1,C2|Rest],[]),
- parse_ExtensionAdditionAlternativesList(Rest2,[ExtAddGroup|Acc]);
-parse_ExtensionAdditionAlternativesList(Tokens = [{'}',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionAlternativesList(Tokens = [{',',_},{'...',_}|_],Acc) ->
- {lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionAlternativesList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
-
-
-parse_ExtensionAdditionAlternativesGroup([ {'[',_},{'[',_},_VsnNr = {number,_,Num},{':',_}|Rest],[]) ->
- parse_ExtensionAdditionAlternativesGroup2(Rest,Num);
-parse_ExtensionAdditionAlternativesGroup([ {'[',_},{'[',_}|Rest],[]) ->
- parse_ExtensionAdditionAlternativesGroup2(Rest,undefined);
-parse_ExtensionAdditionAlternativesGroup(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['[[']]}}).
-
-
-parse_ExtensionAdditionAlternativesGroup2(Tokens,Num) ->
- {CompTypeList,Rest} = parse_AlternativeTypeList(Tokens,[]),
- case Rest of
- [{']',_},{']',_}|Rest2] ->
- {[{'ExtensionAdditionGroup',Num}|CompTypeList] ++
- ['ExtensionAdditionGroupEnd'],Rest2};
+parse_ExtensionAdditionAlternativesList([{',',_}|Tokens1]=Tokens0, Acc) ->
+ try parse_ExtensionAdditionAlternative(Tokens1) of
+ {ExtAddAlt,Tokens2} ->
+ parse_ExtensionAdditionAlternativesList(Tokens2, [ExtAddAlt|Acc])
+ catch
+ throw:{asn1_error,_} ->
+ {lists:append(lists:reverse(Acc)),Tokens0}
+ end;
+parse_ExtensionAdditionAlternativesList(Tokens, Acc) ->
+ {lists:append(lists:reverse(Acc)),Tokens}.
+
+parse_ExtensionAdditionAlternative([#identifier{}|_]=Tokens0) ->
+ {NamedType,Tokens} = parse_NamedType(Tokens0),
+ {[NamedType],Tokens};
+parse_ExtensionAdditionAlternative([{'[',_},{'[',_}|Tokens0]) ->
+ Tokens2 = case Tokens0 of
+ [{number,_,_},{':',_}|Tokens1] -> Tokens1;
+ _ -> Tokens0
+ end,
+ {GroupList,Tokens3} = parse_AlternativeTypeList(Tokens2),
+ case Tokens3 of
+ [{']',_},{']',_}|Tokens] ->
+ {GroupList,Tokens};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,[']]']]}})
- end.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% parse_AlternativeTypeLists(Tokens,ExtensionDefault) ->
-%% {AltTypeList,Rest1} = parse_AlternativeTypeList(Tokens),
-%% {ExtensionAndException,Rest2} =
-%% case Rest1 of
-%% [{',',_},{'...',L1},{'!',_}|Rest12] ->
-%% {_,Rest13} = parse_ExceptionIdentification(Rest12),
-%% %% Exception info is currently thrown away
-%% {[#'EXTENSIONMARK'{pos=L1}],Rest13};
-%% [{',',_},{'...',L1}|Rest12] ->
-%% {[#'EXTENSIONMARK'{pos=L1}],Rest12};
-%% _ ->
-%% {[],Rest1}
-%% end,
-%% {AltTypeList2,Rest5} =
-%% case ExtensionAndException of
-%% [] ->
-%% {AltTypeList,Rest2};
-%% _ ->
-%% {ExtensionAddition,Rest3} =
-%% case Rest2 of
-%% [{',',_}|Rest23] ->
-%% parse_ExtensionAdditionAlternativeList(Rest23);
-%% _ ->
-%% {[],Rest2}
-%% end,
-%% {OptionalExtensionMarker,Rest4} =
-%% case Rest3 of
-%% [{',',_},{'...',L3}|Rest31] ->
-%% {[#'EXTENSIONMARK'{pos=L3}],Rest31};
-%% _ ->
-%% {[],Rest3}
-%% end,
-%% {AltTypeList ++ ExtensionAndException ++
-%% ExtensionAddition ++ OptionalExtensionMarker, Rest4}
-%% end,
-%% AltTypeList3 =
-%% case [X || X=#'EXTENSIONMARK'{} <- AltTypeList2] of
-%% [] when ExtensionDefault == 'IMPLIED' ->
-%% AltTypeList2 ++ [#'EXTENSIONMARK'{}];
-%% _ ->
-%% AltTypeList2
-%% end,
-%% {AltTypeList3,Rest5}.
-
-
-%% parse_AlternativeTypeList(Tokens) ->
-%% parse_AlternativeTypeList(Tokens,[]).
+ parse_error(Tokens3)
+ end;
+parse_ExtensionAdditionAlternative(Tokens) ->
+ parse_error(Tokens).
-%% parse_AlternativeTypeList(Tokens,Acc) ->
-%% {NamedType,Rest} = parse_NamedType(Tokens),
-%% case Rest of
-%% [{',',_},Id = {identifier,_,_}|Rest2] ->
-%% parse_AlternativeTypeList([Id|Rest2],[NamedType|Acc]);
-%% _ ->
-%% {lists:reverse([NamedType|Acc]),Rest}
-%% end.
+%%%
+%%% End of parsing of alternative type lists.
+%%%
-
-
-%% parse_ExtensionAdditionAlternativeList(Tokens) ->
-%% parse_ExtensionAdditionAlternativeList(Tokens,[]).
-
-%% parse_ExtensionAdditionAlternativeList([{'[[',_}|Rest],Acc) ->
-%% parse_ExtensionAdditionAlternativeList(Rest,Acc);
-%% parse_ExtensionAdditionAlternativeList(Tokens = [{identifier,_,_}|_Rest],Acc) ->
-%% {Element,Rest0} = parse_NamedType(Tokens);
-%% case Rest0 of
-%% [{',',_}|Rest01] ->
-%% parse_ExtensionAdditionAlternativeList(Rest01,[Element|Acc]);
-%% _ ->
-%% {lists:reverse([Element|Acc]),Rest0}
-%% end.
-
-%% parse_ExtensionAdditionAlternatives([{'[[',_}|Rest]) ->
-%% parse_ExtensionAdditionAlternatives(Rest,[]);
-%% parse_ExtensionAdditionAlternatives(Tokens) ->
-%% throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
-%% [got,get_token(hd(Tokens)),expected,'[[']}}).
-
-%% parse_ExtensionAdditionAlternatives([Id = {identifier,_,_}|Rest],Acc) ->
-%% {NamedType, Rest2} = parse_NamedType([Id|Rest]),
-%% case Rest2 of
-%% [{',',_}|Rest21] ->
-%% parse_ExtensionAdditionAlternatives(Rest21,[NamedType|Acc]);
-%% [{']]',_}|Rest21] ->
-%% {lists:reverse(Acc),Rest21};
-%% _ ->
-%% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
-%% [got,get_token(hd(Rest2)),expected,[',',']]']]}})
-%% end.
-
-parse_NamedType([{identifier,L1,Idname}|Rest]) ->
+parse_NamedType([#identifier{pos=L1,val=Idname}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{#'ComponentType'{pos=L1,name=Idname,typespec=Type,prop=mandatory},Rest2};
parse_NamedType(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Tokens).
+%%%
+%%% Parse component type lists for SEQUENCE and SET.
+%%%
parse_ComponentTypeLists(Tokens) ->
- parse_ComponentTypeLists(Tokens,[]).
+ parse_ComponentTypeLists(Tokens, []).
-parse_ComponentTypeLists(Tokens = [{identifier,_,_}|_Rest0],Clist) ->
- {CompList,Rest1} = parse_ComponentTypeList(Tokens,[]),
- parse_ComponentTypeLists(Rest1,Clist++CompList);
-parse_ComponentTypeLists(Tokens = [{'COMPONENTS',_},{'OF',_}|_Rest],Clist) ->
+parse_ComponentTypeLists([#identifier{}|_Rest0]=Tokens, Clist) ->
{CompList,Rest1} = parse_ComponentTypeList(Tokens,[]),
parse_ComponentTypeLists(Rest1,Clist++CompList);
-parse_ComponentTypeLists([{'...',L1},{'!',_}|Rest02],Clist0) ->
- {_,Rest03} = parse_ExceptionIdentification(Rest02),
- %% Exception info is currently thrown away
- parse_ComponentTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
+parse_ComponentTypeLists([{'COMPONENTS',_},{'OF',_}|_]=Tokens,Clist) ->
+ {CompList,Rest1} = parse_ComponentTypeList(Tokens, []),
+ parse_ComponentTypeLists(Rest1, Clist++CompList);
parse_ComponentTypeLists([{',',L1},{'...',_},{'!',_}|Rest02],Clist0) when Clist0 =/= []->
{_,Rest03} = parse_ExceptionIdentification(Rest02),
%% Exception info is currently thrown away
parse_ComponentTypeLists2(Rest03,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
-
- parse_ComponentTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []->
+parse_ComponentTypeLists([{',',_},{'...',L1}|Rest02],Clist0) when Clist0 =/= []->
parse_ComponentTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
parse_ComponentTypeLists([{'...',L1}|Rest02],Clist0) ->
parse_ComponentTypeLists2(Rest02,Clist0++[#'EXTENSIONMARK'{pos=L1}]);
parse_ComponentTypeLists(Tokens = [{'}',_L1}|_Rest02],Clist0) ->
- {Clist0,Tokens}.
+ {Clist0,Tokens};
+parse_ComponentTypeLists(Tokens, _) ->
+ parse_error(Tokens).
parse_ComponentTypeLists2(Tokens,Clist) ->
{ExtAdd,Rest} = parse_ExtensionAdditions(Tokens,Clist),
@@ -2590,12 +1841,12 @@ parse_OptionalExtensionMarker(Tokens,Clist) ->
{Clist,Tokens}.
-parse_ComponentTypeList([{',',_},Id = {identifier,_,_}|Rest],Acc) when Acc =/= [] ->
- {ComponentType,Rest2} = parse_ComponentType([Id|Rest]),
- parse_ComponentTypeList(Rest2,[ComponentType|Acc]);
-parse_ComponentTypeList([{',',_},C1={'COMPONENTS',_},C2={'OF',_}|Rest],Acc) when Acc =/= [] ->
- {ComponentType,Rest2} = parse_ComponentType([C1,C2|Rest]),
- parse_ComponentTypeList(Rest2,[ComponentType|Acc]);
+parse_ComponentTypeList([{',',_}|[#identifier{}|_]=Tokens0], Acc) when Acc =/= [] ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ComponentTypeList(Tokens, [ComponentType|Acc]);
+parse_ComponentTypeList([{',',_}|[{'COMPONENTS',_},{'OF',_}|_]=Tokens0], Acc) when Acc =/= [] ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ComponentTypeList(Tokens, [ComponentType|Acc]);
parse_ComponentTypeList(Tokens = [{'}',_}|_],Acc) ->
{lists:reverse(Acc),Tokens};
parse_ComponentTypeList(Tokens = [{']',_},{']',_}|_],Acc) ->
@@ -2606,10 +1857,7 @@ parse_ComponentTypeList(Tokens,[]) ->
{ComponentType,Rest} = parse_ComponentType(Tokens),
parse_ComponentTypeList(Rest,[ComponentType]);
parse_ComponentTypeList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
+ parse_error(Tokens).
parse_ExtensionAdditions(Tokens=[{',',_}|_],Clist) ->
{ExtAddList,Rest2} = parse_ExtensionAdditionList(Tokens,[]),
@@ -2618,46 +1866,36 @@ parse_ExtensionAdditions(Tokens,Clist) ->
%% Empty
{Clist,Tokens}.
-parse_ExtensionAdditionList([{',',_},Id = {identifier,_,_}|Rest],Acc) ->
- {ComponentType,Rest2} = parse_ComponentType([Id|Rest]),
- parse_ExtensionAdditionList(Rest2,[ComponentType|Acc]);
-parse_ExtensionAdditionList([{',',_},C1={'COMPONENTS',_},C2={'OF',_}|Rest],Acc) ->
- {ComponentType,Rest2} = parse_ComponentType([C1,C2|Rest]),
- parse_ExtensionAdditionList(Rest2,[ComponentType|Acc]);
-parse_ExtensionAdditionList([{',',_},C1 = {'[',_},C2 = {'[',_}|Rest],Acc) ->
- {ExtAddGroup,Rest2} = parse_ExtensionAdditionGroup([C1,C2|Rest],[]),
+parse_ExtensionAdditionList([{',',_}|[#identifier{}|_]=Tokens0], Acc) ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ExtensionAdditionList(Tokens, [ComponentType|Acc]);
+parse_ExtensionAdditionList([{',',_}|[{'COMPONENTS',_},{'OF',_}|_]=Tokens0], Acc) ->
+ {ComponentType,Tokens} = parse_ComponentType(Tokens0),
+ parse_ExtensionAdditionList(Tokens, [ComponentType|Acc]);
+parse_ExtensionAdditionList([{',',_},{'[',_},{'[',_}|Tokens], Acc) ->
+ {ExtAddGroup,Rest2} = parse_ExtensionAdditionGroup(Tokens),
parse_ExtensionAdditionList(Rest2,[ExtAddGroup|Acc]);
-parse_ExtensionAdditionList(Tokens = [{'}',_}|_],Acc) ->
+parse_ExtensionAdditionList([{'}',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionList(Tokens = [{',',_},{'...',_}|_],Acc) ->
+parse_ExtensionAdditionList([{',',_},{'...',_}|_]=Tokens, Acc) ->
{lists:reverse(Acc),Tokens};
-parse_ExtensionAdditionList(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['}',', identifier']]}}).
-
+parse_ExtensionAdditionList(Tokens, _) ->
+ parse_error(Tokens).
-parse_ExtensionAdditionGroup([ {'[',_},{'[',_},_VsnNr = {number,_,Num},{':',_}|Rest],[]) ->
- parse_ExtensionAdditionGroup2(Rest,Num);
-parse_ExtensionAdditionGroup([ {'[',_},{'[',_}|Rest],[]) ->
- parse_ExtensionAdditionGroup2(Rest,undefined);
-parse_ExtensionAdditionGroup(Tokens,_) ->
- throw({asn1_error,
- {get_line(hd(Tokens)),get(asn1_module),
- [got,[get_token(hd(Tokens)),get_token(hd(tl(Tokens)))],
- expected,['[[']]}}).
+parse_ExtensionAdditionGroup([{number,_,Num},{':',_}|Tokens]) ->
+ parse_ExtensionAdditionGroup2(Tokens, Num);
+parse_ExtensionAdditionGroup(Tokens) ->
+ parse_ExtensionAdditionGroup2(Tokens, undefined).
-parse_ExtensionAdditionGroup2(Tokens,Num) ->
+parse_ExtensionAdditionGroup2(Tokens, Num) ->
{CompTypeList,Rest} = parse_ComponentTypeList(Tokens,[]),
case Rest of
[{']',_},{']',_}|Rest2] ->
{[{'ExtensionAdditionGroup',Num}|CompTypeList] ++
['ExtensionAdditionGroupEnd'],Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,[']]']]}})
+ parse_error(Rest)
end.
@@ -2676,83 +1914,81 @@ parse_ComponentType(Tokens) ->
Result
end.
-
+%%%
+%%% Parse ENUMERATED.
+%%%
-parse_SignedNumber([{number,_,Value}|Rest]) ->
- {Value,Rest};
-parse_SignedNumber([{'-',_},{number,_,Value}|Rest]) ->
- {-Value,Rest};
-parse_SignedNumber(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- [number,'-number']]}}).
-
-parse_Enumerations(Tokens=[{identifier,_,_}|_Rest],ExtensionDefault) ->
- parse_Enumerations(Tokens,[],ExtensionDefault);
-parse_Enumerations([H|_T],_) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,identifier]}}).
-
-parse_Enumerations(Tokens = [{identifier,_,_},{'(',_}|_Rest], Acc, ExtensionDefault) ->
- {NamedNumber,Rest2} = parse_NamedNumber(Tokens),
- case Rest2 of
- [{',',_}|Rest3] ->
- parse_Enumerations(Rest3,[NamedNumber|Acc], ExtensionDefault);
- _ when ExtensionDefault == 'IMPLIED'->
- {lists:reverse(['EXTENSIONMARK',NamedNumber|Acc]),Rest2};
+parse_Enumerations(Tokens0) ->
+ {Root,Tokens1} = parse_Enumeration(Tokens0),
+ case Tokens1 of
+ [{',',_},{'...',_},{',',_}|Tokens2] ->
+ {Ext,Tokens} = parse_Enumeration(Tokens2),
+ {Root++['EXTENSIONMARK'|Ext],Tokens};
+ [{',',_},{'...',_}|Tokens] ->
+ {Root++['EXTENSIONMARK'],Tokens};
_ ->
- {lists:reverse([NamedNumber|Acc]),Rest2}
- end;
-parse_Enumerations([{identifier,_,Id}|Rest], Acc, ExtensionDefault) ->
- case Rest of
- [{',',_}|Rest2] ->
- parse_Enumerations(Rest2,[Id|Acc], ExtensionDefault);
- _ when ExtensionDefault == 'IMPLIED' ->
- {lists:reverse(['EXTENSIONMARK', Id |Acc]),Rest};
- _ ->
- {lists:reverse([Id|Acc]),Rest}
- end;
-parse_Enumerations([{'...',_}|Rest], Acc, _ExtensionDefault) ->
- case Rest of
- [{',',_}|Rest2] ->
- parse_Enumerations(Rest2,['EXTENSIONMARK'|Acc],undefined);
- _ ->
- {lists:reverse(['EXTENSIONMARK'|Acc]),Rest}
+ case get(extensiondefault) of
+ 'IMPLIED' ->
+ {Root++['EXTENSIONMARK'],Tokens1};
+ _ ->
+ {Root,Tokens1}
+ end
+ end.
+
+parse_Enumeration(Tokens0) ->
+ {Item,Tokens} = parse_EnumerationItem(Tokens0),
+ parse_Enumeration_1(Tokens, [Item]).
+
+parse_Enumeration_1([{',',_}|Tokens1]=Tokens0, Acc) ->
+ try parse_EnumerationItem(Tokens1) of
+ {Item,Tokens} ->
+ parse_Enumeration_1(Tokens, [Item|Acc])
+ catch
+ throw:{asn1_error,_} ->
+ {lists:reverse(Acc),Tokens0}
end;
-parse_Enumerations([H|_T],_,_) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,identifier]}}).
+parse_Enumeration_1(Tokens, Acc) ->
+ {lists:reverse(Acc),Tokens}.
+
+parse_EnumerationItem([#identifier{},{'(',_}|_]=Tokens) ->
+ parse_NamedNumber(Tokens);
+parse_EnumerationItem([#identifier{val=Id}|Tokens]) ->
+ {Id,Tokens};
+parse_EnumerationItem(Tokens) ->
+ parse_error(Tokens).
+
+%%%
+%%% End of parsing of ENUMERATED.
+%%%
parse_NamedNumberList(Tokens) ->
- parse_NamedNumberList(Tokens,[]).
+ parse_NamedNumberList(Tokens, []).
-parse_NamedNumberList(Tokens,Acc) ->
+parse_NamedNumberList(Tokens, Acc) ->
{NamedNum,Rest} = parse_NamedNumber(Tokens),
case Rest of
[{',',_}|Rest2] ->
parse_NamedNumberList(Rest2,[NamedNum|Acc]);
_ ->
- {lists:reverse([NamedNum|Acc]),Rest}
+ {lists:reverse(Acc, [NamedNum]),Rest}
end.
-parse_NamedNumber([{identifier,_,Name},{'(',_}|Rest]) ->
+parse_NamedNumber([#identifier{val=Name},{'(',_}|Rest]) ->
Flist = [fun parse_SignedNumber/1,
fun parse_DefinedValue/1],
- case (catch parse_or(Rest,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
+ case parse_or(Rest, Flist) of
{NamedNum,[{')',_}|Rest2]} ->
{{'NamedNumber',Name,NamedNum},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,'NamedNumberList']}})
+ parse_error(Rest)
end;
parse_NamedNumber(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Tokens).
+parse_SignedNumber([{number,_,Value}|Rest]) ->
+ {Value,Rest};
+parse_SignedNumber(Tokens) ->
+ parse_error(Tokens).
parse_Tag([{'[',_}|Rest]) ->
{Class,Rest2} = parse_Class(Rest),
@@ -2767,12 +2003,8 @@ parse_Tag([{'[',_}|Rest]) ->
[{']',_}|Rest4] ->
{#tag{class=Class,number=ClassNumber},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest3)),get(asn1_module),
- [got,get_token(hd(Rest3)),expected,']']}})
- end;
-parse_Tag(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'[']}}).
+ parse_error(Rest3)
+ end.
parse_Class([{'UNIVERSAL',_}|Rest]) ->
{'UNIVERSAL',Rest};
@@ -2791,15 +2023,7 @@ parse_Value(Tokens) ->
Flist = [fun parse_BuiltinValue/1,
fun parse_ValueFromObject/1,
fun parse_DefinedValue/1],
-
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end.
+ parse_or(Tokens, Flist).
parse_BuiltinValue([{bstring,_,Bstr}|Rest]) ->
{{bstring,Bstr},Rest};
@@ -2812,18 +2036,11 @@ parse_BuiltinValue(Tokens = [{'{',_}|_Rest]) ->
fun parse_SequenceOfValue/1,
fun parse_SequenceValue/1,
fun parse_ObjectIdentifierValue/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- Result ->
- Result
- end;
-parse_BuiltinValue([{identifier,_,IdName},{':',_}|Rest]) ->
+ parse_or(Tokens, Flist);
+parse_BuiltinValue([#identifier{val=IdName},{':',_}|Rest]) ->
{Value,Rest2} = parse_Value(Rest),
{{'CHOICE',{IdName,Value}},Rest2};
-parse_BuiltinValue(Tokens=[{'NULL',_},{':',_}|_Rest]) ->
+parse_BuiltinValue([{'NULL',_},{':',_}|_]=Tokens) ->
parse_ObjectClassFieldValue(Tokens);
parse_BuiltinValue([{'NULL',_}|Rest]) ->
{'NULL',Rest};
@@ -2839,31 +2056,29 @@ parse_BuiltinValue([{cstring,_,Cstr}|Rest]) ->
{Cstr,Rest};
parse_BuiltinValue([{number,_,Num}|Rest]) ->
{Num,Rest};
-parse_BuiltinValue([{'-',_},{number,_,Num}|Rest]) ->
- {- Num,Rest};
parse_BuiltinValue(Tokens) ->
parse_ObjectClassFieldValue(Tokens).
-parse_DefinedValue(Tokens=[{identifier,_,_},{'{',_}|_Rest]) ->
- parse_ParameterizedValue(Tokens);
-%% Externalvaluereference
-parse_DefinedValue([{typereference,L1,Tname},{'.',_},{identifier,_,Idname}|Rest]) ->
+parse_DefinedValue(Tokens) ->
+ Flist = [fun parse_ParameterizedValue/1,
+ fun parse_DefinedValue2/1],
+ parse_or(Tokens, Flist).
+
+parse_DefinedValue2([{typereference,L1,Tname},
+ {'.',_},
+ #identifier{val=Idname}|Rest]) ->
{#'Externalvaluereference'{pos=L1,module=Tname,value=Idname},Rest};
%% valuereference
-parse_DefinedValue([Id = {identifier,_,_}|Rest]) ->
+parse_DefinedValue2([#identifier{}=Id|Rest]) ->
{identifier2Extvalueref(Id),Rest};
-%% ParameterizedValue
-parse_DefinedValue(Tokens) ->
- parse_ParameterizedValue(Tokens).
+parse_DefinedValue2(Tokens) ->
+ parse_error(Tokens).
parse_SequenceValue([{'{',_}|Tokens]) ->
- parse_SequenceValue(Tokens,[]);
-parse_SequenceValue(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_SequenceValue(Tokens, []).
-parse_SequenceValue([{identifier,Pos,IdName}|Rest],Acc) ->
+parse_SequenceValue([#identifier{pos=Pos,val=IdName}|Rest],Acc) ->
{Value,Rest2} = parse_Value(Rest),
SeqTag = #seqtag{pos=Pos,module=get(asn1_module),val=IdName},
case Rest2 of
@@ -2872,18 +2087,13 @@ parse_SequenceValue([{identifier,Pos,IdName}|Rest],Acc) ->
[{'}',_}|Rest3] ->
{lists:reverse(Acc, [{SeqTag,Value}]),Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end;
parse_SequenceValue(Tokens,_Acc) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Tokens).
parse_SequenceOfValue([{'{',_}|Tokens]) ->
- parse_SequenceOfValue(Tokens,[]);
-parse_SequenceOfValue(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_SequenceOfValue(Tokens, []).
parse_SequenceOfValue(Tokens,Acc) ->
{Value,Rest2} = parse_Value(Tokens),
@@ -2891,10 +2101,9 @@ parse_SequenceOfValue(Tokens,Acc) ->
[{',',_}|Rest3] ->
parse_SequenceOfValue(Rest3,[Value|Acc]);
[{'}',_}|Rest3] ->
- {lists:reverse([Value|Acc]),Rest3};
+ {lists:reverse(Acc, [Value]),Rest3};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'}']}})
+ parse_error(Rest2)
end.
parse_ValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
@@ -2904,49 +2113,31 @@ parse_ValueSetTypeAssignment([{typereference,L1,Name}|Rest]) ->
{ValueSet,Rest4} = parse_ValueSet(Rest3),
{#valuedef{pos=L1,name=Name,type=Type,value=ValueSet,
module=get(asn1_module)},Rest4};
- [H|_T] ->
- throw({asn1_error,{get_line(L1),get(asn1_module),
- [got,get_token(H),expected,'::=']}})
- end;
-parse_ValueSetTypeAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,
- typereference]}}).
+ _ ->
+ parse_error(Rest2)
+ end.
parse_ValueSet([{'{',_}|Rest]) ->
{Elems,Rest2} = parse_ElementSetSpecs(Rest),
case Rest2 of
[{'}',_}|Rest3] ->
{{valueset,Elems},Rest3};
- [H|_T] ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,'}']}})
+ _ ->
+ parse_error(Rest2)
end;
parse_ValueSet(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'{']}}).
+ parse_error(Tokens).
-parse_ValueAssignment([{identifier,L1,IdName}|Rest]) ->
+parse_ValueAssignment([#identifier{pos=L1,val=IdName}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
case Rest2 of
[{'::=',_}|Rest3] ->
{Value,Rest4} = parse_Value(Rest3),
- case catch lookahead_assignment(Rest4) of
- ok ->
- {#valuedef{pos=L1,name=IdName,type=Type,value=Value,
- module=get(asn1_module)},Rest4};
- Error ->
- throw(Error)
-%% throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
-%% [got,get_token(hd(Rest2)),expected,'::=']}})
- end;
+ {#valuedef{pos=L1,name=IdName,type=Type,value=Value,
+ module=get(asn1_module)},Rest4};
_ ->
- throw({asn1_error,{get_line(hd(Rest2)),get(asn1_module),
- [got,get_token(hd(Rest2)),expected,'::=']}})
- end;
-parse_ValueAssignment(Tokens) ->
- throw({asn1_assignment_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,identifier]}}).
+ parse_error(Rest2)
+ end.
%% SizeConstraint
parse_SubtypeElements([{'SIZE',_}|Tokens]) ->
@@ -2966,8 +2157,7 @@ parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_},{'...',_},{',',_}|Tok
[{'}',_}|Rest2] ->
{{'WITH COMPONENTS',{'PartialSpecification',Constraint}},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,'}']}})
+ parse_error(Rest)
end;
parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_}|Tokens]) ->
{Constraint,Rest} = parse_TypeConstraints(Tokens),
@@ -2975,28 +2165,18 @@ parse_SubtypeElements([{'WITH',_},{'COMPONENTS',_},{'{',_}|Tokens]) ->
[{'}',_}|Rest2] ->
{{'WITH COMPONENTS',{'FullSpecification',Constraint}},Rest2};
_ ->
- throw({asn1_error,{get_line(hd(Rest)),get(asn1_module),
- [got,get_token(hd(Rest)),expected,'}']}})
+ parse_error(Rest)
end;
parse_SubtypeElements([{'PATTERN',_}|Tokens]) ->
{Value,Rest} = parse_Value(Tokens),
{{pattern,Value},Rest};
-%% SingleValue
-%% ContainedSubtype
-%% ValueRange
-%% TypeConstraint
-%% Moved fun parse_Value/1 and fun parse_Type/1 to parse_Elements
parse_SubtypeElements(Tokens) ->
Flist = [fun parse_ContainedSubtype/1,
fun parse_Value/1,
- fun([{'MIN',_}|T]) -> {'MIN',T} end,
+ fun parse_MIN/1,
fun parse_Type/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- {asn1_error,Reason} ->
- throw(Reason);
- Result = {Val,_} when is_record(Val,type) ->
+ case parse_or(Tokens, Flist) of
+ {#type{},_}=Result ->
Result;
{Lower,[{'..',_}|Rest]} ->
{Upper,Rest2} = parse_UpperEndpoint(Rest),
@@ -3014,10 +2194,7 @@ parse_ContainedSubtype([{'INCLUDES',_}|Rest]) ->
{Type,Rest2} = parse_Type(Rest),
{{'ContainedSubtype',Type},Rest2};
parse_ContainedSubtype(Tokens) ->
- throw({asn1_error,{get_line(hd(Tokens)),get(asn1_module),
- [got,get_token(hd(Tokens)),expected,'INCLUDES']}}).
-%%parse_ContainedSubtype(Tokens) -> %this option is moved to parse_SubtypeElements
-%% parse_Type(Tokens).
+ parse_error(Tokens).
parse_UpperEndpoint([{'<',_}|Rest]) ->
parse_UpperEndpoint(lt,Rest);
@@ -3025,33 +2202,38 @@ parse_UpperEndpoint(Tokens) ->
parse_UpperEndpoint(false,Tokens).
parse_UpperEndpoint(Lt,Tokens) ->
- Flist = [ fun([{'MAX',_}|T]) -> {'MAX',T} end,
- fun parse_Value/1],
- case (catch parse_or(Tokens,Flist)) of
- {'EXIT',Reason} ->
- exit(Reason);
- AsnErr = {asn1_error,_} ->
- throw(AsnErr);
- {Value,Rest2} when Lt == lt ->
+ Flist = [fun parse_MAX/1,
+ fun parse_Value/1],
+ case parse_or(Tokens, Flist) of
+ {Value,Rest2} when Lt =:= lt ->
{{lt,Value},Rest2};
{Value,Rest2} ->
{Value,Rest2}
end.
+parse_MIN([{'MIN',_}|T]) ->
+ {'MIN',T};
+parse_MIN(Tokens) ->
+ parse_error(Tokens).
+
+parse_MAX([{'MAX',_}|T]) ->
+ {'MAX',T};
+parse_MAX(Tokens) ->
+ parse_error(Tokens).
+
parse_TypeConstraints(Tokens) ->
- parse_TypeConstraints(Tokens,[]).
+ parse_TypeConstraints(Tokens, []).
-parse_TypeConstraints([{identifier,_,_}|Rest],Acc) ->
+parse_TypeConstraints([#identifier{}|Rest], Acc) ->
{ComponentConstraint,Rest2} = parse_ComponentConstraint(Rest),
case Rest2 of
[{',',_}|Rest3] ->
- parse_TypeConstraints(Rest3,[ComponentConstraint|Acc]);
+ parse_TypeConstraints(Rest3, [ComponentConstraint|Acc]);
_ ->
- {lists:reverse([ComponentConstraint|Acc]),Rest2}
+ {lists:reverse(Acc, [ComponentConstraint]),Rest2}
end;
-parse_TypeConstraints([H|_T],_) ->
- throw({asn1_error,{get_line(H),get(asn1_module),
- [got,get_token(H),expected,identifier]}}).
+parse_TypeConstraints(Tokens, _) ->
+ parse_error(Tokens).
parse_ComponentConstraint(Tokens = [{'(',_}|_Rest]) ->
{ValueConstraint,Rest2} = parse_Constraint(Tokens),
@@ -3071,145 +2253,36 @@ parse_PresenceConstraint(Tokens) ->
{asn1_empty,Tokens}.
-% merge_constraints({Rlist,ExtList}) -> % extensionmarker in constraint
-% {merge_constraints(Rlist,[],[]),
-% merge_constraints(ExtList,[],[])};
-
-%% An arg with a constraint with extension marker will look like
-%% [#constraint{c={Root,Ext}}|Rest]
-
merge_constraints(Clist) ->
merge_constraints(Clist, [], []).
-merge_constraints([Ch|Ct],Cacc, Eacc) ->
- NewEacc = case Ch#constraint.e of
- undefined -> Eacc;
- E -> [E|Eacc]
- end,
- merge_constraints(Ct,[fixup_constraint(Ch#constraint.c)|Cacc],NewEacc);
-
-merge_constraints([],Cacc,[]) ->
-%% lists:flatten(Cacc);
+merge_constraints([#constraint{c=C,e=E}|T], Cacc0, Eacc0) ->
+ Eacc = case E of
+ undefined -> Eacc0;
+ E -> [E|Eacc0]
+ end,
+ Cacc = [C|Cacc0],
+ merge_constraints(T, Cacc, Eacc);
+merge_constraints([], Cacc, []) ->
lists:reverse(Cacc);
-merge_constraints([],Cacc,Eacc) ->
-%% lists:flatten(Cacc) ++ [{'Errors',Eacc}].
- lists:reverse(Cacc) ++ [{'Errors',Eacc}].
-
-
-fixup_constraint(C) ->
- case C of
- {'SingleValue',SubType} when element(1,SubType) == 'ContainedSubtype' ->
- SubType;
- {'SingleValue',V} when is_list(V) ->
- C;
- %% [C,{'ValueRange',{lists:min(V),lists:max(V)}}];
- %% bug, turns wrong when an element in V is a reference to a defined value
- {'PermittedAlphabet',{'SingleValue',V}} when is_list(V) ->
- %%sort and remove duplicates
- V2 = {'SingleValue',
- ordsets:from_list(lists:flatten(V))},
- {'PermittedAlphabet',V2};
- {'PermittedAlphabet',{'SingleValue',V}} ->
- V2 = {'SingleValue',[V]},
- {'PermittedAlphabet',V2};
- {'SizeConstraint',Sc} ->
- {'SizeConstraint',fixup_size_constraint(Sc)};
-
- List when is_list(List) -> %% In This case maybe a union or intersection
- [fixup_constraint(Xc)||Xc <- List];
- Other ->
- Other
- end.
+merge_constraints([], Cacc, Eacc) ->
+ lists:reverse(Cacc) ++ [{element_set,{'Errors',Eacc},none}].
-fixup_size_constraint({'ValueRange',{Lb,Ub}}) ->
- {Lb,Ub};
-fixup_size_constraint({{'ValueRange',R},[]}) ->
- {R,[]};
-fixup_size_constraint({[],{'ValueRange',R}}) ->
- {[],R};
-fixup_size_constraint({{'ValueRange',R1},{'ValueRange',R2}}) ->
- {R1,R2};
-fixup_size_constraint({'SingleValue',[Sv]}) ->
- fixup_size_constraint({'SingleValue',Sv});
-fixup_size_constraint({'SingleValue',L}) when is_list(L) ->
- ordsets:from_list(L);
-fixup_size_constraint({'SingleValue',L}) ->
- {L,L};
-fixup_size_constraint({'SizeConstraint',C}) ->
- %% this is a second SIZE
- fixup_size_constraint(C);
-fixup_size_constraint({C1,C2}) ->
- %% this is with extension marks
- {turn2vr(fixup_size_constraint(C1)), extension_size(fixup_size_constraint(C2))};
-fixup_size_constraint(CList) when is_list(CList) ->
- [fixup_constraint(Xc)||Xc <- CList].
-
-turn2vr(L) when is_list(L) ->
- L2 =[X||X<-ordsets:from_list(L),is_integer(X)],
- case L2 of
- [H|_] ->
- {H,hd(lists:reverse(L2))};
- _ ->
- L
- end;
-turn2vr(VR) ->
- VR.
-extension_size({I,I}) ->
- [I];
-extension_size({I1,I2}) ->
- [I1,I2];
-extension_size(C) ->
- C.
-
-get_line({_,Pos,Token}) when is_integer(Pos),is_atom(Token) ->
+get_line({Token,Pos,_}) when is_integer(Pos), is_atom(Token) ->
Pos;
get_line({Token,Pos}) when is_integer(Pos),is_atom(Token) ->
- Pos;
-get_line(_) ->
- undefined.
-
-get_token({_,Pos,Token}) when is_integer(Pos),is_atom(Token) ->
- Token;
+ Pos.
+
+get_token({valuefieldreference,_,FieldName}) ->
+ list_to_atom([$&|atom_to_list(FieldName)]);
+get_token({typefieldreference,_,FieldName}) ->
+ list_to_atom([$&|atom_to_list(FieldName)]);
+get_token({Token,Pos,Value}) when is_integer(Pos), is_atom(Token) ->
+ Value;
get_token({'$end',Pos}) when is_integer(Pos) ->
- undefined;
+ 'END-OF-FILE';
get_token({Token,Pos}) when is_integer(Pos),is_atom(Token) ->
- Token;
-get_token(_) ->
- undefined.
-
-prioritize_error(ErrList) ->
- case lists:keymember(asn1_error,1,ErrList) of
- false -> % only asn1_assignment_error -> take the last
- lists:last(ErrList);
- true -> % contains errors from deeper in a Type
- NewErrList = [_Err={_,_}|_RestErr] =
- lists:filter(fun({asn1_error,_})->true;(_)->false end,
- ErrList),
- SplitErrs =
- lists:splitwith(fun({_,X})->
- case element(1,X) of
- Int when is_integer(Int) -> true;
- _ -> false
- end
- end,
- NewErrList),
- case SplitErrs of
- {[],UndefPosErrs} -> % if no error with Positon exists
- lists:last(UndefPosErrs);
- {IntPosErrs,_} ->
- IntPosReasons = lists:map(fun(X)->element(2,X) end,IntPosErrs),
- SortedReasons = lists:keysort(1,IntPosReasons),
- {asn1_error,lists:last(SortedReasons)}
- end
- end.
-
-%% most_prio_error([H={_,Reason}|T],Atom,Err) when is_atom(Atom) ->
-%% most_prio_error(T,element(1,Reason),H);
-%% most_prio_error([H={_,Reason}|T],Greatest,Err) ->
-%% case element(1,Reason) of
-%% Pos when is_integer(Pos),Pos>Greatest ->
-%% most_prio_error(
-
+ Token.
tref2Exttref(#typereference{pos=Pos,val=Name}) ->
#'Externaltypereference'{pos=Pos,
@@ -3226,19 +2299,5 @@ identifier2Extvalueref(#identifier{pos=Pos,val=Name}) ->
module=resolve_module(Name),
value=Name}.
-%% lookahead_assignment/1 checks that the next sequence of tokens
-%% in Token contain a valid assignment or the
-%% 'END' token. Otherwise an exception is thrown.
-lookahead_assignment([{'END',_}|_Rest]) ->
- ok;
-lookahead_assignment(Tokens) ->
- parse_Assignment(Tokens),
- ok.
-
-is_pre_defined_class('TYPE-IDENTIFIER') ->
- true;
-is_pre_defined_class('ABSTRACT-SYNTAX') ->
- true;
-is_pre_defined_class(_) ->
- false.
-
+parse_error(Tokens) ->
+ throw({asn1_error,{parse_error,Tokens}}).
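Editorial note: the parser changes above converge on a single error convention. Every parse function either returns {Result,RemainingTokens} or calls parse_error/1, which throws {asn1_error,{parse_error,Tokens}}; the repetitive catch/rethrow wrappers around parse_or/2 then become unnecessary. The actual definition of parse_or/2 lies outside this excerpt, so the sketch below is only an illustration of how such a combinator fits this convention — the clause bodies are an assumption, not code from the patch:

    %% Try each parser fun in turn; return the first success,
    %% or report a parse error on the original tokens if all fail.
    parse_or(Tokens, [F|Fs]) ->
        try
            F(Tokens)
        catch
            throw:{asn1_error,_} ->
                parse_or(Tokens, Fs)
        end;
    parse_or(Tokens, []) ->
        parse_error(Tokens).
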
diff --git a/lib/asn1/src/asn1ct_tok.erl b/lib/asn1/src/asn1ct_tok.erl
index 8687ed955c..d51fea6402 100644
--- a/lib/asn1/src/asn1ct_tok.erl
+++ b/lib/asn1/src/asn1ct_tok.erl
@@ -21,191 +21,177 @@
%% Tokenize ASN.1 code (input to parser generated with yecc)
--export([get_name/2,tokenise/4, file/1]).
+-export([file/1,format_error/1]).
-
-file(File) ->
- case file:open(File, [read]) of
+file(File0) ->
+ case file:open(File0, [read]) of
{error, Reason} ->
- {error,{File,file:format_error(Reason)}};
+ {error,{File0,file:format_error(Reason)}};
{ok,Stream} ->
- process(Stream,0,[])
+ try
+ process(Stream, 1, [])
+ catch
+ throw:{error,Line,Reason} ->
+ File = filename:basename(File0),
+ Error = {structured_error,{File,Line},?MODULE,Reason},
+ {error,[Error]}
+ end
end.
-process(Stream,Lno,R) ->
- process(io:get_line(Stream, ''), Stream,Lno+1,R).
+process(Stream, Lno, R) ->
+ process(io:get_line(Stream, ''), Stream, Lno, R).
-process(eof, Stream,Lno,R) ->
+process(eof, Stream, Lno, Acc) ->
ok = file:close(Stream),
- lists:flatten(lists:reverse([{'$end',Lno}|R]));
-
-
-process(L, Stream,Lno,R) when is_list(L) ->
- %%io:format('read:~s',[L]),
- case catch tokenise(Stream,L,Lno,[]) of
- {'ERR',Reason} ->
- io:format("Tokeniser error on line: ~w ~w~n",[Lno,Reason]),
- exit(0);
- {NewLno,T} ->
- %%io:format('toks:~w~n',[T]),
- process(Stream,NewLno,[T|R])
- end.
-
-tokenise(Stream,[H|T],Lno,R) when $a =< H , H =< $z ->
- {X, T1} = get_name(T, [H]),
- tokenise(Stream,T1,Lno,[{identifier,Lno, list_to_atom(X)}|R]);
-
-tokenise(Stream,[$&,H|T],Lno,R) when $A =< H , H =< $Z ->
- {Y, T1} = get_name(T, [H]),
- X = list_to_atom(Y),
- tokenise(Stream,T1,Lno,[{typefieldreference, Lno, X} | R]);
-
-tokenise(Stream,[$&,H|T],Lno,R) when $a =< H , H =< $z ->
- {Y, T1} = get_name(T, [H]),
- X = list_to_atom(Y),
- tokenise(Stream,T1,Lno,[{valuefieldreference, Lno, X} | R]);
-
-tokenise(Stream,[H|T],Lno,R) when $A =< H , H =< $Z ->
- {Y, T1} = get_name(T, [H]),
- X = list_to_atom(Y),
- case reserved_word(X) of
- true ->
- tokenise(Stream,T1,Lno,[{X,Lno}|R]);
- false ->
- tokenise(Stream,T1,Lno,[{typereference,Lno,X}|R]);
- rstrtype ->
- tokenise(Stream,T1,Lno,[{restrictedcharacterstringtype,Lno,X}|R])
- end;
-
-tokenise(Stream,[$-,H|T],Lno,R) when $0 =< H , H =< $9 ->
- {X, T1} = get_number(T, [H]),
- tokenise(Stream,T1,Lno,[{number,Lno,-1 * list_to_integer(X)}|R]);
+ lists:reverse([{'$end',Lno}|Acc]);
+process(L, Stream, Lno0, Acc) when is_list(L) ->
+ try tokenise(Stream, L, Lno0, []) of
+ {Lno,[]} ->
+ process(Stream, Lno, Acc);
+ {Lno,Ts} ->
+ process(Stream, Lno, Ts++Acc)
+ catch
+ throw:{error,Reason} ->
+ throw({error,Lno0,Reason})
+ end.
-tokenise(Stream,[H|T],Lno,R) when $0 =< H , H =< $9 ->
+format_error(eof_in_comment) ->
+ "premature end of file in multi-line comment";
+format_error(eol_in_token) ->
+ "end of line in token";
+format_error({invalid_binary_number,Str}) ->
+ io_lib:format("invalid binary number: '~s'", [Str]);
+format_error({invalid_hex_number,Str}) ->
+ io_lib:format("invalid hex number: '~s'", [Str]);
+format_error(Other) ->
+ io_lib:format("~p", [Other]).
+
+tokenise(Stream, [$&,H|T], Lno, R) when $A =< H , H =< $Z ->
+ {X,T1} = get_name(T, [H]),
+ tokenise(Stream, T1, Lno, [{typefieldreference,Lno,X}|R]);
+tokenise(Stream, [$&,H|T], Lno, R) when $a =< H , H =< $z ->
+ {X,T1} = get_name(T, [H]),
+ tokenise(Stream, T1, Lno, [{valuefieldreference,Lno,X}|R]);
+
+tokenise(Stream, "--"++T, Lno, R) ->
+ tokenise(Stream, skip_comment(T), Lno, R);
+
+tokenise(Stream, [$-,H|T], Lno, R) when $0 =< H , H =< $9 ->
{X, T1} = get_number(T, [H]),
- tokenise(Stream,T1,Lno,[{number,Lno,list_to_integer(X)}|R]);
-
-tokenise(Stream,[$-,$-|T],Lno,R) ->
- tokenise(Stream,skip_comment(T),Lno,R);
+ tokenise(Stream, T1, Lno, [{number,Lno,-list_to_integer(X)}|R]);
-tokenise(Stream,[$/,$*|T],Lno,R) ->
- {NewLno,T1} = skip_multiline_comment(Stream,T,Lno,0),
- tokenise(Stream,T1,NewLno,R);
+tokenise(Stream, "/*"++T, Lno0, R) ->
+ {Lno,T1} = skip_multiline_comment(Stream, T, Lno0, 0),
+ tokenise(Stream, T1, Lno, R);
-tokenise(Stream,[$:,$:,$=|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'::=',Lno}|R]);
-
-tokenise(Stream,[$'|T],Lno,R) ->
- case catch collect_quoted(T,Lno,[]) of
- {'ERR',_} ->
- throw({'ERR','bad_quote'});
- {Thing, T1} ->
- tokenise(Stream,T1,Lno,[Thing|R])
- end;
+tokenise(Stream, "::="++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'::=',Lno}|R]);
+tokenise(Stream, ":"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{':',Lno}|R]);
+tokenise(Stream, "'"++T0, Lno, R) ->
+ {Thing, T1} = collect_quoted(T0, Lno, []),
+ tokenise(Stream, T1, Lno, [Thing|R]);
tokenise(Stream,[$"|T],Lno,R) ->
{Str,T1} = collect_string(T,Lno),
tokenise(Stream,T1,Lno,[Str|R]);
-tokenise(Stream,[${|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'{',Lno}|R]);
-
-tokenise(Stream,[$}|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'}',Lno}|R]);
-
-%% tokenise(Stream,[$],$]|T],Lno,R) ->
-%% tokenise(Stream,T,Lno,[{']]',Lno}|R]);
+tokenise(Stream, "{"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'{',Lno}|R]);
+tokenise(Stream, "}"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'}',Lno}|R]);
%% Even though x.680 specify '[[' and ']]' as lexical items
-%% it does not work to have them as such since the single [ and ] can
-%% be used beside each other in the SYNTAX OF in x.681
-%% the solution chosen here , i.e. to have them as separate lexical items
+%% it does not work to have them as such since the single '[' and ']' can
+%% be used beside each other in 'WITH SYNTAX' in x.681.
+%% The solution chosen here, i.e. to have them as separate lexical items,
%% will not detect the cases where there is white space between them
-%% which would be an error in the use in ExtensionAdditionGroups
-
-%% tokenise(Stream,[$[,$[|T],Lno,R) ->
-%% tokenise(Stream,T,Lno,[{'[[',Lno}|R]);
-
-tokenise(Stream,[$]|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{']',Lno}|R]);
-
-tokenise(Stream,[$[|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'[',Lno}|R]);
+%% which would be an error when used in ExtensionAdditionGroups.
-tokenise(Stream,[$,|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{',',Lno}|R]);
+tokenise(Stream, "]"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{']',Lno}|R]);
+tokenise(Stream, "["++T,Lno,R) ->
+ tokenise(Stream, T, Lno, [{'[',Lno}|R]);
-tokenise(Stream,[$(|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'(',Lno}|R]);
-tokenise(Stream,[$)|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{')',Lno}|R]);
+tokenise(Stream, ","++T,Lno,R) ->
+ tokenise(Stream, T, Lno, [{',',Lno}|R]);
-tokenise(Stream,[$.,$.,$.|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'...',Lno}|R]);
+tokenise(Stream, "("++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'(',Lno}|R]);
+tokenise(Stream, ")"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{')',Lno}|R]);
-tokenise(Stream,[$.,$.|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'..',Lno}|R]);
+tokenise(Stream, "..."++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'...',Lno}|R]);
+tokenise(Stream, ".."++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'..',Lno}|R]);
+tokenise(Stream, "."++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'.',Lno}|R]);
-tokenise(Stream,[$.|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'.',Lno}|R]);
-tokenise(Stream,[$^|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'^',Lno}|R]);
-tokenise(Stream,[$!|T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'!',Lno}|R]);
-tokenise(Stream,[$||T],Lno,R) ->
- tokenise(Stream,T,Lno,[{'|',Lno}|R]);
+tokenise(Stream, "|"++T, Lno, R) ->
+ tokenise(Stream, T, Lno, [{'|',Lno}|R]);
-tokenise(Stream,[H|T],Lno,R) ->
- case white_space(H) of
+tokenise(Stream, [H|T], Lno, R) when $A =< H , H =< $Z ->
+ {X,T1} = get_name(T, [H]),
+ case reserved_word(X) of
true ->
- tokenise(Stream,T,Lno,R);
+ tokenise(Stream, T1, Lno, [{X,Lno}|R]);
false ->
- tokenise(Stream,T,Lno,[{list_to_atom([H]),Lno}|R])
+ tokenise(Stream, T1, Lno, [{typereference,Lno,X}|R]);
+ rstrtype ->
+ tokenise(Stream, T1, Lno, [{restrictedcharacterstringtype,Lno,X}|R])
end;
-tokenise(_Stream,[],Lno,R) ->
- {Lno,lists:reverse(R)}.
+tokenise(Stream, [H|T], Lno, R) when $a =< H , H =< $z ->
+ {X, T1} = get_name(T, [H]),
+ tokenise(Stream, T1, Lno, [{identifier,Lno,X}|R]);
-collect_string(L,Lno) ->
- collect_string(L,Lno,[]).
+tokenise(Stream, [H|T], Lno, R) when $0 =< H , H =< $9 ->
+ {X, T1} = get_number(T, [H]),
+ tokenise(Stream, T1, Lno, [{number,Lno,list_to_integer(X)}|R]);
-collect_string([],_,_) ->
- throw({'ERR','bad_quote found eof'});
+tokenise(Stream, [H|T], Lno, R) when H =< $\s ->
+ tokenise(Stream, T, Lno, R);
-collect_string([H|T],Lno,Str) ->
- case H of
- $" ->
- {{cstring,1,lists:reverse(Str)},T};
- Ch ->
- collect_string(T,Lno,[Ch|Str])
- end.
-
+tokenise(Stream, [H|T], Lno, R) ->
+ tokenise(Stream, T, Lno, [{list_to_atom([H]),Lno}|R]);
+tokenise(_Stream, [], Lno, R) ->
+ {Lno+1,R}.
-% <name> is letters digits hyphens
-% hypen is not the last character. Hypen hyphen is NOT allowed
-%
-% <identifier> ::= <lowercase> <name>
+collect_string(L, Lno) ->
+ collect_string(L, Lno, []).
-get_name([$-,Char|T], L) ->
+collect_string([$"|T], _Lno, Str) ->
+ {{cstring,1,lists:reverse(Str)},T};
+collect_string([H|T], Lno, Str) ->
+ collect_string(T, Lno, [H|Str]);
+collect_string([], _, _) ->
+ throw({error,missing_quote_at_eof}).
+
+%% <name> is letters, digits, and hyphens.
+%% Hyphen is not the last character. Hyphen hyphen is NOT allowed.
+%%
+%% <identifier> ::= <lowercase> <name>
+
+get_name([$-,Char|T]=T0, Acc) ->
case isalnum(Char) of
true ->
- get_name(T,[Char,$-|L]);
+ get_name(T, [Char,$-|Acc]);
false ->
- {lists:reverse(L),[$-,Char|T]}
+ {list_to_atom(lists:reverse(Acc)),T0}
end;
-get_name([$-|T], L) ->
- {lists:reverse(L),[$-|T]};
-get_name([Char|T], L) ->
+get_name([$-|_]=T, Acc) ->
+ {list_to_atom(lists:reverse(Acc)),T};
+get_name([Char|T]=T0, Acc) ->
case isalnum(Char) of
true ->
- get_name(T,[Char|L]);
+ get_name(T, [Char|Acc]);
false ->
- {lists:reverse(L),[Char|T]}
+ {list_to_atom(lists:reverse(Acc)),T0}
end;
-get_name([], L) ->
- {lists:reverse(L), []}.
-
+get_name([], Acc) ->
+ {list_to_atom(lists:reverse(Acc)),[]}.
isalnum(H) when $A =< H , H =< $Z ->
true;
@@ -221,67 +207,54 @@ isdigit(H) when $0 =< H , H =< $9 ->
isdigit(_) ->
false.
-white_space(9) -> true;
-white_space(10) -> true;
-white_space(13) -> true;
-white_space(32) -> true;
-white_space(_) -> false.
-
-
-get_number([H|T], L) ->
+get_number([H|T]=T0, L) ->
case isdigit(H) of
true ->
get_number(T, [H|L]);
false ->
- {lists:reverse(L), [H|T]}
+ {lists:reverse(L), T0}
end;
get_number([], L) ->
{lists:reverse(L), []}.
-skip_comment([]) ->
- [];
-skip_comment([$-,$-|T]) ->
- T;
-skip_comment([_|T]) ->
- skip_comment(T).
-
+skip_comment([]) -> [];
+skip_comment("--"++T) -> T;
+skip_comment([_|T]) -> skip_comment(T).
-skip_multiline_comment(Stream,[],Lno,Level) ->
- case io:get_line(Stream,'') of
+skip_multiline_comment(Stream, [], Lno, Level) ->
+ case io:get_line(Stream, '') of
eof ->
- io:format("Tokeniser error on line: ~w~n"
- "premature end of multiline comment~n",[Lno]),
- exit(0);
+ throw({error,eof_in_comment});
Line ->
- skip_multiline_comment(Stream,Line,Lno+1,Level)
+ skip_multiline_comment(Stream, Line, Lno+1, Level)
end;
-skip_multiline_comment(_Stream,[$*,$/|T],Lno,0) ->
+skip_multiline_comment(_Stream, "*/"++T, Lno, 0) ->
{Lno,T};
-skip_multiline_comment(Stream,[$*,$/|T],Lno,Level) ->
- skip_multiline_comment(Stream,T,Lno,Level - 1);
-skip_multiline_comment(Stream,[$/,$*|T],Lno,Level) ->
- skip_multiline_comment(Stream,T,Lno,Level + 1);
-skip_multiline_comment(Stream,[_|T],Lno,Level) ->
- skip_multiline_comment(Stream,T,Lno,Level).
-
-collect_quoted([$',$B|T],Lno, L) ->
+skip_multiline_comment(Stream, "*/"++T, Lno, Level) ->
+ skip_multiline_comment(Stream, T, Lno, Level - 1);
+skip_multiline_comment(Stream, "/*"++T, Lno, Level) ->
+ skip_multiline_comment(Stream, T, Lno, Level + 1);
+skip_multiline_comment(Stream, [_|T], Lno, Level) ->
+ skip_multiline_comment(Stream, T, Lno, Level).
+
+collect_quoted("'B"++T, Lno, L) ->
case check_bin(L) of
true ->
- {{bstring,Lno, lists:reverse(L)}, T};
+ {{bstring,Lno,lists:reverse(L)}, T};
false ->
- throw({'ERR',{invalid_binary_number, lists:reverse(L)}})
+ throw({error,{invalid_binary_number,lists:reverse(L)}})
end;
-collect_quoted([$',$H|T],Lno, L) ->
+collect_quoted("'H"++T, Lno, L) ->
case check_hex(L) of
true ->
- {{hstring,Lno, lists:reverse(L)}, T};
+ {{hstring,Lno,lists:reverse(L)}, T};
false ->
- throw({'ERR',{invalid_binary_number, lists:reverse(L)}})
+ throw({error,{invalid_hex_number,lists:reverse(L)}})
end;
collect_quoted([H|T], Lno, L) ->
collect_quoted(T, Lno,[H|L]);
collect_quoted([], _, _) -> % This should be allowed FIX later
- throw({'ERR',{eol_in_token}}).
+ throw({error,eol_in_token}).
check_bin([$0|T]) ->
check_bin(T);
@@ -351,7 +324,6 @@ reserved_word('INCLUDES') -> true;
reserved_word('INSTANCE') -> true;
reserved_word('INTEGER') -> true;
reserved_word('INTERSECTION') -> true;
-reserved_word('ISO646String') -> rstrtype;
reserved_word('MAX') -> true;
reserved_word('MIN') -> true;
reserved_word('MINUS-INFINITY') -> true;
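The rewritten tokeniser above no longer prints errors and exits; it throws {error,Reason} terms, and the format_error/1 clauses at the top of this hunk turn those terms into readable strings. A minimal standalone sketch of that pattern, repeating the same clauses for illustration only (the module name is made up and not part of the patch):

    %% Render the tokeniser's error terms as flat strings.
    -module(tok_error_demo).
    -export([explain/1]).

    explain(Reason) ->
        lists:flatten(format_error(Reason)).

    format_error({invalid_binary_number,Str}) ->
        io_lib:format("invalid binary number: '~s'", [Str]);
    format_error({invalid_hex_number,Str}) ->
        io_lib:format("invalid hex number: '~s'", [Str]);
    format_error(Other) ->
        io_lib:format("~p", [Other]).

For example, explain({invalid_hex_number,"GG"}) evaluates to "invalid hex number: 'GG'".
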
diff --git a/lib/asn1/src/asn1ct_value.erl b/lib/asn1/src/asn1ct_value.erl
index 221cd991a7..c5901d5489 100644
--- a/lib/asn1/src/asn1ct_value.erl
+++ b/lib/asn1/src/asn1ct_value.erl
@@ -352,8 +352,9 @@ random_unnamed_bit_string(M, C) ->
%% end.
random(Upper) ->
- {A1,A2,A3} = erlang:now(),
- _ = random:seed(A1, A2, A3),
+ _ = random:seed(erlang:phash2([erlang:node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
random:uniform(Upper).
size_random(C) ->
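The random/1 change above drops the deprecated erlang:now/0 seed in favour of erlang:phash2/1, erlang:monotonic_time/0, and erlang:unique_integer/0, the combination suggested in OTP's time-correction documentation for seeding random number generators. A self-contained sketch of the same idiom (the function name is illustrative, not from the patch):

    %% Seed the legacy 'random' module without erlang:now/0, then draw
    %% a value in 1..Upper.
    seeded_uniform(Upper) when is_integer(Upper), Upper >= 1 ->
        _ = random:seed(erlang:phash2([erlang:node()]),
                        erlang:monotonic_time(),
                        erlang:unique_integer()),
        random:uniform(Upper).
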
diff --git a/lib/asn1/test/Makefile b/lib/asn1/test/Makefile
index b1b08aa9f9..ea5a0f857e 100644
--- a/lib/asn1/test/Makefile
+++ b/lib/asn1/test/Makefile
@@ -78,6 +78,7 @@ MODULES= \
testEnumExt \
testInfObjectClass \
testInfObj \
+ testInfObjExtract \
testParameterizedInfObj \
testFragmented \
testMergeCompile \
@@ -104,14 +105,19 @@ MODULES= \
test_compile_options \
testDoubleEllipses \
test_modified_x420 \
- testX420 \
test_x691 \
testWSParamClass \
+ testValueTest \
+ testUniqueObjectSets \
+ testRfcs \
+ testImporting \
+ testExtensibilityImplied \
asn1_test_lib \
asn1_app_test \
asn1_appup_test \
asn1_SUITE \
- error_SUITE
+ error_SUITE \
+ syntax_SUITE
ERL_FILES= $(MODULES:%=%.erl)
diff --git a/lib/asn1/test/asn1_SUITE.erl b/lib/asn1/test/asn1_SUITE.erl
index 432197eec0..9dfcc3f571 100644
--- a/lib/asn1/test/asn1_SUITE.erl
+++ b/lib/asn1/test/asn1_SUITE.erl
@@ -52,9 +52,7 @@ all() ->
groups() ->
Parallel = asn1_test_lib:parallel(),
[{compile, Parallel,
- [c_syntax,
- c_string,
- c_implicit_before_choice,
+ [c_string,
constraint_equivalence]},
{ber, Parallel,
@@ -89,6 +87,7 @@ groups() ->
ber_other,
der,
h323test]},
+ testExtensibilityImplied,
testChoPrim,
testChoExtension,
testChoOptional,
@@ -135,19 +134,19 @@ groups() ->
testChoiceIndefinite,
per_open_type,
testInfObjectClass,
+ testUniqueObjectSets,
+ testInfObjExtract,
testParam,
testFragmented,
testMergeCompile,
testobj,
testDeepTConstr,
- testExport,
testImport,
testDER,
testDEFAULT,
testMvrasn6,
testContextSwitchingTypes,
testOpenTypeImplicitTag,
- duplicate_tags,
testROSE,
testINSTANCE_OF,
testTCAP,
@@ -158,16 +157,19 @@ groups() ->
testNortel,
% Uses 'PKCS7', 'InformationFramework'
{group, [], [test_WS_ParamClass,
- test_modified_x420,
- testX420]},
- testTcapsystem,
- testNBAPsystem,
- testS1AP,
+ test_modified_x420]},
+ %% Don't run all these at the same time.
+ {group, [],
+ [testTcapsystem,
+ testNBAPsystem,
+ testS1AP,
+ testRfcs]},
test_compile_options,
testDoubleEllipses,
test_x691,
ticket_6143,
- test_OTP_9688]},
+ test_OTP_9688,
+ testValueTest]},
{performance, [],
[testTimer_ber,
@@ -196,7 +198,7 @@ init_per_testcase(Func, Config) ->
true = code:add_patha(CaseDir),
Dog = case Func of
- testX420 -> ct:timetrap({minutes, 90});
+ testRfcs -> ct:timetrap({minutes, 90});
_ -> ct:timetrap({minutes, 60})
end,
[{case_dir, CaseDir}, {watchdog, Dog}|Config].
@@ -374,6 +376,12 @@ testExternal(Config, Rule, Opts) ->
testSetOfTag:main(Rule),
testSetTag:main(Rule).
+testExtensibilityImplied(Config) ->
+ test(Config, fun testExtensibilityImplied/3).
+testExtensibilityImplied(Config, Rule, Opts) ->
+ asn1_test_lib:compile("ExtensibilityImplied", Config,
+ [Rule,no_ok_wrapper|Opts]),
+ testExtensibilityImplied:main().
testChoPrim(Config) -> test(Config, fun testChoPrim/3).
testChoPrim(Config, Rule, Opts) ->
@@ -561,39 +569,21 @@ testSetOfCho(Config, Rule, Opts) ->
asn1_test_lib:compile("SetOfCho", Config, [Rule|Opts]),
testSetOfCho:main(Rule).
-c_syntax(Config) ->
- DataDir = ?config(data_dir, Config),
- [{error, _} = asn1ct:compile(filename:join(DataDir, F))
- || F <-["Syntax",
- "BadTypeEnding",
- "BadValueAssignment1",
- "BadValueAssignment2",
- "BadValueSet",
- "ChoiceBadExtension",
- "EnumerationBadExtension",
- "Example",
- "Export1",
- "MissingEnd",
- "SequenceBadComma",
- "SequenceBadComponentName",
- "SequenceBadComponentType",
- "SeqBadComma"]].
-
c_string(Config) ->
test(Config, fun c_string/3).
c_string(Config, Rule, Opts) ->
asn1_test_lib:compile("String", Config, [Rule|Opts]),
asn1ct:test('String').
-c_implicit_before_choice(Config) ->
- test(Config, fun c_implicit_before_choice/3, [ber]).
-c_implicit_before_choice(Config, Rule, Opts) ->
- DataDir = ?config(data_dir, Config),
- CaseDir = ?config(case_dir, Config),
- {error, _R2} = asn1ct:compile(filename:join(DataDir, "CCSNARG3"),
- [Rule, {outdir, CaseDir}|Opts]).
-
constraint_equivalence(Config) ->
+ constraint_equivalence_abs(Config),
+ test(Config, fun constraint_equivalence/3).
+
+constraint_equivalence(Config, Rule, Opts) ->
+ M = 'ConstraintEquivalence',
+ asn1_test_lib:compile(M, Config, [Rule|Opts]).
+
+constraint_equivalence_abs(Config) ->
DataDir = ?config(data_dir, Config),
CaseDir = ?config(case_dir, Config),
Asn1Spec = "ConstraintEquivalence",
@@ -765,6 +755,16 @@ testInfObjectClass(Config, Rule, Opts) ->
testInfObjectClass:main(Rule),
testInfObj:main(Rule).
+testUniqueObjectSets(Config) -> test(Config, fun testUniqueObjectSets/3).
+testUniqueObjectSets(Config, Rule, Opts) ->
+ CaseDir = ?config(case_dir, Config),
+ testUniqueObjectSets:main(CaseDir, Rule, Opts).
+
+testInfObjExtract(Config) -> test(Config, fun testInfObjExtract/3).
+testInfObjExtract(Config, Rule, Opts) ->
+ asn1_test_lib:compile("InfObjExtract", Config, [Rule|Opts]),
+ testInfObjExtract:main().
+
testParam(Config) ->
test(Config, fun testParam/3, [ber,{ber,[der]},per,uper]).
testParam(Config, Rule, Opts) ->
@@ -804,18 +804,14 @@ testDeepTConstr(Config, Rule, Opts) ->
[Rule|Opts]),
testDeepTConstr:main(Rule).
-testExport(Config) ->
- {error, _} =
- asn1ct:compile(filename:join(?config(data_dir, Config),
- "IllegalExport"),
- [{outdir, ?config(case_dir, Config)}]).
-
testImport(Config) ->
test(Config, fun testImport/3).
testImport(Config, Rule, Opts) ->
- Files = ["ImportsFrom","ImportsFrom2","ImportsFrom3"],
+ Files = ["ImportsFrom","ImportsFrom2","ImportsFrom3",
+ "Importing","Exporting"],
asn1_test_lib:compile_all(Files, Config, [Rule|Opts]),
42 = 'ImportsFrom':i(),
+ testImporting:main(),
ok.
testMegaco(Config) -> test(Config, fun testMegaco/3).
@@ -839,24 +835,20 @@ testContextSwitchingTypes(Config, Rule, Opts) ->
testTypeValueNotation(Config) -> test(Config, fun testTypeValueNotation/3).
testTypeValueNotation(Config, Rule, Opts) ->
- asn1_test_lib:compile_all(["SeqTypeRefPrim", "ValueTest"], Config,
- [Rule|Opts]),
+ asn1_test_lib:compile("SeqTypeRefPrim", Config, [Rule|Opts]),
testTypeValueNotation:main(Rule, Opts).
+testValueTest(Config) -> test(Config, fun testValueTest/3).
+testValueTest(Config, Rule, Opts) ->
+ asn1_test_lib:compile("ValueTest", Config, [Rule|Opts]),
+ testValueTest:main().
+
testOpenTypeImplicitTag(Config) ->
test(Config, fun testOpenTypeImplicitTag/3).
testOpenTypeImplicitTag(Config, Rule, Opts) ->
asn1_test_lib:compile("OpenTypeImplicitTag", Config, [Rule|Opts]),
testOpenTypeImplicitTag:main(Rule).
-duplicate_tags(Config) ->
- DataDir = ?config(data_dir, Config),
- CaseDir = ?config(case_dir, Config),
- {error, [{error, {type, _, _, 'SeqOpt1Imp',
- {asn1, {duplicates_of_the_tags, _}}}}]} =
- asn1ct:compile(filename:join(DataDir, "SeqOptional2"),
- [abs, {outdir, CaseDir}]).
-
rtUI(Config) -> test(Config, fun rtUI/3).
rtUI(Config, Rule, Opts) ->
asn1_test_lib:compile("Prim", Config, [Rule|Opts]),
@@ -990,13 +982,22 @@ testS1AP(Config, Rule, Opts) ->
ok
end.
+testRfcs(Config) -> test(Config, fun testRfcs/3, [{ber,[der]}]).
+testRfcs(Config, Rule, Opts) ->
+ case erlang:system_info(system_architecture) of
+ "sparc-sun-solaris2.10" ->
+ {skip,"Too slow for an old Sparc"};
+ _ ->
+ testRfcs:compile(Config, Rule, Opts),
+ testRfcs:test()
+ end.
+
test_compile_options(Config) ->
ok = test_compile_options:wrong_path(Config),
ok = test_compile_options:path(Config),
ok = test_compile_options:noobj(Config),
ok = test_compile_options:record_name_prefix(Config),
- ok = test_compile_options:verbose(Config),
- ok = test_compile_options:warnings_as_errors(Config).
+ ok = test_compile_options:verbose(Config).
testDoubleEllipses(Config) -> test(Config, fun testDoubleEllipses/3).
testDoubleEllipses(Config, Rule, Opts) ->
@@ -1084,6 +1085,7 @@ test_modules() ->
"CommonDataTypes",
"Constraints",
"ContextSwitchingTypes",
+ "CoverParser",
"DS-EquipmentUser-CommonFunctionOrig-TransmissionPath",
"Enum",
"From",
@@ -1118,7 +1120,9 @@ test_modules() ->
"Def",
"Opt",
"ELDAPv3",
- "LDAP"].
+ "LDAP",
+ "SeqOptional2",
+ "CCSNARG3"].
test_OTP_9688(Config) ->
PrivDir = ?config(case_dir, Config),
diff --git a/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn b/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn
deleted file mode 100644
index 3ccd838ac0..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadTypeEnding.asn
+++ /dev/null
@@ -1,6 +0,0 @@
-BadTypeEnding DEFINITIONS ::=
-BEGIN
-
-T ::= Typ;
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1
deleted file mode 100644
index a5d4984e60..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment1.asn1
+++ /dev/null
@@ -1,8 +0,0 @@
-BadValueAssignment1 DEFINITIONS ::=
-BEGIN
-
-int INTEGER ::= 3
-
-int2 integer ::= 3
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1
deleted file mode 100644
index 7a96406001..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadValueAssignment2.asn1
+++ /dev/null
@@ -1,8 +0,0 @@
-BadValueAssignment2 DEFINITIONS ::=
-BEGIN
-
-int INTEGER ::= 3
-
-int2 ::= 3
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1 b/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1
deleted file mode 100644
index 68bd4380b7..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/BadValueSet.asn1
+++ /dev/null
@@ -1,9 +0,0 @@
-BadValueSet DEFINITIONS ::=
-BEGIN
-
-Int INTEGER ::= {1|2|3}
-
-Int2 INTEGER ::= {
- 1,2,3}
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn b/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn
index 23c1f32ceb..8932238adc 100644
--- a/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn
+++ b/lib/asn1/test/asn1_SUITE_data/CCSNARG3.asn
@@ -3,7 +3,7 @@ BEGIN
CallCentreServiceNotificationArg ::= SEQUENCE {
scriptInformation [0] ScriptToScriptInformation,
- eventInformation [1] IMPLICIT EventInformation OPTIONAL
+ eventInformation [1] EventInformation OPTIONAL
}
diff --git a/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1 b/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1
index f6fe18be10..18473bae30 100644
--- a/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ChoExtension.asn1
@@ -41,10 +41,4 @@ ChoExt4 ::= CHOICE
str OCTET STRING
}
-ChoEmptyRoot ::= CHOICE {
- ...,
- bool BOOLEAN,
- int INTEGER (0..7)
-}
-
END
diff --git a/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1 b/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1
deleted file mode 100644
index d0789d7414..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/ChoiceBadExtension.asn1
+++ /dev/null
@@ -1,27 +0,0 @@
-ChoiceBadExtension DEFINITIONS ::=
-BEGIN
-
-Seq ::= SEQUENCE {
- ...,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER
- }
-
-Cho1 ::= CHOICE {
- name PrintableString,
- ...,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER
- }
-
-Cho2 ::= CHOICE {
- ...,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER
- }
-
-END
-
-
diff --git a/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1 b/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1
index 8b3d151502..648275dd66 100644
--- a/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ConstraintEquivalence.asn1
@@ -11,6 +11,10 @@ BEGIN
SingleValueX8 ::= INTEGER (integer42)
SingleValueX9 ::= INTEGER (integer42..integer42)
SingleValueX10 ::= INTEGER ((integer42) INTERSECTION (40..49))
+ SingleValueX11 ::= INTEGER (40..49) (integer42)
+ SingleValueX12 ::= INTEGER ((MIN..0) ^ (1..10) | integer42)
+ SingleValueX13 ::= INTEGER ((11..20) ^ (1..10) | integer42)
+ SingleValueX14 ::= INTEGER ((MIN..42) ^ (1..100) ^ (42..50))
UnconstrainedX0 ::= INTEGER
UnconstrainedX1 ::= INTEGER (MIN..MAX)
@@ -19,6 +23,10 @@ BEGIN
UnconstrainedX4 ::= INTEGER ((MIN..MAX)|9|10)
UnconstrainedX5 ::= INTEGER ((MIN..MAX)|10..20)
UnconstrainedX6 ::= INTEGER ((MIN..MAX) UNION (10..20))
+ UnconstrainedX7 ::= INTEGER ((MIN..MAX) ^ ((MIN..MAX) UNION (10..20)))
+ UnconstrainedX8 ::= INTEGER ((-100..MAX) ^ (42..MAX) | (MIN..41))
+ UnconstrainedX9 ::= INTEGER (UnconstrainedX0)
+ UnconstrainedX10 ::= INTEGER (UnconstrainedX0)(MIN..MAX)
RangeX00 ::= INTEGER (5..10)
RangeX01 ::= INTEGER (4<..<11)
@@ -38,22 +46,66 @@ BEGIN
RangeX16 ::= INTEGER ((5|6) UNION (7) UNION (7<..<11))
RangeX20 ::= INTEGER (0..20) (5..10)
- RangeX21 ::= INTEGER (0..10) (5..20)
- RangeX22 ::= INTEGER (0..10) (5..20) (MIN..MAX)
- RangeX23 ::= INTEGER ((0..10) INTERSECTION (5..20) ^ (MIN..MAX))
- RangeX24 ::= INTEGER ((5|6|7|8|9|10) INTERSECTION (5..20) ^ (MIN..MAX))
+ RangeX21 ::= INTEGER ((0..10) ^ (5..20))
+ RangeX22 ::= INTEGER ((0..10) ^ (5..20) ^ (MIN..MAX))
+ RangeX23 ::= INTEGER (MIN..MAX) (-100..20) (5..10)
+ RangeX24 ::= INTEGER (MIN..MAX) (0..100) (5..20) (5..10)
+ RangeX25 ::= INTEGER ((0..10) INTERSECTION (5..20) ^ (MIN..MAX))
+ RangeX26 ::= INTEGER ((5|6|7|8|9|10) INTERSECTION (5..20) ^ (MIN..MAX))
+
+ RangeX30 ::= INTEGER (((5|6) | (5..20)) ^ (0..10))
+ RangeX31 ::= INTEGER (((((5|6) | (5..20)) ^ (0..10))) ^ (MIN..MAX))
+ RangeX32 ::= INTEGER ((5|7) | (5..10))
+
+ Semi00 ::= INTEGER (0..MAX)
+ Semi01 ::= INTEGER (0..MAX) (MIN..MAX)
+ Semi02 ::= INTEGER ((0..100) UNION (200..MAX) UNION (50..1024))
+
+ RangeExtX00 ::= INTEGER (5..10, ...)
+ RangeExtX01 ::= INTEGER (0..20) (5..10, ...)
+ RangeExtX02 ::= INTEGER (RangeX26) (5..10, ...)
+-- RangeExtX03 ::= RangeX26 (5..10, ...)
+
+ MinRangeX00 ::= INTEGER (MIN..10)
+ MinRangeX01 ::= INTEGER ((MIN..0) | (0..10))
+ MinRangeX02 ::= INTEGER (MIN..MAX) (MIN..100) (MIN..10)
+ MinRangeX03 ::= INTEGER (((MIN..-100)|(-60..-50)) | (MIN..10))
+
+ DisjointRangeX00 ::= INTEGER (0..5 UNION 95..99)
+ DisjointRangeX01 ::= INTEGER (0|1|2|3|4|5|95|96|97|98|99)
+ DisjointRangeX02 ::= INTEGER (0..100) (0..2 UNION 95..99 UNION 3|4|5)
+ DisjointRangeX03 ::= INTEGER (MIN..MAX) (0..2 UNION 95..99 UNION 3|4|5)
+
+ MinDisjointRangeX00 ::= INTEGER (MIN..-100 UNION 100..1000)
+ MinDisjointRangeX01 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-100))
+ MinDisjointRangeX02 ::= INTEGER (MIN..-50000 UNION 100..1000 UNION (MIN..-100))
+ MinDisjointRangeX03 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-1000000))
+ MinDisjointRangeX04 ::= INTEGER (MIN..-100 UNION 100..1000 UNION (MIN..-1000000))
+ MinDisjointRangeX05 ::= INTEGER (MIN..-100 ^ (MIN..-100) UNION 100..1000)
+ MinDisjointRangeX06 ::= INTEGER (MIN..-100 ^ (MIN..0) UNION 100..1000)
UnconstrainedStringX00 ::= IA5String
UnconstrainedStringX01 ::= IA5String (SIZE (0..MAX))
+ UnconstrainedStringX02 ::= IA5String (SIZE (0..42|43..MAX))
ConstrainedStringX00 ::= IA5String (SIZE (0..5))
ConstrainedStringX01 ::= IA5String (SIZE (0|1|2|3|4|5))
+ StringExtFromX00 ::= IA5String (FROM ("AB", ..., "CD"))(SIZE (1..10, ..., 15..20))
+ StringExtFromX01 ::= IA5String (FROM ("AB", ..., "CD"))(SIZE (1..10, ..., 15..20))
+ StringExtFromX02 ::= IA5String ((FROM ("AB", ..., "CD")) ^ ((SIZE (1..10, ..., 15..20))))
+ StringExtFromX03 ::= IA5String ((FROM ("AB", ..., "CD")) ^ (SIZE (1..10, ..., 15..20)))
+ StringExtFromX04 ::= IA5String (StringExtFromX00)
+
-- Note: None of the back-ends care about the exact values
-- outside of the root range.
ExtConstrainedStringX00 ::= IA5String (SIZE (1..2, ...))
ExtConstrainedStringX01 ::= IA5String (SIZE (1|2, ..., 3))
ExtConstrainedStringX02 ::= IA5String (SIZE (1|2, ..., 3|4|5))
+ ExtConstrainedStringX03 ::= IA5String (SIZE (1|2, ..., 1|2|3|4|5))
+ ExtConstrainedStringX04 ::= IA5String (SIZE (1|2), ..., SIZE (1|2|3|4|5))
+ ExtConstrainedStringX05 ::= IA5String (SIZE (1|2, ...), ...,
+ SIZE (1|2|3|4|5, ...))
integer4 INTEGER ::= 4
integer11 INTEGER ::= 11
diff --git a/lib/asn1/test/asn1_SUITE_data/Constraints.py b/lib/asn1/test/asn1_SUITE_data/Constraints.py
index 3495cd841b..a40c513141 100644
--- a/lib/asn1/test/asn1_SUITE_data/Constraints.py
+++ b/lib/asn1/test/asn1_SUITE_data/Constraints.py
@@ -81,7 +81,7 @@ maxNrOfCellPortionsPerCell-1 INTEGER ::= 35
CellPortionID ::= INTEGER (0..maxNrOfCellPortionsPerCell-1,...)
-- OTP-6763
-T ::= IA5String (SIZE (1|2, ..., SIZE (1|2|3))) -- Dubuisson 268
+T ::= IA5String (SIZE (1|2), ..., SIZE (1|2|3)) -- Dubuisson 268
T2 ::= IA5String (SIZE (1|2, ..., 3)) -- equal with T
-- OTP-8046
@@ -144,5 +144,47 @@ NonOverlapping ::= INTEGER (7280..7560 |
23000..24000 |
24960..26900)
+--
+-- Test INTEGER constraints from fields in objects.
+--
+
+INT-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &obj INT-HOLDER OPTIONAL
+} WITH SYNTAX {
+ ID &id
+ [OBJ &obj]
+}
+
+int-holder-1 INT-HOLDER ::= { ID 2 }
+int-holder-2 INT-HOLDER ::= { ID 4 OBJ int-holder-1 }
+
+IntObjectConstr ::= INTEGER (int-holder-2.&obj.&id..int-holder-2.&id)
+
+--
+-- INTEGER constraints defined using named INTEGERs.
+--
+
+ConstrainedNamedInt ::= INTEGER {v1(42)} (v1)
+constrainedNamedInt-1 INTEGER {v1(42)} (v1) ::= 42
+constrainedNamedInt-2 ConstrainedNamedInt ::= 100
+
+SeqWithNamedInt ::= SEQUENCE {
+ int INTEGER {v2(7)} (v2)
+}
+
+--
+-- Cover simpletable constraint checking code.
+--
+
+ContentInfo ::= SEQUENCE {
+ contentType ContentType
+}
+
+Contents TYPE-IDENTIFIER ::= {
+ {OCTET STRING IDENTIFIED BY {2 1 1 1 1 1 1}}
+}
+
+ContentType ::= TYPE-IDENTIFIER.&id({Contents})
END
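The new constraint variants above (INTEGER bounds taken from object fields, named-INTEGER constraints, and the simpletable coverage) are exercised like the other specs in asn1_SUITE: compile the module and run the compiler's built-in round-trip test, as c_string/3 does earlier in this diff. A hypothetical sketch of that flow (file-name resolution and the ber option are assumptions, not shown in the patch):

    %% Compile the Constraints spec with the BER back-end, then let
    %% asn1ct:test/1 encode and decode a sample value of every type.
    check_constraints() ->
        ok = asn1ct:compile("Constraints", [ber]),
        asn1ct:test('Constraints').
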
diff --git a/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1 b/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1
new file mode 100644
index 0000000000..75d40188ca
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/CoverParser.asn1
@@ -0,0 +1,57 @@
+CoverParser DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+ Cho1 ::= CHOICE {
+ i INTEGER,
+ ... ! 42,
+ [[ b BOOLEAN ]]
+ }
+
+ Cho2 ::= CHOICE {
+ i INTEGER,
+ ...,
+ [[ b BOOLEAN,
+ s IA5String ]],
+ ...
+ }
+
+ Int1 ::= INTEGER (CONSTRAINED BY {INTEGER:1,INTEGER:2})
+
+ Seq1 ::= SEQUENCE {
+ ... ! INTEGER:1
+ }
+
+ Seq2 ::= SEQUENCE {
+ ... ! INTEGER:1,
+ i INTEGER
+ }
+
+ Seq3 ::= SEQUENCE {
+ b BOOLEAN,
+ ... ! INTEGER:1,
+ i INTEGER
+ }
+
+ Seq4 ::= SEQUENCE {
+ a INTEGER OPTIONAL,
+ b OCTET STRING OPTIONAL
+ } (WITH COMPONENTS {a ABSENT, b OPTIONAL} |
+ WITH COMPONENTS {a PRESENT, b PRESENT})
+
+ SeqOf1 ::= SEQUENCE OF INTEGER
+ SeqOf2 ::= SeqOf1 (WITH COMPONENT (0..7))
+
+ SegOf3 ::= SEQUENCE (SIZE (1..10)) OF id INTEGER
+
+ Set1 ::= SET {
+ ... ! INTEGER:1
+ }
+
+ Set2 ::= SET {
+ ... ! INTEGER:1,
+ a INTEGER
+ }
+
+ SetOf3 ::= SET (SIZE (1..10)) OF id INTEGER
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn b/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn
index e90cf55d61..846c3e7569 100644
--- a/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn
+++ b/lib/asn1/test/asn1_SUITE_data/DoubleEllipses.asn
@@ -12,6 +12,15 @@ Seq ::= SEQUENCE
c BOOLEAN
}
+SeqV1 ::= SEQUENCE
+ {
+ a INTEGER,
+ ...,
+ b BOOLEAN,
+ ...
+ }
+
+
SeqV2 ::= SEQUENCE
{
a INTEGER,
@@ -50,6 +59,18 @@ SeqAltV2 ::= SEQUENCE
g INTEGER
}
+SeqDoubleEmpty1 ::= SEQUENCE {
+ ...,
+ ...
+}
+
+SeqDoubleEmpty2 ::= SEQUENCE {
+ a BOOLEAN,
+ b INTEGER OPTIONAL,
+ ...,
+ ...
+}
+
Set ::= SET {
a INTEGER,
...,
@@ -57,6 +78,14 @@ Set ::= SET {
c BOOLEAN
}
+
+SetV1 ::= SET {
+ a INTEGER,
+ ...,
+ b BOOLEAN,
+ ...
+ }
+
SetV2 ::= SET
{
a INTEGER,
@@ -96,4 +125,4 @@ SetAltV2 ::= SET
}
-END
\ No newline at end of file
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1 b/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1
index 74fa97e7aa..55ad5a01a1 100644
--- a/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/EnumExt.asn1
@@ -53,5 +53,7 @@ SeqBig ::= SEQUENCE {
i INTEGER
}
+EnumSkip ::= ENUMERATED {a(2), ..., b, c, d, e, f}
+
END
diff --git a/lib/asn1/test/asn1_SUITE_data/Example.asn1 b/lib/asn1/test/asn1_SUITE_data/Example.asn1
deleted file mode 100644
index 2639f63940..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/Example.asn1
+++ /dev/null
@@ -1,20 +0,0 @@
-Example DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
-Typ ::= SEQUENCE {
- a b,
- c Typ}
---ECLASS ::= CLASS {
--- &num INTEGER UNIQUE,
--- &Typo
--- } WITH SYNTAX {
--- &Typo DETERMINED BY &num
--- }
-
---v1 ECLASS ::= {INTEGER DETERMINED BY 12}
-
---v2 INTEGER ::= 13
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Export1.asn b/lib/asn1/test/asn1_SUITE_data/Export1.asn
deleted file mode 100644
index 78ead8f4d2..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/Export1.asn
+++ /dev/null
@@ -1,7 +0,0 @@
-Export1 DEFINITIONS ::=
-BEGIN
-EXPORTS T
-
-T ::= Typ
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Exporting.asn1 b/lib/asn1/test/asn1_SUITE_data/Exporting.asn1
new file mode 100644
index 0000000000..e4f32f6788
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/Exporting.asn1
@@ -0,0 +1,18 @@
+Exporting DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+ Seq ::= SEQUENCE { id INTEGER, f BOOLEAN }
+ PtSeq{T} ::= SEQUENCE { a T }
+
+ CL ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Type
+ } WITH SYNTAX {
+ ID &id TYPE &Type
+ }
+
+ obj CL ::= { ID 1 TYPE OCTET STRING }
+
+ pt-object{CL:ob} CL ::= {ID ob.&id TYPE OCTET STRING}
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1 b/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1
new file mode 100644
index 0000000000..d59b0edda5
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/ExtensibilityImplied.asn1
@@ -0,0 +1,30 @@
+ExtensibilityImplied DEFINITIONS
+AUTOMATIC TAGS
+EXTENSIBILITY IMPLIED
+::=
+BEGIN
+
+Enum1 ::= ENUMERATED { root, ..., ext }
+Enum2 ::= ENUMERATED { root }
+
+Seq1 ::= SEQUENCE {
+ b BOOLEAN,
+ ...,
+ i INTEGER
+}
+
+Seq2 ::= SEQUENCE {
+ b BOOLEAN
+}
+
+Set1 ::= SET {
+ b BOOLEAN,
+ ...,
+ i INTEGER
+}
+
+Set2 ::= SET {
+ b BOOLEAN
+}
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1 b/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1
deleted file mode 100644
index 1b5e42ad3c..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/IllegalExport.asn1
+++ /dev/null
@@ -1,7 +0,0 @@
-IllegalExport DEFINITIONS ::=
-BEGIN
-EXPORTS T, KalleAnka;
-
-T ::= INTEGER
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Importing.asn1 b/lib/asn1/test/asn1_SUITE_data/Importing.asn1
new file mode 100644
index 0000000000..2f2699c576
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/Importing.asn1
@@ -0,0 +1,20 @@
+Importing DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+ Seq ::= Exporting.PtSeq{ INTEGER(0..7) }
+ OtherSeq ::= Exporting.Seq
+
+ seq Exporting.Seq ::= { id 42, f TRUE }
+
+ o1 Exporting.CL ::= { ID 2 TYPE INTEGER (0..63) }
+
+ ObjSet Exporting.CL ::= { o1 | Exporting.obj }
+
+ ObjSeq ::= SEQUENCE {
+ id Exporting.CL.&id ({ObjSet}),
+ type Exporting.CL.&Type ({ObjSet}{@id})
+ }
+
+ o1-cloned Exporting.CL ::= Exporting.pt-object{o1}
+
+END
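Importing.asn1 above resolves everything from Exporting.asn1 through external references (Exporting.Seq, Exporting.CL, Exporting.pt-object), so the two specs are compiled together in testImport/3 earlier in this diff. A hypothetical sketch of the same dependency order (directory handling and the ber option are assumptions):

    %% Compile the exporting module first so its symbol information is
    %% available on disk, then the module that imports from it.
    compile_pair(Dir) ->
        ok = asn1ct:compile(filename:join(Dir, "Exporting.asn1"), [ber]),
        ok = asn1ct:compile(filename:join(Dir, "Importing.asn1"), [ber]).
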
diff --git a/lib/asn1/test/asn1_SUITE_data/InfObj.asn b/lib/asn1/test/asn1_SUITE_data/InfObj.asn
index 719119f418..3b88770d78 100644
--- a/lib/asn1/test/asn1_SUITE_data/InfObj.asn
+++ b/lib/asn1/test/asn1_SUITE_data/InfObj.asn
@@ -206,7 +206,9 @@ ConstructedDefaultSet CONSTRUCTED-DEFAULT ::= {
{ &id 4, &Type SET { a INTEGER, b BIT STRING } } |
{ &id 5, &Type CHOICE { i INTEGER, b BIT STRING } } |
{ &id 6, &Type SEQUENCE OF INTEGER (1..16) } |
- { &id 7, &Type SET OF INTEGER (1..64) }
+ { &id 7, &Type SET OF INTEGER (1..64) } |
+ { &id 8, &Type SEQUENCE OF SEQUENCE { x INTEGER, y INTEGER } } |
+ { &id 9, &Type SET OF SEQUENCE { x INTEGER, y INTEGER } }
}
ConstructedPdu ::= SEQUENCE {
@@ -288,18 +290,196 @@ OstSeq1234 ::= ObjectSetTest{ {Ost1234} }
OstSeq45 ::= ObjectSetTest{ {Ost45} }
OstSeq12345 ::= ObjectSetTest{ {Ost12345} }
+OstSeq12Except ::= ObjectSetTest{ {Ost123 EXCEPT ost3} }
+OstSeq123Except ::= ObjectSetTest{ {Ost12345 EXCEPT Ost45} }
+
+ExOst1 OBJECT-SET-TEST ::= { ost1, ... }
ExOst12 OBJECT-SET-TEST ::= { ost1, ..., ost2 }
ExOst123 OBJECT-SET-TEST ::= { ost3, ..., ExOst12 }
---ExOst1234 OBJECT-SET-TEST ::= { ExOst123, ..., ost4 }
+ExOst1234 OBJECT-SET-TEST ::= { ExOst123, ..., ost4 }
ExOst45 OBJECT-SET-TEST ::= { ost4, ..., ost5 }
ExOst12345 OBJECT-SET-TEST ::= { ExOst123, ..., ExOst45 }
+ExOstSeq1 ::= ObjectSetTest{ {ExOst1} }
ExOstSeq12 ::= ObjectSetTest{ {ExOst12} }
ExOstSeq123 ::= ObjectSetTest{ {ExOst123} }
---ExOstSeq1234 ::= ObjectSetTest{ {ExOst1234} }
+ExOstSeq1234 ::= ObjectSetTest{ {ExOst1234} }
ExOstSeq45 ::= ObjectSetTest{ {ExOst45} }
ExOstSeq12345 ::= ObjectSetTest{ {ExOst12345} }
-END
+ExOstSeq12Except ::= ObjectSetTest{ {ExOst123 EXCEPT ost3} }
+ExOstSeq123Except ::= ObjectSetTest{ {ExOst12345 EXCEPT ExOst45} }
+
+ExInlOst1 OBJECT-SET-TEST ::= {
+ { 1 IS BIT STRING },
+ ...
+}
+ExInlOst12 OBJECT-SET-TEST ::= {
+ { 1 IS BIT STRING },
+ ...,
+ { 2 IS OCTET STRING }
+}
+
+ExInlOstSeq1 ::= ObjectSetTest{ {ExInlOst1} }
+ExInlOstSeq12 ::= ObjectSetTest{ {ExInlOst12} }
+
+--
+-- Test that extensions in a simple class works.
+--
+
+ExtClassSeq ::= SEQUENCE {
+ arg EXT-CLASS.&id({Extend})
+}
+
+EXT-CLASS ::= CLASS {
+ &id INTEGER UNIQUE
+} WITH SYNTAX {
+ ID &id
+}
+
+Extend EXT-CLASS ::= { { ID alt1 } | { ID alt2 }, ... }
+
+alt1 INTEGER ::= 4
+alt2 INTEGER ::= 5
+
+
+--
+-- Test a BIT STRING which is optional in the simplified syntax.
+--
+
+PUBLIC-KEY ::= CLASS {
+ &id INTEGER UNIQUE,
+ &keyUsage KeyUsage OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [OPTIONAL-BIT-STRING &keyUsage]
+}
+
+KeyUsage ::= BIT STRING {
+ digitalSignature (0),
+ nonRepudiation (1),
+ keyEncipherment (2)
+ }
+
+object-with-optional-bit-string PUBLIC-KEY ::= {
+ IDENTIFIER 42
+ OPTIONAL-BIT-STRING {digitalSignature, nonRepudiation, keyEncipherment}
+}
+
+-- Test object identifiers from objects.
+
+CONTAINER ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &rid RELATIVE-OID OPTIONAL,
+ &Type OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIED BY &id
+ [REL-OID &rid]
+ [TYPE &Type]
+}
+
+id1 OBJECT IDENTIFIER ::= {1 2 42}
+obj1 CONTAINER ::= { IDENTIFIED BY id1 REL-OID {100 101} }
+
+value-2 OBJECT IDENTIFIER ::= { value-1 25 }
+value-1 OBJECT IDENTIFIER ::= obj1.&id
+value-3 RELATIVE-OID ::= obj1.&rid
+value-4 OBJECT IDENTIFIER ::= { 1 2 value-3 }
+
+
+-- Test an obscure issue when ATTRIBUTE.&id was not
+-- properly evaluated.
+
+Rdn ::= SingleAttribute { {SupportedAttributes} }
+
+ATTRIBUTE ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Type OPTIONAL
+}
+
+SingleAttribute{ATTRIBUTE:AttrSet} ::= SEQUENCE {
+ type ATTRIBUTE.&id({AttrSet}),
+ value ATTRIBUTE.&Type({AttrSet}{@type})
+}
+AttributeType ::= ATTRIBUTE.&id
+SupportedAttributes ATTRIBUTE ::= { at-name }
+
+id-at OBJECT IDENTIFIER ::= { 2 5 4 41 }
+id-at-name AttributeType ::= id-at
+at-name ATTRIBUTE ::= { &Type PrintableString, &id id-at-name }
+
+--
+-- Test using an alias for TYPE-IDENTIFIER.
+--
+
+TiAliasParameterized { TI-ALIAS:InfoObjectSet } ::= SEQUENCE {
+ algorithm TI-ALIAS.&id({InfoObjectSet}),
+ parameters TI-ALIAS.&Type({InfoObjectSet} {@algorithm}) OPTIONAL
+}
+
+TI-ALIAS ::= TYPE-IDENTIFIER
+
+TiAliasSeq ::= SEQUENCE {
+ prf TiAliasParameterized {{TiAliasSet}}
+}
+
+TiAliasSet TI-ALIAS ::= {
+ {NULL IDENTIFIED BY {2 1 2}},
+ ...
+}
+
+--
+-- Test using an alias for a class.
+--
+
+ALIAS-CONTAINER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &obj INDIRECT-CLASS
+}
+
+INDIRECTED-CLASS ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Type
+}
+
+INDIRECT-CLASS ::= INDIRECTED-CLASS
+
+--
+-- Indirect ObjectClassFieldType in a SEQUENCE.
+--
+
+ContentInfo ::= SEQUENCE {
+ contentType ContentType, -- Indirect ObjectClassFieldType
+ content TYPE-IDENTIFIER.&Type({Contents}{@contentType})
+OPTIONAL
+}
+
+Contents TYPE-IDENTIFIER ::= {
+ {IA5String IDENTIFIED BY id-content-type}
+}
+
+ContentType ::= TYPE-IDENTIFIER.&id({Contents})
+id-content-type ContentType ::= { 2 7 8 9 }
+
+--
+-- Tricky parsing of simplified syntax.
+--
+
+TrickyType-1 ::= BIT STRING
+TrickyType-2 ::= OCTET STRING
+
+TRICKY ::= CLASS {
+ &Type1,
+ &Type2
+} WITH SYNTAX {
+ TYPE &Type1 &Type2
+}
+
+tricky-object TRICKY ::= {TYPE TrickyType-1 TrickyType-2}
+
+tricky-bit-string tricky-object.&Type1 ::= '1011'B
+tricky-octet-string tricky-object.&Type1 ::= 'CAFE'H
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1 b/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1
new file mode 100644
index 0000000000..13981b546d
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/InfObjExtract.asn1
@@ -0,0 +1,136 @@
+InfObjExtract DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+DATA-CLASS ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Type
+} WITH SYNTAX {
+ ID &id
+ TYPE &Type
+}
+
+data-object-1 DATA-CLASS ::= { ID 1 TYPE BOOLEAN }
+data-object-2 DATA-CLASS ::= { ID 2 TYPE OCTET STRING }
+data-object-3 DATA-CLASS ::= { ID 3 TYPE BIT STRING }
+
+ObjSet DATA-CLASS ::= {
+ holder-object-1.&obj |
+ data-object-2 |
+ data-object-3,
+ ...
+}
+
+OBJ-SET DATA-CLASS ::= {
+ holder-object-1.&obj |
+ data-object-2 |
+ data-object-3,
+ ...
+}
+
+SingleElementSet DATA-CLASS ::= {
+ holder-object-1.&obj
+}
+
+holder-object-1 HOLDER-CLASS ::= {
+ OBJ data-object-1
+}
+
+holder-object-2 HOLDER-CLASS ::= {
+ OBJ-SET {data-object-1}
+}
+
+holder-object-3 HOLDER-CLASS ::= {
+ OBJ-SET {holder-object-2.&ObjSet}
+}
+
+-- Note: References to object sets with names in all uppercase/hyphens
+-- may be represented differently compared to object sets with names
+-- that contain lowercase letters. CAVEAT TESTOR.
+
+HOLDER-OBJECTS HOLDER-CLASS ::= { holder-object-2 }
+HolderObjects HOLDER-CLASS ::= { holder-object-3 }
+
+holder-object-4 HOLDER-CLASS ::= {
+ OBJ-SET { HOLDER-OBJECTS.&ObjSet }
+}
+
+holder-object-5 HOLDER-CLASS ::= {
+ OBJ-SET { HolderObjects.&ObjSet }
+}
+
+holder-object-6 HOLDER-CLASS ::= {
+ OBJ-SET { OBJ-SET }
+}
+
+holder-object-7 HOLDER-CLASS ::= {
+ OBJ-SET { ObjSet }
+}
+
+HOLDER-CLASS ::= CLASS {
+ &obj DATA-CLASS OPTIONAL,
+ &ObjSet DATA-CLASS OPTIONAL
+} WITH SYNTAX {
+ [OBJ &obj]
+ [OBJ-SET &ObjSet]
+}
+
+TestSeq{DATA-CLASS:ObjectSet} ::= SEQUENCE {
+ id DATA-CLASS.&id ({ObjectSet}),
+ data DATA-CLASS.&Type ({ObjectSet}{@id})
+}
+
+DataSeq-1 ::= TestSeq{ {ObjSet} }
+DataSeq-2 ::= TestSeq{ {holder-object-3.&ObjSet} }
+
+DataSeq-3 ::= TestSeq{ {holder-object-4.&ObjSet} }
+DataSeq-4 ::= TestSeq{ {holder-object-5.&ObjSet} }
+DataSeq-5 ::= TestSeq{ {holder-object-6.&ObjSet} }
+DataSeq-6 ::= TestSeq{ {holder-object-7.&ObjSet} }
+
+DataSeqSingleSet-1 ::= TestSeq{ {SingleElementSet} }
+DataSeqSingleSet-2 ::= TestSeq{ {holder-object-1.&obj} }
+
+--
+-- Test ObjectSetFromObjects.
+--
+
+OBJ-CLASS ::= CLASS {
+ &id INTEGER UNIQUE,
+ &Data OPTIONAL,
+ &Obj OBJ-CLASS OPTIONAL,
+ &obj OBJ-CLASS OPTIONAL
+}
+
+obj-class-obj-1 OBJ-CLASS ::= { &id 1, &Data BOOLEAN }
+
+obj-class-obj-2 OBJ-CLASS ::= { &id 2, &Data BOOLEAN,
+ &Obj {obj-class-obj-1} }
+
+obj-class-obj-3 OBJ-CLASS ::= { &id 3, &Data BOOLEAN,
+ &obj {&id 99, &Obj {obj-class-obj-1}} }
+
+obj-class-obj-4 OBJ-CLASS ::= { &id 4, &Data BOOLEAN, &obj obj-class-obj-2 }
+
+obj-class-obj-5 OBJ-CLASS ::= { &id 5, &Data BOOLEAN,
+ &Obj {obj-class-obj-4.&obj} }
+
+ObjClassSet OBJ-CLASS ::= { obj-class-obj-3.&obj.&Obj |
+ obj-class-obj-4.&Obj | -- Non-existing field
+ obj-class-obj-5.&Obj
+ }
+
+TestObjClassSeq{OBJ-CLASS:ObjectSet} ::= SEQUENCE {
+ id OBJ-CLASS.&id ({ObjectSet}),
+ data OBJ-CLASS.&Data ({ObjectSet}{@id})
+}
+
+ObjClassSeq-1 ::= TestObjClassSeq{{ObjClassSet}}
+
+--
+-- Test several levels of inlined definitions.
+--
+
+obj-class-obj-6 OBJ-CLASS ::= { &id 6, &Obj {{&id 100, &Data INTEGER}},
+ &Data INTEGER }
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1 b/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1
deleted file mode 100644
index 66912ef693..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/MissingEnd.asn1
+++ /dev/null
@@ -1,5 +0,0 @@
-MissingEnd DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
diff --git a/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1 b/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1
index 9368e8dceb..9193ed495c 100644
--- a/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ObjIdValues.asn1
@@ -50,6 +50,7 @@ itu-t-o OBJECT IDENTIFIER ::= {itu-t recommendation o}
itu-t-p OBJECT IDENTIFIER ::= {itu-t recommendation p}
itu-t-q OBJECT IDENTIFIER ::= {itu-t recommendation q}
itu-t-r OBJECT IDENTIFIER ::= {itu-t recommendation r}
+itu-t-s OBJECT IDENTIFIER ::= {itu-t recommendation s}
itu-t-t OBJECT IDENTIFIER ::= {itu-t recommendation t}
itu-t-u OBJECT IDENTIFIER ::= {itu-t recommendation u}
itu-t-v OBJECT IDENTIFIER ::= {itu-t recommendation v}
diff --git a/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1 b/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1
index 68fc782f33..d203b6c816 100644
--- a/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ParamBasic.asn1
@@ -42,4 +42,37 @@ SIGNATURE-ALGORITHM ::= CLASS {
KEY &id CONTAINING &Type
}
+alg-seq-1 AnAlgorithm ::= { algorithm 1, type 42 }
+alg-seq-2 AnAlgorithm ::= { algorithm 2, type TRUE }
+
+--
+-- Test that indirect classes references are resolved.
+--
+
+AlgorithmIdentifier2 { ALGORITHM-IDENTIFIER:InfoObjectSet } ::= SEQUENCE {
+ algorithm ALGORITHM-IDENTIFIER.&id({InfoObjectSet}),
+ parameters ALGORITHM-IDENTIFIER.&Type({InfoObjectSet} {@algorithm}) OPTIONAL
+}
+
+ALGORITHM-IDENTIFIER ::= TYPE-IDENTIFIER
+
+Seq ::= SEQUENCE {
+ c1 AlgorithmIdentifier2 {{ObjectSet-1}},
+ c2 AlgorithmIdentifier2 {{ObjectSet-2}}
+}
+
+ObjectSet-1 ALGORITHM-IDENTIFIER ::= { {INTEGER IDENTIFIED BY {2 1 1}}, ... }
+ObjectSet-2 ALGORITHM-IDENTIFIER ::= { ... }
+
+-- Test a value that uses the instantiation of a parameterized type inline.
+-- (Adapted from PKCS-5.)
+--
+
+algid-hmacWithSHA1 AlgorithmIdentifier2 {{ObjectSet-3}} ::=
+ {algorithm id-hmacWithSHA1, parameters NULL : NULL}
+
+ObjectSet-3 TYPE-IDENTIFIER ::= { {NULL IDENTIFIED BY id-hmacWithSHA1} }
+
+id-hmacWithSHA1 OBJECT IDENTIFIER ::= {2 9 9 9 7}
+
END
diff --git a/lib/asn1/test/asn1_SUITE_data/Prim.asn1 b/lib/asn1/test/asn1_SUITE_data/Prim.asn1
index cc0e61422a..b4c011fd39 100644
--- a/lib/asn1/test/asn1_SUITE_data/Prim.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/Prim.asn1
@@ -24,6 +24,8 @@ BEGIN
friday(5),saturday(6),sunday(7)}
SingleEnumVal ::= ENUMERATED {true}
SingleEnumValExt ::= ENUMERATED {true, ...}
+ NegEnumVal ::= ENUMERATED {neg(-1), ..., zero(0)}
+ EnumVal128 ::= ENUMERATED {val(128)}
ObjId ::= OBJECT IDENTIFIER
diff --git a/lib/asn1/test/asn1_SUITE_data/SelectionType.asn b/lib/asn1/test/asn1_SUITE_data/SelectionType.asn
index d7bfbf1788..6163f390dd 100644
--- a/lib/asn1/test/asn1_SUITE_data/SelectionType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/SelectionType.asn
@@ -14,7 +14,7 @@ Element ::= CHOICE {bool BOOLEAN,
utf UTF8String,
ro RELATIVE-OID,
nums NumericString,
- symbol PrintableString,
+ symbol PrintableString,
telet TeletexString,
t61 T61String,
video VideotexString,
@@ -23,13 +23,14 @@ Element ::= CHOICE {bool BOOLEAN,
generalizedTime GeneralizedTime,
gs GraphicString,
vs VisibleString,
--- iso64 ISO646String,
generalString GeneralString,
univ UniversalString,
cs CHARACTER STRING,
bmp BMPString}
-MendeleyevTable ::= SEQUENCE OF symbol < Element
+MendeleyevTable ::= SEQUENCE OF symbol < Element
+MendeleyevSet ::= SET OF atomic-no < Element
+
BoolType ::= bool < Element
einsteinium symbol < Element ::= "Es"
@@ -51,7 +52,6 @@ utctimev utctime < Element ::= "9805281429Z"
gTime generalizedTime < Element ::= "19980528142905.1"
gsv gs < Element ::= "graphic"
vsv vs < Element ::= "visible"
---iso64v iso64 < Element ::= "iso"
gStringv generalString < Element ::= "general"
univv univ < Element ::= "Universal"
bmov bmp < Element ::= "bmp"
diff --git a/lib/asn1/test/asn1_SUITE_data/Seq.py b/lib/asn1/test/asn1_SUITE_data/Seq.py
index f345373ab5..b68f9045a6 100644
--- a/lib/asn1/test/asn1_SUITE_data/Seq.py
+++ b/lib/asn1/test/asn1_SUITE_data/Seq.py
@@ -142,7 +142,10 @@ SeqImp3 ::= SET
set Set1
}
-
+SeqCompOf ::= SEQUENCE {
+ ...,
+ COMPONENTS OF SeqS3
+}
END
diff --git a/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn b/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn
index 7de9134096..bb85c9e418 100644
--- a/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn
+++ b/lib/asn1/test/asn1_SUITE_data/SeqOptional2.asn
@@ -15,10 +15,10 @@ SeqOpt1Imp ::= SEQUENCE
bool1 [1] BOOLEAN OPTIONAL,
int1 INTEGER,
seq1 [2] SeqIn OPTIONAL,
- seq2 [2] SeqIn OPTIONAL,
+ seq2 [3] SeqIn OPTIONAL,
...,
- int2 [3] SeqIn,
- int3 [3] SeqIn
+ int2 [4] SeqIn,
+ int3 [5] SeqIn
}
SeqOpt1Exp ::= SEQUENCE
diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn b/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn
deleted file mode 100644
index 436815aa9b..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComma.asn
+++ /dev/null
@@ -1,10 +0,0 @@
-SequenceBadComma DEFINITIONS IMPLICIT TAGS ::=
-BEGIN
-EXPORTS Person;
-
-Person ::= [PRIVATE 19] SEQUENCE {,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER OPTIONAL
- }
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1 b/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1
deleted file mode 100644
index 8b2b8816db..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentName.asn1
+++ /dev/null
@@ -1,10 +0,0 @@
-SequenceBadComponentName DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
-Typ ::= SEQUENCE {
- a INTEGER,
- C Typ}
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1 b/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1
deleted file mode 100644
index 0c33f48906..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/SequenceBadComponentType.asn1
+++ /dev/null
@@ -1,10 +0,0 @@
-SequenceBadComponentType DEFINITIONS ::=
-BEGIN
-
-T ::= Typ
-
-Typ ::= SEQUENCE {
- a b,
- c T}
-
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/Syntax.py b/lib/asn1/test/asn1_SUITE_data/Syntax.py
deleted file mode 100644
index 867d1148e1..0000000000
--- a/lib/asn1/test/asn1_SUITE_data/Syntax.py
+++ /dev/null
@@ -1,10 +0,0 @@
-Syntax DEFINITIONS IMPLICIT TAGS ::=
-BEGIN
-EXPORTS Person;
-
-Person ::= [PRIVATE 19] SEQUENCE {,
- name PrintableString,
- location INTEGER {home(0),field(1),roving(2)},
- age INTEGER OPTIONAL
- }
-END
diff --git a/lib/asn1/test/asn1_SUITE_data/ValueTest.asn b/lib/asn1/test/asn1_SUITE_data/ValueTest.asn
index dae9ae498a..b2c59d686a 100644
--- a/lib/asn1/test/asn1_SUITE_data/ValueTest.asn
+++ b/lib/asn1/test/asn1_SUITE_data/ValueTest.asn
@@ -1,4 +1,4 @@
-ValueTest DEFINITIONS ::=
+ValueTest DEFINITIONS AUTOMATIC TAGS ::=
BEGIN
@@ -23,8 +23,15 @@ vENUMERATED RadioButton ::= button1
vBS BSNNL ::= {zero,two}
vNULL NULL ::= NULL
vOS OCTET STRING ::= '313233'H
-vOD OBJECT IDENTIFIER ::= {2 1 1}
+-- OBJECT IDENTIFIER
+vOD OBJECT IDENTIFIER ::= {2 1 1}
+one INTEGER ::= 1
+integer-first OBJECT IDENTIFIER ::= {one 2}
+rel-oid-1 RELATIVE-OID ::= {2 4 5}
+include-roid OBJECT IDENTIFIER ::= {0 rel-oid-1}
+include-oid OBJECT IDENTIFIER ::= {integer-first 1}
+include-all OBJECT IDENTIFIER ::= {integer-first 1 rel-oid-1 42}
--Character strings
numericstring NumericString ::= "01234567"
@@ -41,7 +48,6 @@ objectdescriptor ObjectDescriptor ::= "ObjectDescriptor"
graphicstring GraphicString ::= "GraphicString"
generalstring GeneralString ::= "GeneralString"
bmpstring1 BMPString ::= "BMPString"
---bmpstring2 BMPString ::= [{0,0,0,66},{0,0,0,77},{0,0,0,80},{0,0,0,115},{0,0,0,116},{0,0,0,114},{0,0,0,105},{0,0,0,110},{0,0,0,103}]
latinCapitalLetterA UniversalString ::= {0,0,0,65}
greekCapitalLetterSigma UniversalString ::= {0,0,3,145}
my-universalstring UniversalString ::= {"This is a capital A: ",
@@ -50,4 +56,88 @@ my-universalstring UniversalString ::= {"This is a capital A: ",
greekCapitalLetterSigma,
"; try and spot the difference!"}
+-- Useful parameterized SEQUENCE.
+ParamSeq{Type} ::= SEQUENCE {
+ a Type
+}
+
+-- Integer values.
+IntegerSeq ::= ParamSeq{INTEGER}
+someInteger INTEGER ::= 42
+integerSeq1 IntegerSeq ::= { a otherInteger }
+otherInteger INTEGER ::= someInteger
+
+--
+-- Values from objects.
+--
+int-from-object-1 INTEGER ::= int-holder-2.&obj.&id
+int-from-object-2 INTEGER ::= int-holder-2.&id
+
+INT-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &obj INT-HOLDER OPTIONAL
+} WITH SYNTAX {
+ ID &id
+ [OBJ &obj]
+}
+
+int-holder-1 INT-HOLDER ::= { ID 2 }
+int-holder-2 INT-HOLDER ::= { ID 4 OBJ int-holder-1 }
+
+II ::= INTEGER (int-from-object-1..int-from-object-2)
+
+-- Recursive OCTET STRING definitions.
+
+OS-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &os OCTET STRING
+} WITH SYNTAX {
+ ID &id OS &os
+}
+
+os-holder-1 OS-HOLDER ::= { ID 1 OS '4041FF'H }
+
+OctetStringSeq ::= ParamSeq{OCTET STRING}
+
+someOctetString OCTET STRING ::= '404142'H
+
+octetStringSeq1 OctetStringSeq ::= { a someOctetString }
+octetStringSeq2 OctetStringSeq ::= { a otherOctetString }
+octetStringSeq3 OctetStringSeq ::= { a os-holder-1.&os }
+
+otherOctetString OCTET STRING ::= someOctetString
+
+os-1 OCTET STRING ::= os-2
+os-2 OCTET STRING ::= os-holder-1.&os
+
+-- Recursive BIT STRING definitions.
+
+BS-HOLDER ::= CLASS {
+ &id INTEGER UNIQUE,
+ &bs BIT STRING,
+ &named-bs NamedBsType
+} WITH SYNTAX {
+ ID &id BS &bs NAMED-BS &named-bs
+}
+bs-holder-1 BS-HOLDER ::= { ID 1 BS '101'B NAMED-BS {a,c} }
+
+NamedBsType ::= BIT STRING {a(0),b(1),c(2)}
+BsSeq ::= SEQUENCE {
+ a BIT STRING,
+ b NamedBsType
+}
+
+someBitString BIT STRING ::= '101101'B
+
+bsSeq1 BsSeq ::= { a someBitString, b someNamedBs }
+bsSeq2 BsSeq ::= { a otherBitString, b someOtherNamedBs }
+bsSeq3 BsSeq ::= { a bs-holder-1.&bs, b bs-holder-1.&named-bs }
+
+otherBitString BIT STRING ::= someBitString
+bsFromObjectInd BIT STRING ::= bsFromObject
+bsFromObject BIT STRING ::= bs-holder-1.&bs
+
+someOtherNamedBs NamedBsType ::= someNamedBs
+someNamedBs NamedBsType ::= {c}
+
END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ACSE-1.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ACSE-1.asn1
index 3f1385323a..3f1385323a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ACSE-1.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ACSE-1.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1
new file mode 100644
index 0000000000..f912966c72
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AlgorithmInformation-2009.asn1
@@ -0,0 +1,466 @@
+AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+DEFINITIONS EXPLICIT TAGS ::=
+BEGIN
+EXPORTS ALL;
+IMPORTS
+
+KeyUsage
+FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-implicit-02(59)} ;
+
+-- Suggested prefixes for algorithm objects are:
+--
+-- mda- Message Digest Algorithms
+-- sa- Signature Algorithms
+-- kta- Key Transport Algorithms (Asymmetric)
+-- kaa- Key Agreement Algorithms (Asymmetric)
+-- kwa- Key Wrap Algorithms (Symmetric)
+-- kda- Key Derivation Algorithms
+-- maca- Message Authentication Code Algorithms
+-- pk- Public Key
+-- cea- Content (symmetric) Encryption Algorithms
+-- cap- S/MIME Capabilities
+
+ParamOptions ::= ENUMERATED {
+ required, -- Parameters MUST be encoded in structure
+ preferredPresent, -- Parameters SHOULD be encoded in structure
+ preferredAbsent, -- Parameters SHOULD NOT be encoded in structure
+ absent, -- Parameters MUST NOT be encoded in structure
+ inheritable, -- Parameters are inherited if not present
+ optional, -- Parameters MAY be encoded in the structure
+ ...
+}
+
+-- DIGEST-ALGORITHM
+--
+-- Describes the basic information for ASN.1 and a digest
+-- algorithm.
+--
+-- &id - contains the OID identifying the digest algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+--
+-- Additional information such as the length of the hash could have
+-- been encoded. Without a clear understanding of what information
+-- is needed by applications, such extraneous information was not
+-- considered to be of sufficient importance.
+--
+-- Example:
+-- mda-sha1 DIGEST-ALGORITHM ::= {
+-- IDENTIFIER id-sha1
+-- PARAMS TYPE NULL ARE preferredAbsent
+-- }
+
+DIGEST-ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence ]
+}
+
+-- SIGNATURE-ALGORITHM
+--
+-- Describes the basic properties of a signature algorithm
+--
+-- &id - contains the OID identifying the signature algorithm
+-- &Value - contains a type definition for the value structure of
+-- the signature; if absent, implies that no ASN.1
+-- encoding is performed on the value
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &HashSet - The set of hash algorithms used with this
+-- signature algorithm
+-- &PublicKeySet - the set of public key algorithms for this
+-- signature algorithm
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- sig-RSA-PSS SIGNATURE-ALGORITHM ::= {
+-- IDENTIFIER id-RSASSA-PSS
+-- PARAMS TYPE RSASSA-PSS-params ARE required
+-- HASHES { mda-sha1 | mda-md5, ... }
+-- PUBLIC-KEYS { pk-rsa | pk-rsa-pss }
+-- }
+
+SIGNATURE-ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Value OPTIONAL,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &HashSet DIGEST-ALGORITHM OPTIONAL,
+ &PublicKeySet PUBLIC-KEY OPTIONAL,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [VALUE &Value]
+ [PARAMS [TYPE &Params] ARE &paramPresence ]
+ [HASHES &HashSet]
+ [PUBLIC-KEYS &PublicKeySet]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- PUBLIC-KEY
+--
+-- Describes the basic properties of a public key
+--
+-- &id - contains the OID identifying the public key
+-- &KeyValue - contains the type for the key value
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &keyUsage - contains the set of bits that are legal for this
+-- key type. Note that is does not make any statement
+-- about how bits may be paired.
+-- &PrivateKey - contains a type structure for encoding the private
+-- key information.
+--
+-- Example:
+-- pk-rsa-pss PUBLIC-KEY ::= {
+-- IDENTIFIER id-RSASSA-PSS
+-- KEY RSAPublicKey
+-- PARAMS TYPE RSASSA-PSS-params ARE optional
+-- CERT-KEY-USAGE { .... }
+-- }
+
+PUBLIC-KEY ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &KeyValue OPTIONAL,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &keyUsage KeyUsage OPTIONAL,
+ &PrivateKey OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [KEY &KeyValue]
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [CERT-KEY-USAGE &keyUsage]
+ [PRIVATE-KEY &PrivateKey]
+}
+
+-- KEY-TRANSPORT
+--
+-- Describes the basic properties of a key transport algorithm
+--
+-- &id - contains the OID identifying the key transport algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &PublicKeySet - specifies which public keys are used with
+-- this algorithm
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kta-rsaTransport KEY-TRANSPORT ::= {
+-- IDENTIFIER &id
+-- PARAMS TYPE NULL ARE required
+-- PUBLIC-KEYS { pk-rsa | pk-rsa-pss }
+-- }
+
+KEY-TRANSPORT ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &PublicKeySet PUBLIC-KEY OPTIONAL,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [PUBLIC-KEYS &PublicKeySet]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- KEY-AGREE
+--
+-- Describes the basic properties of a key agreement algorithm
+--
+-- &id - contains the OID identifying the key agreement algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &PublicKeySet - specifies which public keys are used with
+-- this algorithm
+-- &Ukm - type of user keying material used
+-- &ukmPresence - specifies the requirements to define the UKM field
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kaa-dh-static-ephemeral KEY-AGREE ::= {
+-- IDENTIFIER id-alg-ESDH
+-- PARAMS TYPE KeyWrapAlgorithm ARE required
+-- PUBLIC-KEYS {
+-- {IDENTIFIER dh-public-number KEY DHPublicKey
+-- PARAMS TYPE DHDomainParameters ARE inheritable }
+-- }
+-- - - UKM should be present but is not separately ASN.1-encoded
+-- UKM ARE preferredPresent
+-- }
+
+KEY-AGREE ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &PublicKeySet PUBLIC-KEY OPTIONAL,
+ &Ukm OPTIONAL,
+ &ukmPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [PUBLIC-KEYS &PublicKeySet]
+ [UKM [TYPE &Ukm] ARE &ukmPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- KEY-WRAP
+--
+-- Describes the basic properties of a key wrap algorithm
+--
+-- &id - contains the OID identifying the key wrap algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kwa-cms3DESwrap KEY-WRAP ::= {
+-- IDENTIFIER id-alg-CMS3DESwrap
+-- PARAMS TYPE NULL ARE required
+-- }
+
+KEY-WRAP ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+-- KEY-DERIVATION
+--
+-- Describes the basic properties of a key derivation algorithm
+--
+-- &id - contains the OID identifying the key derivation algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- kda-pbkdf2 KEY-DERIVATION ::= {
+-- IDENTIFIER id-PBKDF2
+-- PARAMS TYPE PBKDF2-params ARE required
+-- }
+
+KEY-DERIVATION ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- MAC-ALGORITHM
+--
+-- Describes the basic properties of a message
+-- authentication code (MAC) algorithm
+--
+-- &id - contains the OID identifying the MAC algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &keyed - MAC algorithm is a keyed MAC algorithm
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Some parameters that perhaps should have been added would be
+-- fields with the minimum and maximum MAC lengths for
+-- those MAC algorithms that allow truncations.
+--
+-- Example:
+-- maca-hmac-sha1 MAC-ALGORITHM ::= {
+-- IDENTIFIER hMAC-SHA1
+-- PARAMS TYPE NULL ARE preferredAbsent
+--     IS-KEYED-MAC TRUE
+-- SMIME-CAPS {IDENTIFIED BY hMAC-SHA1}
+-- }
+
+MAC-ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &keyed BOOLEAN,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ IS-KEYED-MAC &keyed
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- CONTENT-ENCRYPTION
+--
+-- Describes the basic properties of a content encryption
+-- algorithm
+--
+-- &id - contains the OID identifying the content
+-- encryption algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- Example:
+-- cea-3DES-cbc CONTENT-ENCRYPTION ::= {
+-- IDENTIFIER des-ede3-cbc
+-- PARAMS TYPE IV ARE required
+-- SMIME-CAPS { IDENTIFIED BY des-ede3-cbc }
+-- }
+
+CONTENT-ENCRYPTION ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
+
+-- ALGORITHM
+--
+-- Describes a generic algorithm identifier
+--
+-- &id - contains the OID identifying the algorithm
+-- &Params - if present, contains the type for the algorithm
+-- parameters; if absent, implies no parameters
+-- &paramPresence - parameter presence requirement
+-- &smimeCaps - contains the object describing how the S/MIME
+-- capabilities are presented.
+--
+-- This would be used for cases where an algorithm of an unknown
+-- type is used.  In general, however, one should either define
+-- a more complete algorithm structure (such as the one above)
+-- or use the TYPE-IDENTIFIER class.
+
+ALGORITHM ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Params OPTIONAL,
+ &paramPresence ParamOptions DEFAULT absent,
+ &smimeCaps SMIME-CAPS OPTIONAL
+} WITH SYNTAX {
+ IDENTIFIER &id
+ [PARAMS [TYPE &Params] ARE &paramPresence]
+ [SMIME-CAPS &smimeCaps]
+}
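+
+-- Example (editorial illustration, not part of the RFC module text;
+-- assumes an object identifier value id-sha256 is in scope):
+--
+-- alg-sha256 ALGORITHM ::= {
+--     IDENTIFIER id-sha256
+--     PARAMS TYPE NULL ARE preferredAbsent
+-- }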
+
+-- AlgorithmIdentifier
+--
+-- Provides the generic structure that is used to encode algorithm
+-- identification and the parameters associated with the
+-- algorithm.
+--
+-- The first parameter represents the type of the algorithm being
+-- used.
+-- The second parameter represents an object set containing the
+-- algorithms that may occur in this situation.
+-- The initial list of required algorithms should occur to the
+-- left of an extension marker; all other algorithms should
+-- occur to the right of an extension marker.
+--
+-- The object class ALGORITHM can be used for generic unspecified
+-- items.
+-- If new ALGORITHM classes are defined, the fields &id and &Params
+-- need to be present as fields in the object in order to use
+-- this parameterized type.
+--
+-- Example:
+-- SignatureAlgorithmIdentifier ::=
+-- AlgorithmIdentifier{SIGNATURE-ALGORITHM, {SignatureAlgSet}}
+
+AlgorithmIdentifier{ALGORITHM-TYPE, ALGORITHM-TYPE:AlgorithmSet} ::=
+ SEQUENCE {
+ algorithm ALGORITHM-TYPE.&id({AlgorithmSet}),
+ parameters ALGORITHM-TYPE.
+ &Params({AlgorithmSet}{@algorithm}) OPTIONAL
+ }
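+
+-- Editorial note: when the object selected from AlgorithmSet has no
+-- &Params field, the OPTIONAL parameters component is simply omitted
+-- from values of this SEQUENCE; no NULL placeholder is implied.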
+
+-- S/MIME Capabilities
+--
+-- We have moved the SMIME-CAPS from the module for RFC 3851 to here
+-- because it is used in RFC 4262 (X.509 Certificate Extension for
+-- S/MIME Capabilities)
+--
+--
+-- This class is used to represent an S/MIME capability. S/MIME
+-- capabilities are used to represent what algorithm capabilities
+-- an individual has. The classic example was the content encryption
+-- algorithm RC2 where the algorithm id and the RC2 key lengths
+-- supported needed to be advertised, but the IV used is not fixed.
+-- Thus, for RC2 we used
+--
+-- cap-RC2CBC SMIME-CAPS ::= {
+-- TYPE INTEGER ( 40 | 128 ) IDENTIFIED BY rc2-cbc }
+--
+-- where 40 and 128 represent the RC2 key length in number of bits.
+--
+-- Another example where information needs to be shown is for
+-- RSA-OAEP where only specific hash functions or mask generation
+-- functions are supported, but the saltLength is specified by the
+-- sender and not the recipient.  In this case, one can either
+-- generate a number of capability items or define a new S/MIME
+-- capability type in which multiple hash functions can be specified.
+--
+--
+-- SMIME-CAP
+--
+-- This class is used to associate the type that describes the
+-- capabilities with the object identifier.
+--
+
+SMIME-CAPS ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Type OPTIONAL
+}
+WITH SYNTAX { [TYPE &Type] IDENTIFIED BY &id }
+
+--
+-- Generic type - this is used for defining values.
+--
+
+-- Define a single S/MIME capability encoding
+
+SMIMECapability{SMIME-CAPS:CapabilitySet} ::= SEQUENCE {
+ capabilityID SMIME-CAPS.&id({CapabilitySet}),
+ parameters SMIME-CAPS.&Type({CapabilitySet}
+ {@capabilityID}) OPTIONAL
+}
+
+-- Define a sequence of S/MIME capability values
+
+SMIMECapabilities { SMIME-CAPS:CapabilitySet } ::=
+ SEQUENCE SIZE (1..MAX) OF SMIMECapability{{CapabilitySet} }
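+
+-- Illustrative value (editorial, not part of the RFC module text),
+-- assuming the cap-RC2CBC object and the rc2-cbc OID described above:
+--
+-- rc2Cap SMIMECapability{{cap-RC2CBC}} ::=
+--     { capabilityID rc2-cbc, parameters INTEGER : 128 }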
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1
new file mode 100644
index 0000000000..46b431af40
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AttributeCertificateVersion1-2009.asn1
@@ -0,0 +1,59 @@
+ AttributeCertificateVersion1-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-v1AttrCert-02(49)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ SIGNATURE-ALGORITHM, ALGORITHM, AlgorithmIdentifier{}
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ CertificateSerialNumber, UniqueIdentifier, SIGNED{}
+ FROM PKIX1Explicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) }
+
+ GeneralNames
+ FROM PKIX1Implicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59) }
+
+ AttCertValidityPeriod, IssuerSerial
+ FROM PKIXAttributeCertificate-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47) } ;
+
+ -- Definition extracted from X.509-1997 [X.509-97], but
+ -- different type names are used to avoid collisions.
+
+ AttributeCertificateV1 ::= SIGNED{AttributeCertificateInfoV1}
+
+ AttributeCertificateInfoV1 ::= SEQUENCE {
+ version AttCertVersionV1 DEFAULT v1,
+ subject CHOICE {
+ baseCertificateID [0] IssuerSerial,
+ -- associated with a Public Key Certificate
+ subjectName [1] GeneralNames },
+ -- associated with a name
+ issuer GeneralNames,
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM, {...}},
+ serialNumber CertificateSerialNumber,
+ attCertValidityPeriod AttCertValidityPeriod,
+ attributes SEQUENCE OF AttributeSet{{AttrList}},
+ issuerUniqueID UniqueIdentifier OPTIONAL,
+ extensions Extensions{{AttributeCertExtensionsV1}} OPTIONAL }
+
+ AttCertVersionV1 ::= INTEGER { v1(0) }
+
+ AttrList ATTRIBUTE ::= {...}
+ AttributeCertExtensionsV1 EXTENSION ::= {...}
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/AuthenticationFramework.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/AuthenticationFramework.asn1
index 5cfa9062f0..5cfa9062f0 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/AuthenticationFramework.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/AuthenticationFramework.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/BasicAccessControl.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/BasicAccessControl.asn1
index d8b2b687ae..d8b2b687ae 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/BasicAccessControl.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/BasicAccessControl.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/CertificateExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/CertificateExtensions.asn1
index 0daf2208e9..0daf2208e9 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/CertificateExtensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CertificateExtensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Coding-Attributes.asn1
index 04060cf060..04060cf060 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Presentation-Attributes.asn1
index aed48ac26b..aed48ac26b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Presentation-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Presentation-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Character-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Profile-Attributes.asn1
index 7ba5bf194a..7ba5bf194a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Character-Profile-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Character-Profile-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Colour-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Colour-Attributes.asn1
index 24c7fafc38..24c7fafc38 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Colour-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Colour-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1
new file mode 100644
index 0000000000..3e350294be
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntax-2009.asn1
@@ -0,0 +1,463 @@
+ CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-9(9) smime(16) modules(0) id-mod-cms-2004-02(41) }
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ ParamOptions, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM,
+ PUBLIC-KEY, KEY-DERIVATION, KEY-WRAP, MAC-ALGORITHM,
+ KEY-AGREE, KEY-TRANSPORT, CONTENT-ENCRYPTION, ALGORITHM,
+ AlgorithmIdentifier
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+ SignatureAlgs, MessageDigestAlgs, KeyAgreementAlgs,
+ MessageAuthAlgs, KeyWrapAlgs, ContentEncryptionAlgs,
+ KeyTransportAlgs, KeyDerivationAlgs, KeyAgreePublicKeys
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+
+ Certificate, CertificateList, CertificateSerialNumber,
+ Name, ATTRIBUTE
+ FROM PKIX1Explicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-explicit-02(51) }
+
+ AttributeCertificate
+ FROM PKIXAttributeCertificate-2009
+ { iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-attribute-cert-02(47) }
+
+ AttributeCertificateV1
+ FROM AttributeCertificateVersion1-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-v1AttrCert-02(49) } ;
+
+ -- Cryptographic Message Syntax
+
+ -- The following are used for version numbers using the ASN.1
+ -- idiom "[[n:"
+ -- Version 1 = PKCS #7
+ -- Version 2 = S/MIME V2
+ -- Version 3 = RFC 2630
+ -- Version 4 = RFC 3369
+ -- Version 5 = RFC 3852
+
+ CONTENT-TYPE ::= TYPE-IDENTIFIER
+ ContentType ::= CONTENT-TYPE.&id
+
+ ContentInfo ::= SEQUENCE {
+ contentType CONTENT-TYPE.
+ &id({ContentSet}),
+ content [0] EXPLICIT CONTENT-TYPE.
+ &Type({ContentSet}{@contentType})}
+
+ ContentSet CONTENT-TYPE ::= {
+ -- Define the set of content types to be recognized.
+ ct-Data | ct-SignedData | ct-EncryptedData | ct-EnvelopedData |
+ ct-AuthenticatedData | ct-DigestedData, ... }
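+
+      -- Illustrative value (editorial, not part of the RFC module
+      -- text), using the id-data content type defined later in this
+      -- module:
+      --
+      -- ci ContentInfo ::= {
+      --     contentType id-data,
+      --     content OCTET STRING : '48656C6C6F'H
+      -- }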
+
+ SignedData ::= SEQUENCE {
+ version CMSVersion,
+ digestAlgorithms SET OF DigestAlgorithmIdentifier,
+ encapContentInfo EncapsulatedContentInfo,
+ certificates [0] IMPLICIT CertificateSet OPTIONAL,
+ crls [1] IMPLICIT RevocationInfoChoices OPTIONAL,
+ signerInfos SignerInfos }
+
+ SignerInfos ::= SET OF SignerInfo
+
+ EncapsulatedContentInfo ::= SEQUENCE {
+ eContentType CONTENT-TYPE.&id({ContentSet}),
+ eContent [0] EXPLICIT OCTET STRING
+ ( CONTAINING CONTENT-TYPE.
+ &Type({ContentSet}{@eContentType})) OPTIONAL }
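+
+      -- Editorial note: the CONTAINING constraint above documents that
+      -- the eContent octets carry the encoding of the identified
+      -- content type; the field is still transferred as an ordinary
+      -- OCTET STRING.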
+
+ SignerInfo ::= SEQUENCE {
+ version CMSVersion,
+ sid SignerIdentifier,
+ digestAlgorithm DigestAlgorithmIdentifier,
+ signedAttrs [0] IMPLICIT SignedAttributes OPTIONAL,
+ signatureAlgorithm SignatureAlgorithmIdentifier,
+ signature SignatureValue,
+ unsignedAttrs [1] IMPLICIT Attributes
+ {{UnsignedAttributes}} OPTIONAL }
+
+ SignedAttributes ::= Attributes {{ SignedAttributesSet }}
+
+ SignerIdentifier ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ ...,
+ [[3: subjectKeyIdentifier [0] SubjectKeyIdentifier ]] }
+
+ SignedAttributesSet ATTRIBUTE ::=
+ { aa-signingTime | aa-messageDigest | aa-contentType, ... }
+
+ UnsignedAttributes ATTRIBUTE ::= { aa-countersignature, ... }
+
+ SignatureValue ::= OCTET STRING
+
+ EnvelopedData ::= SEQUENCE {
+ version CMSVersion,
+ originatorInfo [0] IMPLICIT OriginatorInfo OPTIONAL,
+ recipientInfos RecipientInfos,
+ encryptedContentInfo EncryptedContentInfo,
+ ...,
+ [[2: unprotectedAttrs [1] IMPLICIT Attributes
+ {{ UnprotectedAttributes }} OPTIONAL ]] }
+
+ OriginatorInfo ::= SEQUENCE {
+ certs [0] IMPLICIT CertificateSet OPTIONAL,
+ crls [1] IMPLICIT RevocationInfoChoices OPTIONAL }
+
+ RecipientInfos ::= SET SIZE (1..MAX) OF RecipientInfo
+
+ EncryptedContentInfo ::= SEQUENCE {
+ contentType CONTENT-TYPE.&id({ContentSet}),
+ contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier,
+ encryptedContent [0] IMPLICIT OCTET STRING OPTIONAL }
+
+ -- If you want to do constraints, you might use:
+ -- EncryptedContentInfo ::= SEQUENCE {
+ -- contentType CONTENT-TYPE.&id({ContentSet}),
+ -- contentEncryptionAlgorithm ContentEncryptionAlgorithmIdentifier,
+ -- encryptedContent [0] IMPLICIT ENCRYPTED {CONTENT-TYPE.
+ -- &Type({ContentSet}{@contentType}) OPTIONAL }
+ -- ENCRYPTED {ToBeEncrypted} ::= OCTET STRING ( CONSTRAINED BY
+ -- { ToBeEncrypted } )
+
+ UnprotectedAttributes ATTRIBUTE ::= { ... }
+
+ RecipientInfo ::= CHOICE {
+ ktri KeyTransRecipientInfo,
+ ...,
+ [[3: kari [1] KeyAgreeRecipientInfo ]],
+ [[4: kekri [2] KEKRecipientInfo]],
+ [[5: pwri [3] PasswordRecipientInfo,
+ ori [4] OtherRecipientInfo ]] }
+
+ EncryptedKey ::= OCTET STRING
+
+ KeyTransRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 0 or 2
+ rid RecipientIdentifier,
+ keyEncryptionAlgorithm AlgorithmIdentifier
+ {KEY-TRANSPORT, {KeyTransportAlgorithmSet}},
+ encryptedKey EncryptedKey }
+
+ KeyTransportAlgorithmSet KEY-TRANSPORT ::= { KeyTransportAlgs, ... }
+
+ RecipientIdentifier ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ ...,
+ [[2: subjectKeyIdentifier [0] SubjectKeyIdentifier ]] }
+ KeyAgreeRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 3
+ originator [0] EXPLICIT OriginatorIdentifierOrKey,
+ ukm [1] EXPLICIT UserKeyingMaterial OPTIONAL,
+ keyEncryptionAlgorithm AlgorithmIdentifier
+ {KEY-AGREE, {KeyAgreementAlgorithmSet}},
+ recipientEncryptedKeys RecipientEncryptedKeys }
+
+ KeyAgreementAlgorithmSet KEY-AGREE ::= { KeyAgreementAlgs, ... }
+
+ OriginatorIdentifierOrKey ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ subjectKeyIdentifier [0] SubjectKeyIdentifier,
+ originatorKey [1] OriginatorPublicKey }
+
+ OriginatorPublicKey ::= SEQUENCE {
+ algorithm AlgorithmIdentifier {PUBLIC-KEY, {OriginatorKeySet}},
+ publicKey BIT STRING }
+
+ OriginatorKeySet PUBLIC-KEY ::= { KeyAgreePublicKeys, ... }
+
+ RecipientEncryptedKeys ::= SEQUENCE OF RecipientEncryptedKey
+
+ RecipientEncryptedKey ::= SEQUENCE {
+ rid KeyAgreeRecipientIdentifier,
+ encryptedKey EncryptedKey }
+
+ KeyAgreeRecipientIdentifier ::= CHOICE {
+ issuerAndSerialNumber IssuerAndSerialNumber,
+ rKeyId [0] IMPLICIT RecipientKeyIdentifier }
+
+ RecipientKeyIdentifier ::= SEQUENCE {
+ subjectKeyIdentifier SubjectKeyIdentifier,
+ date GeneralizedTime OPTIONAL,
+ other OtherKeyAttribute OPTIONAL }
+
+ SubjectKeyIdentifier ::= OCTET STRING
+
+ KEKRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 4
+ kekid KEKIdentifier,
+ keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier,
+ encryptedKey EncryptedKey }
+
+ KEKIdentifier ::= SEQUENCE {
+ keyIdentifier OCTET STRING,
+ date GeneralizedTime OPTIONAL,
+ other OtherKeyAttribute OPTIONAL }
+ PasswordRecipientInfo ::= SEQUENCE {
+ version CMSVersion, -- always set to 0
+ keyDerivationAlgorithm [0] KeyDerivationAlgorithmIdentifier
+ OPTIONAL,
+ keyEncryptionAlgorithm KeyEncryptionAlgorithmIdentifier,
+ encryptedKey EncryptedKey }
+
+ OTHER-RECIPIENT ::= TYPE-IDENTIFIER
+
+ OtherRecipientInfo ::= SEQUENCE {
+ oriType OTHER-RECIPIENT.
+ &id({SupportedOtherRecipInfo}),
+ oriValue OTHER-RECIPIENT.
+ &Type({SupportedOtherRecipInfo}{@oriType})}
+
+ SupportedOtherRecipInfo OTHER-RECIPIENT ::= { ... }
+
+ DigestedData ::= SEQUENCE {
+ version CMSVersion,
+ digestAlgorithm DigestAlgorithmIdentifier,
+ encapContentInfo EncapsulatedContentInfo,
+ digest Digest, ... }
+
+ Digest ::= OCTET STRING
+
+ EncryptedData ::= SEQUENCE {
+ version CMSVersion,
+ encryptedContentInfo EncryptedContentInfo,
+ ...,
+ [[2: unprotectedAttrs [1] IMPLICIT Attributes
+ {{UnprotectedAttributes}} OPTIONAL ]] }
+
+ AuthenticatedData ::= SEQUENCE {
+ version CMSVersion,
+ originatorInfo [0] IMPLICIT OriginatorInfo OPTIONAL,
+ recipientInfos RecipientInfos,
+ macAlgorithm MessageAuthenticationCodeAlgorithm,
+ digestAlgorithm [1] DigestAlgorithmIdentifier OPTIONAL,
+ encapContentInfo EncapsulatedContentInfo,
+ authAttrs [2] IMPLICIT AuthAttributes OPTIONAL,
+ mac MessageAuthenticationCode,
+ unauthAttrs [3] IMPLICIT UnauthAttributes OPTIONAL }
+
+ AuthAttributes ::= SET SIZE (1..MAX) OF Attribute
+ {{AuthAttributeSet}}
+
+ AuthAttributeSet ATTRIBUTE ::= { aa-contentType | aa-messageDigest
+ | aa-signingTime, ...}
+ MessageAuthenticationCode ::= OCTET STRING
+
+ UnauthAttributes ::= SET SIZE (1..MAX) OF Attribute
+ {{UnauthAttributeSet}}
+
+ UnauthAttributeSet ATTRIBUTE ::= {...}
+
+ --
+ -- General algorithm definitions
+ --
+
+ DigestAlgorithmIdentifier ::= AlgorithmIdentifier
+ {DIGEST-ALGORITHM, {DigestAlgorithmSet}}
+
+ DigestAlgorithmSet DIGEST-ALGORITHM ::= {
+ CryptographicMessageSyntaxAlgorithms-2009.MessageDigestAlgs, ... }
+
+ SignatureAlgorithmIdentifier ::= AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM, {SignatureAlgorithmSet}}
+
+ SignatureAlgorithmSet SIGNATURE-ALGORITHM ::=
+ { SignatureAlgs, ... }
+
+ KeyEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier
+ {KEY-WRAP, {KeyEncryptionAlgorithmSet}}
+
+ KeyEncryptionAlgorithmSet KEY-WRAP ::= { KeyWrapAlgs, ... }
+
+ ContentEncryptionAlgorithmIdentifier ::= AlgorithmIdentifier
+ {CONTENT-ENCRYPTION, {ContentEncryptionAlgorithmSet}}
+
+ ContentEncryptionAlgorithmSet CONTENT-ENCRYPTION ::=
+ { ContentEncryptionAlgs, ... }
+
+ MessageAuthenticationCodeAlgorithm ::= AlgorithmIdentifier
+ {MAC-ALGORITHM, {MessageAuthenticationCodeAlgorithmSet}}
+
+ MessageAuthenticationCodeAlgorithmSet MAC-ALGORITHM ::=
+ { MessageAuthAlgs, ... }
+
+ KeyDerivationAlgorithmIdentifier ::= AlgorithmIdentifier
+ {KEY-DERIVATION, {KeyDerivationAlgs, ...}}
+
+ RevocationInfoChoices ::= SET OF RevocationInfoChoice
+
+ RevocationInfoChoice ::= CHOICE {
+ crl CertificateList,
+ ...,
+ [[5: other [1] IMPLICIT OtherRevocationInfoFormat ]] }
+
+ OTHER-REVOK-INFO ::= TYPE-IDENTIFIER
+
+ OtherRevocationInfoFormat ::= SEQUENCE {
+ otherRevInfoFormat OTHER-REVOK-INFO.
+ &id({SupportedOtherRevokInfo}),
+ otherRevInfo OTHER-REVOK-INFO.
+ &Type({SupportedOtherRevokInfo}{@otherRevInfoFormat})}
+
+ SupportedOtherRevokInfo OTHER-REVOK-INFO ::= { ... }
+
+ CertificateChoices ::= CHOICE {
+ certificate Certificate,
+ extendedCertificate [0] IMPLICIT ExtendedCertificate,
+ -- Obsolete
+ ...,
+ [[3: v1AttrCert [1] IMPLICIT AttributeCertificateV1]],
+ -- Obsolete
+ [[4: v2AttrCert [2] IMPLICIT AttributeCertificateV2]],
+ [[5: other [3] IMPLICIT OtherCertificateFormat]] }
+
+ AttributeCertificateV2 ::= AttributeCertificate
+
+ OTHER-CERT-FMT ::= TYPE-IDENTIFIER
+
+ OtherCertificateFormat ::= SEQUENCE {
+ otherCertFormat OTHER-CERT-FMT.
+ &id({SupportedCertFormats}),
+ otherCert OTHER-CERT-FMT.
+ &Type({SupportedCertFormats}{@otherCertFormat})}
+
+ SupportedCertFormats OTHER-CERT-FMT ::= { ... }
+
+ CertificateSet ::= SET OF CertificateChoices
+
+ IssuerAndSerialNumber ::= SEQUENCE {
+ issuer Name,
+ serialNumber CertificateSerialNumber }
+
+ CMSVersion ::= INTEGER { v0(0), v1(1), v2(2), v3(3), v4(4), v5(5) }
+
+ UserKeyingMaterial ::= OCTET STRING
+
+ KEY-ATTRIBUTE ::= TYPE-IDENTIFIER
+
+ OtherKeyAttribute ::= SEQUENCE {
+          keyAttrId  KEY-ATTRIBUTE.
+              &id({SupportedKeyAttributes}),
+ keyAttr KEY-ATTRIBUTE.
+ &Type({SupportedKeyAttributes}{@keyAttrId})}
+
+ SupportedKeyAttributes KEY-ATTRIBUTE ::= { ... }
+
+ -- Content Type Object Identifiers
+
+ id-ct-contentInfo OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) smime(16) ct(1) 6 }
+
+ ct-Data CONTENT-TYPE ::= {OCTET STRING IDENTIFIED BY id-data}
+
+ id-data OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 1 }
+
+ ct-SignedData CONTENT-TYPE ::=
+ { SignedData IDENTIFIED BY id-signedData}
+
+ id-signedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 2 }
+
+ ct-EnvelopedData CONTENT-TYPE ::=
+ { EnvelopedData IDENTIFIED BY id-envelopedData}
+
+ id-envelopedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 3 }
+
+ ct-DigestedData CONTENT-TYPE ::=
+ { DigestedData IDENTIFIED BY id-digestedData}
+
+ id-digestedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 5 }
+
+ ct-EncryptedData CONTENT-TYPE ::=
+ { EncryptedData IDENTIFIED BY id-encryptedData}
+
+ id-encryptedData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs7(7) 6 }
+
+ ct-AuthenticatedData CONTENT-TYPE ::=
+ { AuthenticatedData IDENTIFIED BY id-ct-authData}
+
+ id-ct-authData OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) ct(1) 2 }
+
+ --
+ -- The CMS Attributes
+ --
+
+ MessageDigest ::= OCTET STRING
+
+ SigningTime ::= Time
+
+ Time ::= CHOICE {
+ utcTime UTCTime,
+ generalTime GeneralizedTime }
+
+ Countersignature ::= SignerInfo
+
+ -- Attribute Object Identifiers
+
+ aa-contentType ATTRIBUTE ::=
+ { TYPE ContentType IDENTIFIED BY id-contentType }
+ id-contentType OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 3 }
+
+ aa-messageDigest ATTRIBUTE ::=
+ { TYPE MessageDigest IDENTIFIED BY id-messageDigest}
+ id-messageDigest OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 4 }
+
+ aa-signingTime ATTRIBUTE ::=
+ { TYPE SigningTime IDENTIFIED BY id-signingTime }
+ id-signingTime OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 5 }
+
+ aa-countersignature ATTRIBUTE ::=
+ { TYPE Countersignature IDENTIFIED BY id-countersignature }
+ id-countersignature OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 6 }
+
+ --
+ -- Obsolete Extended Certificate syntax from PKCS#6
+ --
+
+ ExtendedCertificateOrCertificate ::= CHOICE {
+ certificate Certificate,
+ extendedCertificate [0] IMPLICIT ExtendedCertificate }
+
+ ExtendedCertificate ::= SEQUENCE {
+ extendedCertificateInfo ExtendedCertificateInfo,
+ signatureAlgorithm SignatureAlgorithmIdentifier,
+ signature Signature }
+
+ ExtendedCertificateInfo ::= SEQUENCE {
+ version CMSVersion,
+ certificate Certificate,
+ attributes UnauthAttributes }
+
+ Signature ::= BIT STRING
+
+ Attribute{ ATTRIBUTE:AttrList } ::= SEQUENCE {
+ attrType ATTRIBUTE.
+ &id({AttrList}),
+ attrValues SET OF ATTRIBUTE.
+ &Type({AttrList}{@attrType}) }
+
+ Attributes { ATTRIBUTE:AttrList } ::=
+ SET SIZE (1..MAX) OF Attribute {{ AttrList }}
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1
new file mode 100644
index 0000000000..72e8b270db
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/CryptographicMessageSyntaxAlgorithms-2009.asn1
@@ -0,0 +1,248 @@
+ CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ ParamOptions, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM,
+ PUBLIC-KEY, KEY-DERIVATION, KEY-WRAP, MAC-ALGORITHM,
+ KEY-AGREE, KEY-TRANSPORT, CONTENT-ENCRYPTION, ALGORITHM,
+ AlgorithmIdentifier{}, SMIME-CAPS
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ pk-rsa, pk-dh, pk-dsa, rsaEncryption, DHPublicKey, dhpublicnumber
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)}
+
+ cap-RC2CBC
+ FROM SecureMimeMessageV3dot1-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-msg-v3dot1-02(39)};
+
+ -- 2. Hash algorithms in this document
+
+ MessageDigestAlgs DIGEST-ALGORITHM ::= {
+ -- mda-md5 | mda-sha1,
+ ... }
+
+ -- 3. Signature algorithms in this document
+
+ SignatureAlgs SIGNATURE-ALGORITHM ::= {
+ -- See RFC 3279
+ -- sa-dsaWithSHA1 | sa-rsaWithMD5 | sa-rsaWithSHA1,
+ ... }
+
+ -- 4. Key Management Algorithms
+ -- 4.1 Key Agreement Algorithms
+
+ KeyAgreementAlgs KEY-AGREE ::= { kaa-esdh | kaa-ssdh, ...}
+ KeyAgreePublicKeys PUBLIC-KEY ::= { pk-dh, ...}
+
+ -- 4.2 Key Transport Algorithms
+
+ KeyTransportAlgs KEY-TRANSPORT ::= { kt-rsa, ... }
+
+ -- 4.3 Symmetric Key-Encryption Key Algorithms
+
+ KeyWrapAlgs KEY-WRAP ::= { kwa-3DESWrap | kwa-RC2Wrap, ... }
+
+ -- 4.4 Key Derivation Algorithms
+
+ KeyDerivationAlgs KEY-DERIVATION ::= { kda-PBKDF2, ... }
+
+ -- 5. Content Encryption Algorithms
+
+ ContentEncryptionAlgs CONTENT-ENCRYPTION ::=
+ { cea-3DES-cbc | cea-RC2-cbc, ... }
+
+ -- 6. Message Authentication Code Algorithms
+
+ MessageAuthAlgs MAC-ALGORITHM ::= { maca-hMAC-SHA1, ... }
+
+ -- S/MIME Capabilities for these items
+
+ SMimeCaps SMIME-CAPS ::= {
+ kaa-esdh.&smimeCaps |
+ kaa-ssdh.&smimeCaps |
+ kt-rsa.&smimeCaps |
+ kwa-3DESWrap.&smimeCaps |
+ kwa-RC2Wrap.&smimeCaps |
+ cea-3DES-cbc.&smimeCaps |
+ cea-RC2-cbc.&smimeCaps |
+ maca-hMAC-SHA1.&smimeCaps,
+ ...}
+
+ --
+ --
+ --
+
+ -- Algorithm Identifiers
+
+ -- rsaEncryption OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ -- us(840) rsadsi(113549) pkcs(1) pkcs-1(1) 1 }
+
+ id-alg-ESDH OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 5 }
+
+ id-alg-SSDH OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 10 }
+
+ id-alg-CMS3DESwrap OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 6 }
+
+ id-alg-CMSRC2wrap OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) alg(3) 7 }
+
+ des-ede3-cbc OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) encryptionAlgorithm(3) 7 }
+
+ rc2-cbc OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) encryptionAlgorithm(3) 2 }
+
+ hMAC-SHA1 OBJECT IDENTIFIER ::= { iso(1) identified-organization(3)
+ dod(6) internet(1) security(5) mechanisms(5) 8 1 2 }
+
+ id-PBKDF2 OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-5(5) 12 }
+
+ -- Algorithm Identifier Parameter Types
+
+ KeyWrapAlgorithm ::=
+ AlgorithmIdentifier {KEY-WRAP, {KeyWrapAlgs }}
+
+ RC2wrapParameter ::= RC2ParameterVersion
+ RC2ParameterVersion ::= INTEGER
+
+ CBCParameter ::= IV
+
+ IV ::= OCTET STRING -- exactly 8 octets
+
+ RC2CBCParameter ::= SEQUENCE {
+ rc2ParameterVersion INTEGER (1..256),
+ iv OCTET STRING } -- exactly 8 octets
+
+ maca-hMAC-SHA1 MAC-ALGORITHM ::= {
+ IDENTIFIER hMAC-SHA1
+ PARAMS TYPE NULL ARE preferredAbsent
+ IS-KEYED-MAC TRUE
+ SMIME-CAPS {IDENTIFIED BY hMAC-SHA1}
+ }
+
+ PBKDF2-PRFsAlgorithmIdentifier ::= AlgorithmIdentifier{ ALGORITHM,
+ {PBKDF2-PRFs} }
+
+ alg-hMAC-SHA1 ALGORITHM ::=
+ { IDENTIFIER hMAC-SHA1 PARAMS TYPE NULL ARE required }
+
+ PBKDF2-PRFs ALGORITHM ::= { alg-hMAC-SHA1, ... }
+
+ PBKDF2-SaltSources ALGORITHM ::= { ... }
+
+ PBKDF2-SaltSourcesAlgorithmIdentifier ::=
+ AlgorithmIdentifier {ALGORITHM, {PBKDF2-SaltSources}}
+
+ defaultPBKDF2 PBKDF2-PRFsAlgorithmIdentifier ::=
+ { algorithm alg-hMAC-SHA1.&id, parameters NULL:NULL }
+
+ PBKDF2-params ::= SEQUENCE {
+ salt CHOICE {
+ specified OCTET STRING,
+ otherSource PBKDF2-SaltSourcesAlgorithmIdentifier },
+ iterationCount INTEGER (1..MAX),
+ keyLength INTEGER (1..MAX) OPTIONAL,
+ prf PBKDF2-PRFsAlgorithmIdentifier DEFAULT
+ defaultPBKDF2
+ }
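+
+      -- Illustrative value (editorial, not part of the RFC module
+      -- text; the salt and iteration count are arbitrary):
+      --
+      -- pbkdf2-example PBKDF2-params ::= {
+      --     salt specified : '0102030405060708'H,
+      --     iterationCount 2048,
+      --     prf { algorithm alg-hMAC-SHA1.&id, parameters NULL : NULL }
+      -- }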
+
+ --
+ -- This object is included for completeness. It should not be used
+ -- for encoding of signatures, but was sometimes used in older
+ -- versions of CMS for encoding of RSA signatures.
+ --
+ --
+ -- sa-rsa SIGNATURE-ALGORITHM ::= {
+ -- IDENTIFIER rsaEncryption
+ -- - - value is not ASN.1 encoded
+ -- PARAMS TYPE NULL ARE required
+ -- HASHES {mda-sha1 | mda-md5, ...}
+ -- PUBLIC-KEYS { pk-rsa}
+ -- }
+ --
+ -- No ASN.1 encoding is applied to the signature value
+ -- for these items
+
+ kaa-esdh KEY-AGREE ::= {
+ IDENTIFIER id-alg-ESDH
+ PARAMS TYPE KeyWrapAlgorithm ARE required
+ PUBLIC-KEYS { pk-dh }
+ -- UKM is not ASN.1 encoded
+ UKM ARE optional
+ SMIME-CAPS {TYPE KeyWrapAlgorithm IDENTIFIED BY id-alg-ESDH}
+ }
+
+ kaa-ssdh KEY-AGREE ::= {
+ IDENTIFIER id-alg-SSDH
+ PARAMS TYPE KeyWrapAlgorithm ARE required
+ PUBLIC-KEYS {pk-dh}
+ -- UKM is not ASN.1 encoded
+ UKM ARE optional
+ SMIME-CAPS {TYPE KeyWrapAlgorithm IDENTIFIED BY id-alg-SSDH}
+ }
+
+ dh-public-number OBJECT IDENTIFIER ::= dhpublicnumber
+
+ pk-originator-dh PUBLIC-KEY ::= {
+ IDENTIFIER dh-public-number
+ KEY DHPublicKey
+ PARAMS ARE absent
+ CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly}
+ }
+
+ kwa-3DESWrap KEY-WRAP ::= {
+ IDENTIFIER id-alg-CMS3DESwrap
+ PARAMS TYPE NULL ARE required
+ SMIME-CAPS {IDENTIFIED BY id-alg-CMS3DESwrap}
+ }
+
+ kwa-RC2Wrap KEY-WRAP ::= {
+ IDENTIFIER id-alg-CMSRC2wrap
+ PARAMS TYPE RC2wrapParameter ARE required
+ SMIME-CAPS { IDENTIFIED BY id-alg-CMSRC2wrap }
+ }
+
+ kda-PBKDF2 KEY-DERIVATION ::= {
+ IDENTIFIER id-PBKDF2
+ PARAMS TYPE PBKDF2-params ARE required
+ -- No S/MIME caps defined
+ }
+
+ cea-3DES-cbc CONTENT-ENCRYPTION ::= {
+ IDENTIFIER des-ede3-cbc
+ PARAMS TYPE IV ARE required
+ SMIME-CAPS { IDENTIFIED BY des-ede3-cbc }
+ }
+
+ cea-RC2-cbc CONTENT-ENCRYPTION ::= {
+ IDENTIFIER rc2-cbc
+ PARAMS TYPE RC2CBCParameter ARE required
+ SMIME-CAPS cap-RC2CBC
+ }
+
+ kt-rsa KEY-TRANSPORT ::= {
+ IDENTIFIER rsaEncryption
+ PARAMS TYPE NULL ARE required
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS {IDENTIFIED BY rsaEncryption}
+ }
+
+ -- S/MIME Capabilities - most have no label.
+
+ cap-3DESwrap SMIME-CAPS ::= { IDENTIFIED BY id-alg-CMS3DESwrap }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DOR-definition.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DOR-definition.asn1
index cd3330dc56..cd3330dc56 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DOR-definition.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DOR-definition.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DSAOperationalAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DSAOperationalAttributeTypes.asn1
index df5e8489ea..df5e8489ea 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DSAOperationalAttributeTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DSAOperationalAttributeTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Default-Value-Lists.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Default-Value-Lists.asn1
index ef1187ba8c..ef1187ba8c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Default-Value-Lists.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Default-Value-Lists.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAbstractService.asn1
index 5a5d310729..5a5d310729 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAccessProtocol.asn1
index 10d6979f6d..10d6979f6d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryAccessProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryAccessProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryInformationShadowProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryInformationShadowProtocol.asn1
index 91c0a865f7..91c0a865f7 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryInformationShadowProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryInformationShadowProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingManagementProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingManagementProtocol.asn1
index e3e1f95621..e3e1f95621 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingManagementProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingManagementProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingTypes.asn1
index 9df5d2783a..9df5d2783a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryOperationalBindingTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryOperationalBindingTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryProtectionMappings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryProtectionMappings.asn1
index 37c6cac261..37c6cac261 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryProtectionMappings.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryProtectionMappings.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryShadowAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryShadowAbstractService.asn1
index acbb692b6f..acbb692b6f 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectoryShadowAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectoryShadowAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DirectorySystemProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectorySystemProtocol.asn1
index cace79d109..cace79d109 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DirectorySystemProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DirectorySystemProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/DistributedOperations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/DistributedOperations.asn1
index 72e791f10c..72e791f10c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/DistributedOperations.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/DistributedOperations.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Document-Profile-Descriptor.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Document-Profile-Descriptor.asn1
index d8c15b7afa..d8c15b7afa 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Document-Profile-Descriptor.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Document-Profile-Descriptor.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/EnhancedSecurity.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/EnhancedSecurity.asn1
index 9991a59454..9991a59454 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/EnhancedSecurity.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/EnhancedSecurity.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1
new file mode 100644
index 0000000000..17a45a0a6b
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/EnrollmentMessageSyntax-2009.asn1
@@ -0,0 +1,543 @@
+ EnrollmentMessageSyntax-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0) id-mod-cmc2002-02(53)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ EXPORTS ALL;
+ IMPORTS
+
+ AttributeSet{}, Extension{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+ AlgorithmIdentifier{}, DIGEST-ALGORITHM, KEY-WRAP, KEY-DERIVATION,
+ MAC-ALGORITHM, SIGNATURE-ALGORITHM, PUBLIC-KEY
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ CertificateSerialNumber, GeneralName, CRLReason, ReasonFlags,
+ CertExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ Name, id-pkix, PublicKeyAlgorithms, SignatureAlgorithms
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ ContentInfo, IssuerAndSerialNumber, CONTENT-TYPE
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41)}
+
+ CertReqMsg, PKIPublicationInfo, CertTemplate
+ FROM PKIXCRMF-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55)}
+
+ mda-sha1
+ FROM PKIXAlgs-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)}
+
+ kda-PBKDF2, maca-hMAC-SHA1
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+
+ mda-sha256
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54) } ;
+
+ -- CMS Content types defined in this document
+ CMC-ContentTypes CONTENT-TYPE ::= { ct-PKIData | ct-PKIResponse, ... }
+
+ -- Signature Algorithms defined in this document
+
+ SignatureAlgs SIGNATURE-ALGORITHM ::= { sa-noSignature }
+
+ -- CMS Unsigned Attributes
+
+ CMC-UnsignedAtts ATTRIBUTE ::= { aa-cmc-unsignedData }
+
+ --
+ --
+
+ id-cmc OBJECT IDENTIFIER ::= {id-pkix 7} -- CMC controls
+ id-cct OBJECT IDENTIFIER ::= {id-pkix 12} -- CMC content types
+
+ -- This is the content type for a request message in the protocol
+
+ ct-PKIData CONTENT-TYPE ::=
+ { PKIData IDENTIFIED BY id-cct-PKIData }
+ id-cct-PKIData OBJECT IDENTIFIER ::= { id-cct 2 }
+
+ PKIData ::= SEQUENCE {
+ controlSequence SEQUENCE SIZE(0..MAX) OF TaggedAttribute,
+ reqSequence SEQUENCE SIZE(0..MAX) OF TaggedRequest,
+ cmsSequence SEQUENCE SIZE(0..MAX) OF TaggedContentInfo,
+ otherMsgSequence SEQUENCE SIZE(0..MAX) OF OtherMsg
+ }
+
+ BodyPartID ::= INTEGER(0..4294967295)
+
+ TaggedAttribute ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ attrType CMC-CONTROL.&id({Cmc-Control-Set}),
+ attrValues SET OF CMC-CONTROL.
+ &Type({Cmc-Control-Set}{@attrType})
+ }
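+
+      -- Illustrative value (editorial, not part of the RFC module
+      -- text), carrying the transaction-identifier control defined
+      -- later in this module:
+      --
+      -- ta TaggedAttribute ::= {
+      --     bodyPartID 1,
+      --     attrType   id-cmc-transactionId,
+      --     attrValues { INTEGER : 17 }
+      -- }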
+
+ Cmc-Control-Set CMC-CONTROL ::= {
+ cmc-identityProof | cmc-dataReturn | cmc-regInfo |
+ cmc-responseInfo | cmc-queryPending | cmc-popLinkRandom |
+ cmc-popLinkWitness | cmc-identification | cmc-transactionId |
+ cmc-senderNonce | cmc-recipientNonce | cmc-statusInfo |
+ cmc-addExtensions | cmc-encryptedPOP | cmc-decryptedPOP |
+ cmc-lraPOPWitness | cmc-getCert | cmc-getCRL |
+ cmc-revokeRequest | cmc-confirmCertAcceptance |
+ cmc-statusInfoV2 | cmc-trustedAnchors | cmc-authData |
+ cmc-batchRequests | cmc-batchResponses | cmc-publishCert |
+ cmc-modCertTemplate | cmc-controlProcessed |
+ cmc-identityProofV2 | cmc-popLinkWitnessV2, ... }
+
+ OTHER-REQUEST ::= TYPE-IDENTIFIER
+
+ -- We do not define any other requests in this document;
+ -- examples might be attribute certification requests
+
+ OtherRequests OTHER-REQUEST ::= {...}
+
+ TaggedRequest ::= CHOICE {
+ tcr [0] TaggedCertificationRequest,
+ crm [1] CertReqMsg,
+ orm [2] SEQUENCE {
+ bodyPartID BodyPartID,
+ requestMessageType OTHER-REQUEST.&id({OtherRequests}),
+ requestMessageValue OTHER-REQUEST.&Type({OtherRequests}
+ {@.requestMessageType})
+ }
+ }
+
+ TaggedCertificationRequest ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ certificationRequest CertificationRequest
+ }
+
+ AttributeList ATTRIBUTE ::= {at-extension-req, ...}
+
+ CertificationRequest ::= SEQUENCE {
+ certificationRequestInfo SEQUENCE {
+ version INTEGER,
+ subject Name,
+ subjectPublicKeyInfo SEQUENCE {
+ algorithm AlgorithmIdentifier{PUBLIC-KEY,
+ {PublicKeyAlgorithms}},
+ subjectPublicKey BIT STRING
+ },
+ attributes [0] IMPLICIT SET OF
+ AttributeSet{{AttributeList}}
+ },
+ signatureAlgorithm AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ signature BIT STRING
+ }
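+
+      -- Editorial note: the SEQUENCE above spells out the PKCS #10
+      -- CertificationRequest structure inline so that its attribute
+      -- and algorithm fields can be constrained with the information
+      -- object classes and sets used in this module.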
+
+ TaggedContentInfo ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ contentInfo ContentInfo
+ }
+
+ OTHER-MSG ::= TYPE-IDENTIFIER
+
+ -- No other messages currently defined
+
+ OtherMsgSet OTHER-MSG ::= {...}
+
+ OtherMsg ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ otherMsgType OTHER-MSG.&id({OtherMsgSet}),
+ otherMsgValue OTHER-MSG.&Type({OtherMsgSet}{@otherMsgType}) }
+
+ -- This defines the response message in the protocol
+
+ ct-PKIResponse CONTENT-TYPE ::=
+ { PKIResponse IDENTIFIED BY id-cct-PKIResponse }
+ id-cct-PKIResponse OBJECT IDENTIFIER ::= { id-cct 3 }
+
+ ResponseBody ::= PKIResponse
+
+ PKIResponse ::= SEQUENCE {
+ controlSequence SEQUENCE SIZE(0..MAX) OF TaggedAttribute,
+ cmsSequence SEQUENCE SIZE(0..MAX) OF TaggedContentInfo,
+ otherMsgSequence SEQUENCE SIZE(0..MAX) OF OtherMsg
+ }
+
+ CMC-CONTROL ::= TYPE-IDENTIFIER
+
+ -- The following controls have the type OCTET STRING
+
+ cmc-identityProof CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-identityProof }
+ id-cmc-identityProof OBJECT IDENTIFIER ::= {id-cmc 3}
+
+ cmc-dataReturn CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-dataReturn }
+ id-cmc-dataReturn OBJECT IDENTIFIER ::= {id-cmc 4}
+
+ cmc-regInfo CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-regInfo }
+ id-cmc-regInfo OBJECT IDENTIFIER ::= {id-cmc 18}
+
+ cmc-responseInfo CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-responseInfo }
+ id-cmc-responseInfo OBJECT IDENTIFIER ::= {id-cmc 19}
+
+ cmc-queryPending CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-queryPending }
+ id-cmc-queryPending OBJECT IDENTIFIER ::= {id-cmc 21}
+
+ cmc-popLinkRandom CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-popLinkRandom }
+ id-cmc-popLinkRandom OBJECT IDENTIFIER ::= {id-cmc 22}
+
+ cmc-popLinkWitness CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-popLinkWitness }
+ id-cmc-popLinkWitness OBJECT IDENTIFIER ::= {id-cmc 23}
+
+ -- The following controls have the type UTF8String
+
+ cmc-identification CMC-CONTROL ::=
+ { UTF8String IDENTIFIED BY id-cmc-identification }
+ id-cmc-identification OBJECT IDENTIFIER ::= {id-cmc 2}
+
+ -- The following controls have the type INTEGER
+
+ cmc-transactionId CMC-CONTROL ::=
+ { INTEGER IDENTIFIED BY id-cmc-transactionId }
+ id-cmc-transactionId OBJECT IDENTIFIER ::= {id-cmc 5}
+
+ -- The following controls have the type OCTET STRING
+
+ cmc-senderNonce CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-senderNonce }
+
+ id-cmc-senderNonce OBJECT IDENTIFIER ::= {id-cmc 6}
+
+ cmc-recipientNonce CMC-CONTROL ::=
+ { OCTET STRING IDENTIFIED BY id-cmc-recipientNonce }
+ id-cmc-recipientNonce OBJECT IDENTIFIER ::= {id-cmc 7}
+
+ -- Used to return status in a response
+
+ cmc-statusInfo CMC-CONTROL ::=
+ { CMCStatusInfo IDENTIFIED BY id-cmc-statusInfo }
+ id-cmc-statusInfo OBJECT IDENTIFIER ::= {id-cmc 1}
+
+ CMCStatusInfo ::= SEQUENCE {
+ cMCStatus CMCStatus,
+ bodyList SEQUENCE SIZE (1..MAX) OF BodyPartID,
+ statusString UTF8String OPTIONAL,
+ otherInfo CHOICE {
+ failInfo CMCFailInfo,
+ pendInfo PendInfo
+ } OPTIONAL
+ }
+
+ PendInfo ::= SEQUENCE {
+ pendToken OCTET STRING,
+ pendTime GeneralizedTime
+ }
+
+ CMCStatus ::= INTEGER {
+ success (0),
+ failed (2),
+ pending (3),
+ noSupport (4),
+ confirmRequired (5),
+ popRequired (6),
+ partial (7)
+ }
+
+ -- Note:
+ -- The spelling of unsupportedExt is corrected in this version.
+ -- In RFC 2797, it was unsuportedExt.
+
+ CMCFailInfo ::= INTEGER {
+ badAlg (0),
+ badMessageCheck (1),
+ badRequest (2),
+ badTime (3),
+ badCertId (4),
+         unsupportedExt  (5),
+ mustArchiveKeys (6),
+ badIdentity (7),
+ popRequired (8),
+ popFailed (9),
+ noKeyReuse (10),
+ internalCAError (11),
+ tryLater (12),
+ authDataFail (13)
+ }
+
+ -- Used for RAs to add extensions to certification requests
+
+ cmc-addExtensions CMC-CONTROL ::=
+ { AddExtensions IDENTIFIED BY id-cmc-addExtensions }
+ id-cmc-addExtensions OBJECT IDENTIFIER ::= {id-cmc 8}
+
+ AddExtensions ::= SEQUENCE {
+ pkiDataReference BodyPartID,
+ certReferences SEQUENCE OF BodyPartID,
+ extensions SEQUENCE OF Extension{{CertExtensions}}
+ }
+
+ cmc-encryptedPOP CMC-CONTROL ::=
+ { EncryptedPOP IDENTIFIED BY id-cmc-encryptedPOP }
+ cmc-decryptedPOP CMC-CONTROL ::=
+ { DecryptedPOP IDENTIFIED BY id-cmc-decryptedPOP }
+ id-cmc-encryptedPOP OBJECT IDENTIFIER ::= {id-cmc 9}
+ id-cmc-decryptedPOP OBJECT IDENTIFIER ::= {id-cmc 10}
+
+ EncryptedPOP ::= SEQUENCE {
+ request TaggedRequest,
+ cms ContentInfo,
+ thePOPAlgID AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ witnessAlgID AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {WitnessAlgs}},
+ witness OCTET STRING
+ }
+
+ POPAlgs MAC-ALGORITHM ::= {maca-hMAC-SHA1, ...}
+ WitnessAlgs DIGEST-ALGORITHM ::= {mda-sha1, ...}
+
+ DecryptedPOP ::= SEQUENCE {
+ bodyPartID BodyPartID,
+ thePOPAlgID AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ thePOP OCTET STRING
+ }
+
+ cmc-lraPOPWitness CMC-CONTROL ::=
+ { LraPopWitness IDENTIFIED BY id-cmc-lraPOPWitness }
+
+ id-cmc-lraPOPWitness OBJECT IDENTIFIER ::= {id-cmc 11}
+
+ LraPopWitness ::= SEQUENCE {
+ pkiDataBodyid BodyPartID,
+ bodyIds SEQUENCE OF BodyPartID
+ }
+
+ --
+
+ cmc-getCert CMC-CONTROL ::=
+ { GetCert IDENTIFIED BY id-cmc-getCert }
+ id-cmc-getCert OBJECT IDENTIFIER ::= {id-cmc 15}
+
+ GetCert ::= SEQUENCE {
+ issuerName GeneralName,
+ serialNumber INTEGER }
+
+ cmc-getCRL CMC-CONTROL ::=
+ { GetCRL IDENTIFIED BY id-cmc-getCRL }
+ id-cmc-getCRL OBJECT IDENTIFIER ::= {id-cmc 16}
+ GetCRL ::= SEQUENCE {
+ issuerName Name,
+ cRLName GeneralName OPTIONAL,
+ time GeneralizedTime OPTIONAL,
+ reasons ReasonFlags OPTIONAL }
+
+ cmc-revokeRequest CMC-CONTROL ::=
+ { RevokeRequest IDENTIFIED BY id-cmc-revokeRequest}
+ id-cmc-revokeRequest OBJECT IDENTIFIER ::= {id-cmc 17}
+
+ RevokeRequest ::= SEQUENCE {
+ issuerName Name,
+ serialNumber INTEGER,
+ reason CRLReason,
+ invalidityDate GeneralizedTime OPTIONAL,
+ passphrase OCTET STRING OPTIONAL,
+ comment UTF8String OPTIONAL }
+
+ cmc-confirmCertAcceptance CMC-CONTROL ::=
+ { CMCCertId IDENTIFIED BY id-cmc-confirmCertAcceptance }
+ id-cmc-confirmCertAcceptance OBJECT IDENTIFIER ::= {id-cmc 24}
+
+ CMCCertId ::= IssuerAndSerialNumber
+
+ -- The following is used to request v3 extensions be added
+ -- to a certificate
+
+ at-extension-req ATTRIBUTE ::=
+ { TYPE ExtensionReq IDENTIFIED BY id-ExtensionReq }
+ id-ExtensionReq OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) 14}
+
+ ExtensionReq ::= SEQUENCE SIZE (1..MAX) OF
+ Extension{{CertExtensions}}
+
+ -- The following allows Diffie-Hellman Certification Request
+ -- Messages to be well-formed
+
+ sa-noSignature SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER id-alg-noSignature
+ VALUE NoSignatureValue
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha1 }
+ }
+ id-alg-noSignature OBJECT IDENTIFIER ::= {id-pkix id-alg(6) 2}
+
+ NoSignatureValue ::= OCTET STRING
+ -- Unauthenticated attribute to carry removable data.
+
+ id-aa OBJECT IDENTIFIER ::= { iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) id-aa(2)}
+
+ aa-cmc-unsignedData ATTRIBUTE ::=
+ { TYPE CMCUnsignedData IDENTIFIED BY id-aa-cmc-unsignedData }
+ id-aa-cmc-unsignedData OBJECT IDENTIFIER ::= {id-aa 34}
+
+ CMCUnsignedData ::= SEQUENCE {
+ bodyPartPath BodyPartPath,
+ identifier TYPE-IDENTIFIER.&id,
+ content TYPE-IDENTIFIER.&Type
+ }
+
+ -- Replaces CMC Status Info
+ --
+
+ cmc-statusInfoV2 CMC-CONTROL ::=
+ { CMCStatusInfoV2 IDENTIFIED BY id-cmc-statusInfoV2 }
+ id-cmc-statusInfoV2 OBJECT IDENTIFIER ::= {id-cmc 25}
+
+ EXTENDED-FAILURE-INFO ::= TYPE-IDENTIFIER
+
+ ExtendedFailures EXTENDED-FAILURE-INFO ::= {...}
+
+ CMCStatusInfoV2 ::= SEQUENCE {
+ cMCStatus CMCStatus,
+ bodyList SEQUENCE SIZE (1..MAX) OF
+ BodyPartReference,
+ statusString UTF8String OPTIONAL,
+ otherInfo CHOICE {
+ failInfo CMCFailInfo,
+ pendInfo PendInfo,
+ extendedFailInfo [1] SEQUENCE {
+ failInfoOID TYPE-IDENTIFIER.&id
+ ({ExtendedFailures}),
+ failInfoValue TYPE-IDENTIFIER.&Type
+ ({ExtendedFailures}
+ {@.failInfoOID})
+ }
+ } OPTIONAL
+ }
+
+ BodyPartReference ::= CHOICE {
+ bodyPartID BodyPartID,
+ bodyPartPath BodyPartPath
+ }
+
+ BodyPartPath ::= SEQUENCE SIZE (1..MAX) OF BodyPartID
+
+ -- Allow for distribution of trust anchors
+ --
+
+ cmc-trustedAnchors CMC-CONTROL ::=
+ { PublishTrustAnchors IDENTIFIED BY id-cmc-trustedAnchors }
+ id-cmc-trustedAnchors OBJECT IDENTIFIER ::= {id-cmc 26}
+
+ PublishTrustAnchors ::= SEQUENCE {
+ seqNumber INTEGER,
+ hashAlgorithm AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {HashAlgorithms}},
+ anchorHashes SEQUENCE OF OCTET STRING
+ }
+
+ HashAlgorithms DIGEST-ALGORITHM ::= {
+ mda-sha1 | mda-sha256, ...
+ }
+
+ cmc-authData CMC-CONTROL ::=
+ { AuthPublish IDENTIFIED BY id-cmc-authData }
+ id-cmc-authData OBJECT IDENTIFIER ::= {id-cmc 27}
+
+ AuthPublish ::= BodyPartID
+
+ -- These two items use BodyPartList
+
+ cmc-batchRequests CMC-CONTROL ::=
+ { BodyPartList IDENTIFIED BY id-cmc-batchRequests }
+ id-cmc-batchRequests OBJECT IDENTIFIER ::= {id-cmc 28}
+
+ cmc-batchResponses CMC-CONTROL ::=
+ { BodyPartList IDENTIFIED BY id-cmc-batchResponses }
+ id-cmc-batchResponses OBJECT IDENTIFIER ::= {id-cmc 29}
+
+ BodyPartList ::= SEQUENCE SIZE (1..MAX) OF BodyPartID
+
+ cmc-publishCert CMC-CONTROL ::=
+ { CMCPublicationInfo IDENTIFIED BY id-cmc-publishCert }
+ id-cmc-publishCert OBJECT IDENTIFIER ::= {id-cmc 30}
+
+ CMCPublicationInfo ::= SEQUENCE {
+ hashAlg AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {HashAlgorithms}},
+ certHashes SEQUENCE OF OCTET STRING,
+ pubInfo PKIPublicationInfo
+ }
+
+ cmc-modCertTemplate CMC-CONTROL ::=
+ { ModCertTemplate IDENTIFIED BY id-cmc-modCertTemplate }
+ id-cmc-modCertTemplate OBJECT IDENTIFIER ::= {id-cmc 31}
+
+ ModCertTemplate ::= SEQUENCE {
+ pkiDataReference BodyPartPath,
+ certReferences BodyPartList,
+ replace BOOLEAN DEFAULT TRUE,
+ certTemplate CertTemplate
+ }
+
+ -- Inform follow-on servers that one or more controls have
+ -- already been processed
+
+ cmc-controlProcessed CMC-CONTROL ::=
+ { ControlsProcessed IDENTIFIED BY id-cmc-controlProcessed }
+ id-cmc-controlProcessed OBJECT IDENTIFIER ::= {id-cmc 32}
+
+ ControlsProcessed ::= SEQUENCE {
+ bodyList SEQUENCE SIZE(1..MAX) OF BodyPartReference
+ }
+
+ -- Identity Proof control w/ algorithm agility
+
+ cmc-identityProofV2 CMC-CONTROL ::=
+ { IdentityProofV2 IDENTIFIED BY id-cmc-identityProofV2 }
+ id-cmc-identityProofV2 OBJECT IDENTIFIER ::= { id-cmc 33 }
+
+ IdentityProofV2 ::= SEQUENCE {
+ proofAlgID AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {WitnessAlgs}},
+ macAlgId AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ witness OCTET STRING
+ }
+
+ cmc-popLinkWitnessV2 CMC-CONTROL ::=
+ { PopLinkWitnessV2 IDENTIFIED BY id-cmc-popLinkWitnessV2 }
+ id-cmc-popLinkWitnessV2 OBJECT IDENTIFIER ::= { id-cmc 34 }
+
+ PopLinkWitnessV2 ::= SEQUENCE {
+ keyGenAlgorithm AlgorithmIdentifier{KEY-DERIVATION,
+ {KeyDevAlgs}},
+ macAlgorithm AlgorithmIdentifier{MAC-ALGORITHM, {POPAlgs}},
+ witness OCTET STRING
+ }
+
+ KeyDevAlgs KEY-DERIVATION ::= {kda-PBKDF2, ...}
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/External-References.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/External-References.asn1
index 9a7d4936a6..9a7d4936a6 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/External-References.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/External-References.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GULSProtectionMappings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GULSProtectionMappings.asn1
index 9b6a426ca2..9b6a426ca2 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GULSProtectionMappings.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GULSProtectionMappings.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GenericProtectingTransferSyntax.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GenericProtectingTransferSyntax.asn1
index c59451dcdb..c59451dcdb 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GenericProtectingTransferSyntax.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GenericProtectingTransferSyntax.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Coding-Attributes.asn1
index 60acbb3b5c..60acbb3b5c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Presentation-Attributes.asn1
index 84c1ee9851..84c1ee9851 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Presentation-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Presentation-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Profile-Attributes.asn1
index 28daa467e1..28daa467e1 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Geo-Gr-Profile-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Geo-Gr-Profile-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityExchanges.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityExchanges.asn1
index 336b824174..336b824174 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityExchanges.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityExchanges.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityTransformations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityTransformations.asn1
index db2725c37d..db2725c37d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/GulsSecurityTransformations.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/GulsSecurityTransformations.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/HierarchicalOperationalBindings.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/HierarchicalOperationalBindings.asn1
index 4e0084b079..4e0084b079 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/HierarchicalOperationalBindings.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/HierarchicalOperationalBindings.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAbstractService.asn1
index 3fec8ae64a..3fec8ae64a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAutoActionTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAutoActionTypes.asn1
index 8c0c8138e2..8c0c8138e2 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSAutoActionTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSAutoActionTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes.asn1
index 9805a6189d..9805a6189d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes2.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes2.asn1
index b39e03c3b6..b39e03c3b6 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedBodyPartTypes2.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedBodyPartTypes2.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedVoiceBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedVoiceBodyPartType.asn1
index 171f4b4223..171f4b4223 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSExtendedVoiceBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSExtendedVoiceBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFileTransferBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFileTransferBodyPartType.asn1
index 59de6d1b04..59de6d1b04 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFileTransferBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFileTransferBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedContentBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedContentBodyPartType.asn1
index 57faac6587..57faac6587 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedContentBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedContentBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedReportBodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedReportBodyPartType.asn1
index 4e46c7679b..4e46c7679b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSForwardedReportBodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSForwardedReportBodyPartType.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFunctionalObjects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFunctionalObjects.asn1
index 09ef4de282..09ef4de282 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSFunctionalObjects.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSFunctionalObjects.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSHeadingExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSHeadingExtensions.asn1
index 752e8d05e1..752e8d05e1 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSHeadingExtensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSHeadingExtensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSInformationObjects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSInformationObjects.asn1
index 3fb0463ee7..3fb0463ee7 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSInformationObjects.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSInformationObjects.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSMessageStoreAttributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSMessageStoreAttributes.asn1
index 719bca4987..719bca4987 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSMessageStoreAttributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSMessageStoreAttributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers.asn1
index 6e5c01ab40..6e5c01ab40 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers2.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers2.asn1
index 2b46b27b3e..2b46b27b3e 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSObjectIdentifiers2.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSObjectIdentifiers2.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSSecurityExtensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSSecurityExtensions.asn1
index 8c692ccb31..8c692ccb31 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSSecurityExtensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSSecurityExtensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/IPMSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSUpperBounds.asn1
index 27324f614f..27324f614f 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/IPMSUpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/IPMSUpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn1
index b7efd7417e..b7efd7417e 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO-STANDARD-9541-FONT-ATTRIBUTE-SET.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO8571-FTAM.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO8571-FTAM.asn1
index a57a276704..a57a276704 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ISO8571-FTAM.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO8571-FTAM.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ISO9541-SN.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO9541-SN.asn1
index 0149602040..0149602040 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ISO9541-SN.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ISO9541-SN.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Identifiers-and-Expressions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Identifiers-and-Expressions.asn1
index bd1d8d3c48..bd1d8d3c48 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Identifiers-and-Expressions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Identifiers-and-Expressions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/InformationFramework.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/InformationFramework.asn1
index 813ac9c6a0..813ac9c6a0 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/InformationFramework.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/InformationFramework.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Interchange-Data-Elements.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Interchange-Data-Elements.asn1
index 2c78360b7b..2c78360b7b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Interchange-Data-Elements.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Interchange-Data-Elements.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Layout-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Layout-Descriptors.asn1
index 92c887bb06..92c887bb06 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Layout-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Layout-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Link-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Link-Descriptors.asn1
index 64fc4436e4..64fc4436e4 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Link-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Link-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Location-Expressions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Location-Expressions.asn1
index 5de6491621..5de6491621 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Location-Expressions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Location-Expressions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Logical-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Logical-Descriptors.asn1
index fab36bf12a..fab36bf12a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Logical-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Logical-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MHSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSObjectIdentifiers.asn1
index 187c3c8ad4..187c3c8ad4 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MHSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MHSProtocolObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSProtocolObjectIdentifiers.asn1
index 40f53b9458..40f53b9458 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MHSProtocolObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MHSProtocolObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAbstractService.asn1
index 052b3b2041..052b3b2041 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAccessProtocol.asn1
index b69d72b3ed..b69d72b3ed 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSAccessProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSAccessProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAttributeTypes.asn1
index 99d34b2883..99d34b2883 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAttributeTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAttributeTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAutoActionTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAutoActionTypes.asn1
index eceae4ab44..eceae4ab44 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSGeneralAutoActionTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSGeneralAutoActionTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSMatchingRules.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSMatchingRules.asn1
index 37c894da86..37c894da86 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSMatchingRules.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSMatchingRules.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSObjectIdentifiers.asn1
index df194f838c..df194f838c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MSUpperBounds.asn1
index 6494fbd3ef..6494fbd3ef 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MSUpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MSUpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTAAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTAAbstractService.asn1
index 38035c77ae..38035c77ae 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTAAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTAAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService.asn1
index 68a5118bc8..68a5118bc8 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService88.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService88.asn1
index f66d117f35..f66d117f35 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAbstractService88.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAbstractService88.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSAccessProtocol.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAccessProtocol.asn1
index 03181c5951..03181c5951 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSAccessProtocol.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSAccessProtocol.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSObjectIdentifiers.asn1
index 1615b241ee..1615b241ee 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/MTSUpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSUpperBounds.asn1
index 10eac962cb..10eac962cb 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/MTSUpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/MTSUpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Notation.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Notation.asn1
index 96dfc39b6a..96dfc39b6a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Notation.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Notation.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1
new file mode 100644
index 0000000000..db500fe9a1
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OCSP-2009.asn1
@@ -0,0 +1,183 @@
+ OCSP-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-ocsp-02(48)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ AlgorithmIdentifier{}, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ AuthorityInfoAccessSyntax, GeneralName, CrlEntryExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ Name, CertificateSerialNumber, id-kp, id-ad-ocsp, Certificate
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ sa-dsaWithSHA1, sa-rsaWithMD2, sa-rsaWithMD5, sa-rsaWithSHA1
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)};
+
+ OCSPRequest ::= SEQUENCE {
+ tbsRequest TBSRequest,
+ optionalSignature [0] EXPLICIT Signature OPTIONAL }
+
+ TBSRequest ::= SEQUENCE {
+ version [0] EXPLICIT Version DEFAULT v1,
+ requestorName [1] EXPLICIT GeneralName OPTIONAL,
+ requestList SEQUENCE OF Request,
+ requestExtensions [2] EXPLICIT Extensions {{re-ocsp-nonce |
+ re-ocsp-response, ...}} OPTIONAL }
+
+ Signature ::= SEQUENCE {
+ signatureAlgorithm AlgorithmIdentifier
+ { SIGNATURE-ALGORITHM, {...}},
+ signature BIT STRING,
+ certs [0] EXPLICIT SEQUENCE OF Certificate OPTIONAL }
+
+ Version ::= INTEGER { v1(0) }
+
+ Request ::= SEQUENCE {
+ reqCert CertID,
+ singleRequestExtensions [0] EXPLICIT Extensions
+ { {re-ocsp-service-locator,
+ ...}} OPTIONAL }
+
+ CertID ::= SEQUENCE {
+ hashAlgorithm AlgorithmIdentifier
+ {DIGEST-ALGORITHM, {...}},
+ issuerNameHash OCTET STRING, -- Hash of Issuer's DN
+ issuerKeyHash OCTET STRING, -- Hash of Issuer's public key
+ serialNumber CertificateSerialNumber }
+
+ OCSPResponse ::= SEQUENCE {
+ responseStatus OCSPResponseStatus,
+ responseBytes [0] EXPLICIT ResponseBytes OPTIONAL }
+
+ OCSPResponseStatus ::= ENUMERATED {
+ successful (0), --Response has valid confirmations
+ malformedRequest (1), --Illegal confirmation request
+ internalError (2), --Internal error in issuer
+ tryLater (3), --Try again later
+ -- (4) is not used
+ sigRequired (5), --Must sign the request
+ unauthorized (6) --Request unauthorized
+ }
+
+ RESPONSE ::= TYPE-IDENTIFIER
+
+ ResponseSet RESPONSE ::= {basicResponse, ...}
+
+ ResponseBytes ::= SEQUENCE {
+ responseType RESPONSE.
+ &id ({ResponseSet}),
+ response OCTET STRING (CONTAINING RESPONSE.
+ &Type({ResponseSet}{@responseType}))}
+
+ basicResponse RESPONSE ::=
+ { BasicOCSPResponse IDENTIFIED BY id-pkix-ocsp-basic }
+
+ BasicOCSPResponse ::= SEQUENCE {
+ tbsResponseData ResponseData,
+ signatureAlgorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {sa-dsaWithSHA1 | sa-rsaWithSHA1 |
+ sa-rsaWithMD5 | sa-rsaWithMD2, ...}},
+ signature BIT STRING,
+ certs [0] EXPLICIT SEQUENCE OF Certificate OPTIONAL }
+
+ ResponseData ::= SEQUENCE {
+ version [0] EXPLICIT Version DEFAULT v1,
+ responderID ResponderID,
+ producedAt GeneralizedTime,
+ responses SEQUENCE OF SingleResponse,
+ responseExtensions [1] EXPLICIT Extensions
+ {{re-ocsp-nonce, ...}} OPTIONAL }
+
+ ResponderID ::= CHOICE {
+ byName [1] Name,
+ byKey [2] KeyHash }
+
+ KeyHash ::= OCTET STRING --SHA-1 hash of responder's public key
+ -- (excluding the tag and length fields)
+
+ SingleResponse ::= SEQUENCE {
+ certID CertID,
+ certStatus CertStatus,
+ thisUpdate GeneralizedTime,
+ nextUpdate [0] EXPLICIT GeneralizedTime OPTIONAL,
+ singleExtensions [1] EXPLICIT Extensions{{re-ocsp-crl |
+ re-ocsp-archive-cutoff |
+ CrlEntryExtensions, ...}
+ } OPTIONAL }
+
+ CertStatus ::= CHOICE {
+ good [0] IMPLICIT NULL,
+ revoked [1] IMPLICIT RevokedInfo,
+ unknown [2] IMPLICIT UnknownInfo }
+
+ RevokedInfo ::= SEQUENCE {
+ revocationTime GeneralizedTime,
+ revocationReason [0] EXPLICIT CRLReason OPTIONAL }
+
+ UnknownInfo ::= NULL
+
+ CRLReason ::= INTEGER
+
+ ArchiveCutoff ::= GeneralizedTime
+
+ AcceptableResponses ::= SEQUENCE OF RESPONSE.&id({ResponseSet})
+
+ ServiceLocator ::= SEQUENCE {
+ issuer Name,
+ locator AuthorityInfoAccessSyntax }
+
+ CrlID ::= SEQUENCE {
+ crlUrl [0] EXPLICIT IA5String OPTIONAL,
+ crlNum [1] EXPLICIT INTEGER OPTIONAL,
+ crlTime [2] EXPLICIT GeneralizedTime OPTIONAL }
+
+ -- Request Extensions
+
+ re-ocsp-nonce EXTENSION ::= { SYNTAX OCTET STRING IDENTIFIED
+ BY id-pkix-ocsp-nonce }
+ re-ocsp-response EXTENSION ::= { SYNTAX AcceptableResponses IDENTIFIED
+ BY id-pkix-ocsp-response }
+ re-ocsp-service-locator EXTENSION ::= { SYNTAX ServiceLocator
+ IDENTIFIED BY
+ id-pkix-ocsp-service-locator }
+
+ -- Response Extensions
+
+ re-ocsp-crl EXTENSION ::= { SYNTAX CrlID IDENTIFIED BY
+ id-pkix-ocsp-crl }
+ re-ocsp-archive-cutoff EXTENSION ::= { SYNTAX ArchiveCutoff
+ IDENTIFIED BY
+ id-pkix-ocsp-archive-cutoff }
+
+ -- Object Identifiers
+
+ id-kp-OCSPSigning OBJECT IDENTIFIER ::= { id-kp 9 }
+ id-pkix-ocsp OBJECT IDENTIFIER ::= id-ad-ocsp
+ id-pkix-ocsp-basic OBJECT IDENTIFIER ::= { id-pkix-ocsp 1 }
+ id-pkix-ocsp-nonce OBJECT IDENTIFIER ::= { id-pkix-ocsp 2 }
+ id-pkix-ocsp-crl OBJECT IDENTIFIER ::= { id-pkix-ocsp 3 }
+ id-pkix-ocsp-response OBJECT IDENTIFIER ::= { id-pkix-ocsp 4 }
+ id-pkix-ocsp-nocheck OBJECT IDENTIFIER ::= { id-pkix-ocsp 5 }
+ id-pkix-ocsp-archive-cutoff OBJECT IDENTIFIER ::= { id-pkix-ocsp 6 }
+ id-pkix-ocsp-service-locator OBJECT IDENTIFIER ::= { id-pkix-ocsp 7 }
+
+ END
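The new OCSP-2009 module can be exercised directly with the OTP asn1 compiler. The sketch below is illustrative only: the ber back end, the include path, and the assumption that the imported RFC 5912 companion modules (PKIX-CommonTypes-2009, AlgorithmInformation-2009, PKIX1Explicit-2009, PKIX1Implicit-2009, PKIXAlgs-2009) are already compiled in the same directory are not part of the patch.

    %% Hedged sketch; asn1ct and the generated encode/decode API come from
    %% the OTP asn1 application, everything else here is an assumption.
    ok = asn1ct:compile("OCSP-2009.asn1", [ber, {i, "."}]),
    %% ENUMERATED values are plain atoms on the Erlang side.
    {ok, Bin} = 'OCSP-2009':encode('OCSPResponseStatus', malformedRequest),
    {ok, malformedRequest} = 'OCSP-2009':decode('OCSPResponseStatus', Bin).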
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/OLD-PKCS7.asn1
index ac449b59c7..ab555200bb 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OLD-PKCS7.asn1
@@ -3,7 +3,7 @@
-- This Annex contains a module of PKCS#7 ASN.1 definitions conforming to current ASN.1 standards rather than the obsolescent (and now deprecated) 1988/90 version of ASN.1 used in version 1.5 of PKCS#7.
-- Extensions to PKCS#7 defined in RFC 2630 are included.
-- If differences are found between the ASN.1 in the following module and that in PKCS#7, the latter is definitive.
-PKCS7 {iso member-body usa(840) rsadsi(113549) pkcs(1) 7
+OLD-PKCS7 {iso member-body usa(840) rsadsi(113549) pkcs(1) 7
module(0) -- module not currently defined in PKCS#7 --} DEFINITIONS IMPLICIT
TAGS ::=
BEGIN
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ObjectIdentifiers.asn1
index b4f91f50c5..b4f91f50c5 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/OperationalBindingManagement.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/OperationalBindingManagement.asn1
index 2044feb155..2044feb155 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/OperationalBindingManagement.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/OperationalBindingManagement.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1
new file mode 100644
index 0000000000..a5fd0fefb9
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-10.asn1
@@ -0,0 +1,56 @@
+ PKCS-10
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkcs10-2009(69)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AlgorithmIdentifier{}, DIGEST-ALGORITHM, SIGNATURE-ALGORITHM,
+ PUBLIC-KEY
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ ATTRIBUTE, Name
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)};
+
+ -- Certificate requests
+ CertificationRequestInfo ::= SEQUENCE {
+ version INTEGER { v1(0) } (v1, ... ),
+ subject Name,
+ subjectPKInfo SubjectPublicKeyInfo{{ PKInfoAlgorithms }},
+ attributes [0] Attributes{{ CRIAttributes }}
+ }
+
+ SubjectPublicKeyInfo {PUBLIC-KEY: IOSet} ::= SEQUENCE {
+ algorithm AlgorithmIdentifier {PUBLIC-KEY, {IOSet}},
+ subjectPublicKey BIT STRING
+ }
+
+ PKInfoAlgorithms PUBLIC-KEY ::= {
+ ... -- add any locally defined algorithms here -- }
+
+ Attributes { ATTRIBUTE:IOSet } ::= SET OF Attribute{{ IOSet }}
+
+ CRIAttributes ATTRIBUTE ::= {
+ ... -- add any locally defined attributes here -- }
+
+ Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE {
+ type ATTRIBUTE.&id({IOSet}),
+ values SET SIZE(1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type})
+ }
+
+ CertificationRequest ::= SEQUENCE {
+ certificationRequestInfo CertificationRequestInfo,
+ signatureAlgorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ { SignatureAlgorithms }},
+ signature BIT STRING
+ }
+
+ SignatureAlgorithms SIGNATURE-ALGORITHM ::= {
+ ... -- add any locally defined algorithms here -- }
+
+ END
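For PKCS-10 the typical round trip is decoding an existing DER-encoded certification request. A hedged sketch follows; the input file name is hypothetical, and the AlgorithmInformation-2009 and PKIX1Explicit-2009 modules named in IMPORTS are assumed to be compiled alongside.

    %% Hedged sketch, not part of the patch.
    ok = asn1ct:compile("PKCS-10.asn1", [ber, {i, "."}]),
    {ok, Der} = file:read_file("request.der"),   %% hypothetical CSR file
    %% SEQUENCE types decode to records (tuples tagged with the type name),
    %% here {'CertificationRequest', Info, SigAlg, Signature}.
    {ok, {'CertificationRequest', _Info, _SigAlg, _Signature}} =
        'PKCS-10':decode('CertificationRequest', Der).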
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1
new file mode 100644
index 0000000000..5b37a552f9
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-12.asn1
@@ -0,0 +1,174 @@
+PKCS-12 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-12(12) modules(0) pkcs-12(1)}
+
+-- $Revision$
+
+DEFINITIONS IMPLICIT TAGS ::=
+
+BEGIN
+
+-- EXPORTS ALL
+-- All types and values defined in this module is exported for use in
+-- other ASN.1 modules.
+
+IMPORTS
+
+informationFramework
+ FROM UsefulDefinitions {joint-iso-itu-t(2) ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+ATTRIBUTE
+ FROM InformationFramework informationFramework
+
+ContentInfo, DigestInfo
+ FROM PKCS-7 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-7(7) modules(0) pkcs-7(1)}
+
+PrivateKeyInfo, EncryptedPrivateKeyInfo
+ FROM PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-8(8) modules(1) pkcs-8(1)}
+
+pkcs-9, friendlyName, localKeyId, certTypes, crlTypes
+ FROM PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-9(9) modules(0) pkcs-9(1)};
+
+-- Object identifiers
+
+rsadsi OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840) rsadsi(113549)}
+pkcs OBJECT IDENTIFIER ::= {rsadsi pkcs(1)}
+pkcs-12 OBJECT IDENTIFIER ::= {pkcs 12}
+pkcs-12PbeIds OBJECT IDENTIFIER ::= {pkcs-12 1}
+pbeWithSHAAnd128BitRC4 OBJECT IDENTIFIER ::= {pkcs-12PbeIds 1}
+pbeWithSHAAnd40BitRC4 OBJECT IDENTIFIER ::= {pkcs-12PbeIds 2}
+pbeWithSHAAnd3-KeyTripleDES-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 3}
+pbeWithSHAAnd2-KeyTripleDES-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 4}
+pbeWithSHAAnd128BitRC2-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 5}
+pbewithSHAAnd40BitRC2-CBC OBJECT IDENTIFIER ::= {pkcs-12PbeIds 6}
+
+bagtypes OBJECT IDENTIFIER ::= {pkcs-12 10 1}
+
+-- The PFX PDU
+
+PFX ::= SEQUENCE {
+ version INTEGER {v3(3)}(v3,...),
+ authSafe ContentInfo,
+ macData MacData OPTIONAL
+}
+
+MacData ::= SEQUENCE {
+ mac DigestInfo,
+ macSalt OCTET STRING,
+ iterations INTEGER DEFAULT 1
+-- Note: The default is for historical reasons and its use is
+-- deprecated. A higher value, like 1024 is recommended.
+}
+
+AuthenticatedSafe ::= SEQUENCE OF ContentInfo
+ -- Data if unencrypted
+ -- EncryptedData if password-encrypted
+ -- EnvelopedData if public key-encrypted
+
+SafeContents ::= SEQUENCE OF SafeBag
+
+SafeBag ::= SEQUENCE {
+ bagId BAG-TYPE.&id ({PKCS12BagSet}),
+ bagValue [0] EXPLICIT BAG-TYPE.&Type({PKCS12BagSet}{@bagId}),
+ bagAttributes SET OF PKCS12Attribute OPTIONAL
+}
+
+-- Bag types
+
+keyBag BAG-TYPE ::=
+ {KeyBag IDENTIFIED BY {bagtypes 1}}
+pkcs8ShroudedKeyBag BAG-TYPE ::=
+ {PKCS8ShroudedKeyBag IDENTIFIED BY {bagtypes 2}}
+certBag BAG-TYPE ::=
+ {CertBag IDENTIFIED BY {bagtypes 3}}
+crlBag BAG-TYPE ::=
+ {CRLBag IDENTIFIED BY {bagtypes 4}}
+secretBag BAG-TYPE ::=
+ {SecretBag IDENTIFIED BY {bagtypes 5}}
+safeContentsBag BAG-TYPE ::=
+ {SafeContents IDENTIFIED BY {bagtypes 6}}
+
+PKCS12BagSet BAG-TYPE ::= {
+ keyBag |
+ pkcs8ShroudedKeyBag |
+ certBag |
+ crlBag |
+ secretBag |
+ safeContentsBag,
+ ... -- For future extensions
+}
+
+BAG-TYPE ::= TYPE-IDENTIFIER
+
+-- KeyBag
+
+KeyBag ::= PrivateKeyInfo
+
+-- Shrouded KeyBag
+
+PKCS8ShroudedKeyBag ::= EncryptedPrivateKeyInfo
+
+-- CertBag
+
+CertBag ::= SEQUENCE {
+ certId BAG-TYPE.&id ({CertTypes}),
+ certValue [0] EXPLICIT BAG-TYPE.&Type ({CertTypes}{@certId})
+}
+
+x509Certificate BAG-TYPE ::=
+ {OCTET STRING IDENTIFIED BY {certTypes 1}}
+ -- DER-encoded X.509 certificate stored in OCTET STRING
+sdsiCertificate BAG-TYPE ::=
+ {IA5String IDENTIFIED BY {certTypes 2}}
+ -- Base64-encoded SDSI certificate stored in IA5String
+
+CertTypes BAG-TYPE ::= {
+ x509Certificate |
+ sdsiCertificate,
+ ... -- For future extensions
+}
+
+-- CRLBag
+
+CRLBag ::= SEQUENCE {
+ crlId BAG-TYPE.&id ({CRLTypes}),
+ crlValue [0] EXPLICIT BAG-TYPE.&Type ({CRLTypes}{@crlId})
+}
+
+x509CRL BAG-TYPE ::=
+ {OCTET STRING IDENTIFIED BY {crlTypes 1}}
+ -- DER-encoded X.509 CRL stored in OCTET STRING
+
+CRLTypes BAG-TYPE ::= {
+ x509CRL,
+ ... -- For future extensions
+}
+
+-- Secret Bag
+
+SecretBag ::= SEQUENCE {
+ secretTypeId BAG-TYPE.&id ({SecretTypes}),
+ secretValue [0] EXPLICIT BAG-TYPE.&Type ({SecretTypes}{@secretTypeId})
+}
+
+SecretTypes BAG-TYPE ::= {
+ ... -- For future extensions
+}
+
+-- Attributes
+
+PKCS12Attribute ::= SEQUENCE {
+ attrId ATTRIBUTE.&id ({PKCS12AttrSet}),
+ attrValues SET OF ATTRIBUTE.&Type ({PKCS12AttrSet}{@attrId})
+} -- This type is compatible with the X.500 type 'Attribute'
+
+PKCS12AttrSet ATTRIBUTE ::= {
+ friendlyName |
+ localKeyId,
+ ... -- Other attributes are allowed
+}
+
+END
\ No newline at end of file
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1
new file mode 100644
index 0000000000..91b0dc36bf
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-5.asn1
@@ -0,0 +1,202 @@
+-- PKCS #5 v2.1 ASN.1 Module
+-- Revised October 27, 2012
+
+-- This module has been checked for conformance with the
+-- ASN.1 standard by the OSS ASN.1 Tools
+
+PKCS-5 {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-5(5) modules(16)
+ pkcs5v2-1(2)}
+
+DEFINITIONS EXPLICIT TAGS ::=
+
+BEGIN
+
+-- ============================
+-- Basic object identifiers
+-- ============================
+
+nistAlgorithms OBJECT IDENTIFIER ::=
+ {joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) csor(3) 4}
+oiw OBJECT IDENTIFIER ::= {iso(1) identified-organization(3) 14}
+rsadsi OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840) 113549}
+pkcs OBJECT IDENTIFIER ::= {rsadsi 1}
+pkcs-5 OBJECT IDENTIFIER ::= {pkcs 5}
+
+
+-- ============================
+-- Basic types and classes
+-- ============================
+
+AlgorithmIdentifier { ALGORITHM-IDENTIFIER:InfoObjectSet } ::= SEQUENCE {
+ algorithm ALGORITHM-IDENTIFIER.&id({InfoObjectSet}),
+ parameters ALGORITHM-IDENTIFIER.&Type({InfoObjectSet} {@algorithm}) OPTIONAL
+}
+
+ALGORITHM-IDENTIFIER ::= TYPE-IDENTIFIER
+
+
+-- ============================
+-- PBKDF2
+-- ============================
+
+PBKDF2Algorithms ALGORITHM-IDENTIFIER ::=
+ { {PBKDF2-params IDENTIFIED BY id-PBKDF2}, ...}
+
+id-PBKDF2 OBJECT IDENTIFIER ::= {pkcs-5 12}
+
+algid-hmacWithSHA1 AlgorithmIdentifier {{PBKDF2-PRFs}} ::=
+ {algorithm id-hmacWithSHA1, parameters NULL : NULL}
+
+PBKDF2-params ::= SEQUENCE {
+ salt CHOICE {
+ specified OCTET STRING,
+ otherSource AlgorithmIdentifier {{PBKDF2-SaltSources}}
+ },
+ iterationCount INTEGER (1..MAX),
+ keyLength INTEGER (1..MAX) OPTIONAL,
+ prf AlgorithmIdentifier {{PBKDF2-PRFs}} DEFAULT algid-hmacWithSHA1
+}
+
+PBKDF2-SaltSources ALGORITHM-IDENTIFIER ::= { ... }
+
+PBKDF2-PRFs ALGORITHM-IDENTIFIER ::= {
+ {NULL IDENTIFIED BY id-hmacWithSHA1} |
+ {NULL IDENTIFIED BY id-hmacWithSHA224} |
+ {NULL IDENTIFIED BY id-hmacWithSHA256} |
+ {NULL IDENTIFIED BY id-hmacWithSHA384} |
+ {NULL IDENTIFIED BY id-hmacWithSHA512} |
+ {NULL IDENTIFIED BY id-hmacWithSHA512-224} |
+ {NULL IDENTIFIED BY id-hmacWithSHA512-256},
+ ...
+}
+
+
+-- ============================
+ -- PBES1
+-- ============================
+
+PBES1Algorithms ALGORITHM-IDENTIFIER ::= {
+ {PBEParameter IDENTIFIED BY pbeWithMD2AndDES-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithMD2AndRC2-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithMD5AndDES-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithMD5AndRC2-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithSHA1AndDES-CBC} |
+ {PBEParameter IDENTIFIED BY pbeWithSHA1AndRC2-CBC},
+ ...
+}
+
+pbeWithMD2AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 1}
+pbeWithMD2AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 4}
+pbeWithMD5AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 3}
+pbeWithMD5AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 6}
+pbeWithSHA1AndDES-CBC OBJECT IDENTIFIER ::= {pkcs-5 10}
+pbeWithSHA1AndRC2-CBC OBJECT IDENTIFIER ::= {pkcs-5 11}
+
+PBEParameter ::= SEQUENCE {
+ salt OCTET STRING (SIZE(8)),
+ iterationCount INTEGER
+}
+
+
+-- ============================
+-- PBES2
+-- ============================
+
+PBES2Algorithms ALGORITHM-IDENTIFIER ::= {
+ {PBES2-params IDENTIFIED BY id-PBES2},
+ ...
+}
+
+id-PBES2 OBJECT IDENTIFIER ::= {pkcs-5 13}
+
+PBES2-params ::= SEQUENCE {
+ keyDerivationFunc AlgorithmIdentifier {{PBES2-KDFs}},
+ encryptionScheme AlgorithmIdentifier {{PBES2-Encs}}
+}
+
+PBES2-KDFs ALGORITHM-IDENTIFIER ::= {
+ {PBKDF2-params IDENTIFIED BY id-PBKDF2},
+ ...
+}
+
+PBES2-Encs ALGORITHM-IDENTIFIER ::= { ... }
+
+
+-- ============================
+-- PBMAC1
+-- ============================
+
+PBMAC1Algorithms ALGORITHM-IDENTIFIER ::= {
+ {PBMAC1-params IDENTIFIED BY id-PBMAC1},
+ ...
+}
+
+id-PBMAC1 OBJECT IDENTIFIER ::= {pkcs-5 14}
+
+PBMAC1-params ::= SEQUENCE {
+ keyDerivationFunc AlgorithmIdentifier {{PBMAC1-KDFs}},
+ messageAuthScheme AlgorithmIdentifier {{PBMAC1-MACs}}
+}
+
+PBMAC1-KDFs ALGORITHM-IDENTIFIER ::= {
+ {PBKDF2-params IDENTIFIED BY id-PBKDF2},
+ ...
+}
+
+PBMAC1-MACs ALGORITHM-IDENTIFIER ::= { ... }
+
+-- ============================
+-- Supporting techniques
+-- ============================
+
+digestAlgorithm OBJECT IDENTIFIER ::= {rsadsi 2}
+encryptionAlgorithm OBJECT IDENTIFIER ::= {rsadsi 3}
+
+SupportingAlgorithms ALGORITHM-IDENTIFIER ::= {
+ {NULL IDENTIFIED BY id-hmacWithSHA1} |
+ {OCTET STRING (SIZE(8)) IDENTIFIED BY desCBC} |
+ {OCTET STRING (SIZE(8)) IDENTIFIED BY des-EDE3-CBC} |
+ {RC2-CBC-Parameter IDENTIFIED BY rc2CBC} |
+ {RC5-CBC-Parameters IDENTIFIED BY rc5-CBC-PAD} |
+ {OCTET STRING (SIZE(16)) IDENTIFIED BY aes128-CBC-PAD} |
+ {OCTET STRING (SIZE(16)) IDENTIFIED BY aes192-CBC-PAD} |
+ {OCTET STRING (SIZE(16)) IDENTIFIED BY aes256-CBC-PAD},
+ ...
+}
+
+id-hmacWithSHA1 OBJECT IDENTIFIER ::= {digestAlgorithm 7}
+id-hmacWithSHA224 OBJECT IDENTIFIER ::= {digestAlgorithm 8}
+id-hmacWithSHA256 OBJECT IDENTIFIER ::= {digestAlgorithm 9}
+id-hmacWithSHA384 OBJECT IDENTIFIER ::= {digestAlgorithm 10}
+id-hmacWithSHA512 OBJECT IDENTIFIER ::= {digestAlgorithm 11}
+id-hmacWithSHA512-224 OBJECT IDENTIFIER ::= {digestAlgorithm 12}
+id-hmacWithSHA512-256 OBJECT IDENTIFIER ::= {digestAlgorithm 13}
+
+-- from OIW
+desCBC OBJECT IDENTIFIER ::= {oiw secsig(3) algorithms(2) 7}
+
+des-EDE3-CBC OBJECT IDENTIFIER ::= {encryptionAlgorithm 7}
+
+rc2CBC OBJECT IDENTIFIER ::= {encryptionAlgorithm 2}
+
+RC2-CBC-Parameter ::= SEQUENCE {
+ rc2ParameterVersion INTEGER OPTIONAL,
+ iv OCTET STRING (SIZE(8))
+}
+
+rc5-CBC-PAD OBJECT IDENTIFIER ::= {encryptionAlgorithm 9}
+
+RC5-CBC-Parameters ::= SEQUENCE {
+ version INTEGER {v1-0(16)} (v1-0),
+ rounds INTEGER (8..127),
+ blockSizeInBits INTEGER (64 | 128),
+ iv OCTET STRING OPTIONAL
+}
+
+aes OBJECT IDENTIFIER ::= { nistAlgorithms 1 }
+aes128-CBC-PAD OBJECT IDENTIFIER ::= { aes 2 }
+aes192-CBC-PAD OBJECT IDENTIFIER ::= { aes 22 }
+aes256-CBC-PAD OBJECT IDENTIFIER ::= { aes 42 }
+
+END
\ No newline at end of file
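PKCS-5 has no IMPORTS, so it compiles on its own, which makes it a convenient module for checking how these definitions look from Erlang: a SEQUENCE becomes a tuple tagged with the type name, a CHOICE value is an {Alternative, Value} pair, an omitted OPTIONAL component is the atom asn1_NOVALUE, and a DEFAULT component may be left as asn1_DEFAULT. A hedged sketch with made-up salt and iteration values:

    %% Hedged sketch, not part of the patch.
    ok = asn1ct:compile("PKCS-5.asn1", [ber]),
    Params = {'PBKDF2-params',
              {specified, <<1,2,3,4,5,6,7,8>>},  %% salt CHOICE, specified alternative
              2048,                              %% iterationCount
              asn1_NOVALUE,                      %% keyLength (OPTIONAL, omitted)
              asn1_DEFAULT},                     %% prf left at its default (hmacWithSHA1)
    {ok, Der} = 'PKCS-5':encode('PBKDF2-params', Params),
    {ok, _Decoded} = 'PKCS-5':decode('PBKDF2-params', Der).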
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1
new file mode 100644
index 0000000000..4cea8db240
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-7.asn1
@@ -0,0 +1,326 @@
+PKCS-7 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-7(7)
+ modules(0) pkcs-7(1)}
+
+DEFINITIONS EXPLICIT TAGS ::=
+BEGIN
+
+--
+-- 3. Definitions
+--
+
+-- EXPORTS All;
+
+IMPORTS
+
+informationFramework, authenticationFramework
+ FROM UsefulDefinitions {joint-iso-itu-t ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+ Name, ATTRIBUTE
+ FROM InformationFramework informationFramework
+
+ ALGORITHM, Certificate, CertificateSerialNumber,
+ CertificateList
+ FROM AuthenticationFramework authenticationFramework
+
+ contentType, messageDigest, signingTime, counterSignature
+ FROM PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-9(9) modules(0) pkcs-9(1)};
+--
+-- 6. Useful types
+--
+
+-- Also defined in X.509
+-- Redeclared here as a parameterized type
+AlgorithmIdentifier {ALGORITHM:IOSet} ::= SEQUENCE {
+ algorithm ALGORITHM.&id({IOSet}),
+ parameters ALGORITHM.&Type({IOSet}{@algorithm}) OPTIONAL
+}
+
+-- Also defined in X.501
+-- Redeclared here as a parameterized type
+Attribute { ATTRIBUTE:IOSet } ::= SEQUENCE {
+ type ATTRIBUTE.&id({IOSet}),
+ values SET SIZE (1..MAX) OF ATTRIBUTE.&Type({IOSet}{@type})
+}
+
+CertificateRevocationLists ::=
+ SET OF CertificateList
+
+Certificates ::=
+ SEQUENCE OF Certificate
+
+CRLSequence ::=
+ SEQUENCE OF CertificateList
+
+ContentEncryptionAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{ContentEncryptionAlgorithms}}
+
+ContentEncryptionAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+DigestAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{DigestAlgorithms}}
+
+DigestAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+DigestEncryptionAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{DigestEncryptionAlgorithms}}
+
+DigestEncryptionAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+ExtendedCertificateOrCertificate ::= CHOICE {
+ certificate Certificate, -- X.509
+ extendedCertificate [0] IMPLICIT ExtendedCertificate -- PKCS#6
+}
+
+ExtendedCertificate ::= Certificate -- cheating
+
+ExtendedCertificatesAndCertificates ::=
+ SET OF ExtendedCertificateOrCertificate
+
+IssuerAndSerialNumber ::= SEQUENCE {
+ issuer Name,
+ serialNumber CertificateSerialNumber
+}
+
+KeyEncryptionAlgorithmIdentifier ::=
+ AlgorithmIdentifier {{KeyEncryptionAlgorithms}}
+
+KeyEncryptionAlgorithms ALGORITHM ::= {
+ ... -- add any application-specific algorithms here
+}
+
+--
+-- 7. General syntax
+--
+
+ContentInfo ::= SEQUENCE {
+ contentType ContentType,
+ content [0] EXPLICIT CONTENTS.&Type({Contents}{@contentType})
+OPTIONAL
+}
+
+CONTENTS ::= TYPE-IDENTIFIER
+
+Contents CONTENTS ::= {
+ {Data IDENTIFIED BY data} |
+ {SignedData IDENTIFIED BY signedData} |
+ {EnvelopedData IDENTIFIED BY envelopedData} |
+ {SignedAndEnvelopedData IDENTIFIED BY signedAndEnvelopedData} |
+ {DigestedData IDENTIFIED BY digestedData} |
+ {EncryptedData IDENTIFIED BY encryptedData},
+ ... -- add any application-specific types/contents here
+}
+
+ContentType ::= CONTENTS.&id({Contents})
+
+--
+-- 8. Data content type
+--
+
+Data ::= OCTET STRING
+
+--
+-- 9. Signed-data content type
+--
+
+SignedData ::= SEQUENCE {
+ version INTEGER {sdVer1(1), sdVer2(2)} (sdVer1 | sdVer2),
+ digestAlgorithms
+ DigestAlgorithmIdentifiers,
+ contentInfo ContentInfo,
+ certificates CHOICE {
+ certSet [0] IMPLICIT ExtendedCertificatesAndCertificates,
+ certSequence [2] IMPLICIT Certificates
+ } OPTIONAL,
+ crls CHOICE {
+ crlSet [1] IMPLICIT CertificateRevocationLists,
+ crlSequence [3] IMPLICIT CRLSequence
+ } OPTIONAL,
+ signerInfos SignerInfos
+} (WITH COMPONENTS { ..., version (sdVer1),
+ digestAlgorithms (WITH COMPONENTS { ..., daSet PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSequence ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSequence ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSet PRESENT })
+ } |
+ WITH COMPONENTS { ..., version (sdVer2),
+ digestAlgorithms (WITH COMPONENTS { ..., daSequence PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSet ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSet ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSequence PRESENT })
+})
+
+SignerInfos ::= CHOICE {
+ siSet SET OF SignerInfo,
+ siSequence SEQUENCE OF SignerInfo
+}
+
+DigestAlgorithmIdentifiers ::= CHOICE {
+ daSet SET OF DigestAlgorithmIdentifier,
+ daSequence SEQUENCE OF DigestAlgorithmIdentifier
+}
+
+SignerInfo ::= SEQUENCE {
+ version INTEGER {siVer1(1), siVer2(2)} (siVer1 | siVer2),
+ issuerAndSerialNumber
+ IssuerAndSerialNumber,
+ digestAlgorithm DigestAlgorithmIdentifier,
+ authenticatedAttributes CHOICE {
+ aaSet [0] IMPLICIT SET OF Attribute {{Authenticated}},
+ aaSequence [2] EXPLICIT SEQUENCE OF Attribute {{Authenticated}}
+ -- Explicit because easier to compute digest on sequence of attributes and then reuse
+ -- encoded sequence in aaSequence.
+ } OPTIONAL,
+ digestEncryptionAlgorithm
+ DigestEncryptionAlgorithmIdentifier,
+ encryptedDigest EncryptedDigest,
+ unauthenticatedAttributes CHOICE {
+ uaSet [1] IMPLICIT SET OF Attribute {{Unauthenticated}},
+ uaSequence [3] IMPLICIT SEQUENCE OF Attribute {{Unauthenticated}}
+ } OPTIONAL
+} (WITH COMPONENTS { ..., version (siVer1),
+ authenticatedAttributes (WITH COMPONENTS { ..., aaSequence ABSENT }),
+ unauthenticatedAttributes (WITH COMPONENTS { ..., uaSequence ABSENT })
+} | WITH COMPONENTS { ..., version (siVer2),
+ authenticatedAttributes (WITH COMPONENTS { ..., aaSet ABSENT }),
+ unauthenticatedAttributes (WITH COMPONENTS { ..., uaSet ABSENT })
+})
+
+Authenticated ATTRIBUTE ::= {
+ contentType |
+ messageDigest,
+ ..., -- add application-specific attributes here
+ signingTime
+}
+
+Unauthenticated ATTRIBUTE ::= {
+ ..., -- add application-specific attributes here
+ counterSignature
+}
+
+EncryptedDigest ::= OCTET STRING
+
+DigestInfo ::= SEQUENCE {
+ digestAlgorithm DigestAlgorithmIdentifier,
+ digest Digest
+}
+
+Digest ::= OCTET STRING
+
+--
+-- 10. Enveloped-data content type
+--
+
+EnvelopedData ::= SEQUENCE {
+ version INTEGER {edVer0(0), edVer1(1)} (edVer0 | edVer1),
+ recipientInfos RecipientInfos,
+ encryptedContentInfo
+ EncryptedContentInfo
+} (WITH COMPONENTS { ..., version (edVer0),
+ recipientInfos (WITH COMPONENTS { ..., riSet PRESENT })
+} | WITH COMPONENTS { ..., version (edVer1),
+ recipientInfos (WITH COMPONENTS { ..., riSequence PRESENT })
+})
+
+RecipientInfos ::= CHOICE {
+ riSet SET OF RecipientInfo,
+ riSequence SEQUENCE OF RecipientInfo
+}
+
+EncryptedContentInfo ::= SEQUENCE {
+ contentType ContentType,
+ contentEncryptionAlgorithm
+ ContentEncryptionAlgorithmIdentifier,
+ encryptedContent
+ [0] IMPLICIT EncryptedContent OPTIONAL
+}
+
+EncryptedContent ::= OCTET STRING
+
+RecipientInfo ::= SEQUENCE {
+ version INTEGER {riVer0(0)} (riVer0),
+ issuerAndSerialNumber
+ IssuerAndSerialNumber,
+ keyEncryptionAlgorithm
+ KeyEncryptionAlgorithmIdentifier,
+ encryptedKey EncryptedKey
+}
+
+EncryptedKey ::= OCTET STRING
+
+--
+-- 11. Signed-and-enveloped-data content type
+--
+
+SignedAndEnvelopedData ::= SEQUENCE {
+ version INTEGER {seVer1(1), seVer2(2)} (seVer1 | seVer2),
+ recipientInfos RecipientInfos,
+ digestAlgorithms
+ DigestAlgorithmIdentifiers,
+ encryptedContentInfo
+ EncryptedContentInfo,
+ certificates CHOICE {
+ certSet [0] IMPLICIT ExtendedCertificatesAndCertificates,
+ certSequence [2] IMPLICIT Certificates
+ } OPTIONAL,
+ crls CHOICE {
+ crlSet [1] IMPLICIT CertificateRevocationLists,
+ crlSequence [3] IMPLICIT CRLSequence
+ } OPTIONAL,
+ signerInfos SignerInfos
+} (WITH COMPONENTS { ..., version (seVer1),
+ recipientInfos (WITH COMPONENTS { ..., riSet PRESENT }),
+ digestAlgorithms (WITH COMPONENTS { ..., daSet PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSequence ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSequence ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSet PRESENT })
+} |
+ WITH COMPONENTS { ..., version (seVer2),
+ recipientInfos (WITH COMPONENTS { ..., riSequence PRESENT }),
+ digestAlgorithms (WITH COMPONENTS { ..., daSequence PRESENT }),
+ certificates (WITH COMPONENTS { ..., certSet ABSENT }),
+ crls (WITH COMPONENTS { ..., crlSet ABSENT }),
+ signerInfos (WITH COMPONENTS { ..., siSequence PRESENT })
+})
+
+--
+-- 12. Digested-data content type
+--
+
+DigestedData ::= SEQUENCE {
+ version INTEGER {ddVer0(0)} (ddVer0),
+ digestAlgorithm DigestAlgorithmIdentifier,
+ contentInfo ContentInfo,
+ digest Digest
+}
+
+--
+-- 13. Encrypted-data content type
+--
+
+EncryptedData ::= SEQUENCE {
+ version INTEGER {edVer0(0)} (edVer0),
+ encryptedContentInfo EncryptedContentInfo
+}
+
+--
+-- 14. Object Identifiers
+--
+
+pkcs-7 OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 7 }
+data OBJECT IDENTIFIER ::= { pkcs-7 1 }
+signedData OBJECT IDENTIFIER ::= { pkcs-7 2 }
+envelopedData OBJECT IDENTIFIER ::= { pkcs-7 3 }
+signedAndEnvelopedData OBJECT IDENTIFIER ::= { pkcs-7 4 }
+digestedData OBJECT IDENTIFIER ::= { pkcs-7 5 }
+encryptedData OBJECT IDENTIFIER ::= { pkcs-7 6 }
+
+END
\ No newline at end of file
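ContentInfo's content field is an open type constrained by the Contents object set ({Contents}{@contentType}), so the generated decoder can resolve the inner type automatically for the content types listed in that set. A hedged sketch; the input file is hypothetical and the X.500 modules named in IMPORTS are assumed to be compiled in the same directory.

    %% Hedged sketch, not part of the patch.
    ok = asn1ct:compile("PKCS-7.asn1", [ber, {i, "."}]),
    {ok, Der} = file:read_file("message.p7"),   %% hypothetical PKCS#7 blob
    %% ContentType decodes to an OID tuple, e.g. {1,2,840,113549,1,7,2}
    %% for signedData; Content is then the decoded inner value.
    {ok, {'ContentInfo', _ContentType, _Content}} =
        'PKCS-7':decode('ContentInfo', Der).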
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1
new file mode 100644
index 0000000000..266f90170a
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-8.asn1
@@ -0,0 +1,61 @@
+PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-8(8)
+ modules(1) pkcs-8(1)}
+
+-- $Revision: 1.5 $
+
+-- This module has been checked for conformance with the ASN.1
+-- standard by the OSS ASN.1 Tools
+
+DEFINITIONS IMPLICIT TAGS ::=
+
+BEGIN
+
+-- EXPORTS All --
+-- All types and values defined in this module is exported for use in other
+-- ASN.1 modules.
+
+IMPORTS
+
+informationFramework
+ FROM UsefulDefinitions {joint-iso-itu-t(2) ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+Attribute
+ FROM InformationFramework informationFramework
+
+AlgorithmIdentifier, ALGORITHM-IDENTIFIER
+ FROM PKCS-5 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-5(5) modules(16) pkcs-5(1)};
+
+-- Private-key information syntax
+
+PrivateKeyInfo ::= SEQUENCE {
+ version Version,
+ privateKeyAlgorithm AlgorithmIdentifier {{PrivateKeyAlgorithms}},
+ privateKey PrivateKey,
+ attributes [0] Attributes OPTIONAL }
+
+Version ::= INTEGER {v1(0)} (v1,...)
+
+PrivateKey ::= OCTET STRING
+
+Attributes ::= SET OF Attribute
+
+-- Encrypted private-key information syntax
+
+EncryptedPrivateKeyInfo ::= SEQUENCE {
+ encryptionAlgorithm AlgorithmIdentifier {{KeyEncryptionAlgorithms}},
+ encryptedData EncryptedData
+}
+
+EncryptedData ::= OCTET STRING
+
+PrivateKeyAlgorithms ALGORITHM-IDENTIFIER ::= {
+ ... -- For local profiles
+}
+
+KeyEncryptionAlgorithms ALGORITHM-IDENTIFIER ::= {
+ ... -- For local profiles
+}
+
+END
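PKCS-8 takes its parameterized AlgorithmIdentifier from PKCS-5 and Attribute from InformationFramework, so those must be compiled first; after that an unencrypted PKCS#8 key decodes straight into a PrivateKeyInfo tuple. A hedged sketch with a hypothetical input file:

    %% Hedged sketch, not part of the patch.
    ok = asn1ct:compile("PKCS-8.asn1", [ber, {i, "."}]),
    {ok, Der} = file:read_file("key.p8"),   %% hypothetical unencrypted DER key
    %% version decodes to the integer 0 (v1); the key itself stays an opaque
    %% OCTET STRING binary.
    {ok, {'PrivateKeyInfo', 0, _Alg, _PrivateKey, _Attrs}} =
        'PKCS-8':decode('PrivateKeyInfo', Der).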
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1
new file mode 100644
index 0000000000..cd561f4d7e
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS-9.asn1
@@ -0,0 +1,391 @@
+PKCS-9 {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+pkcs-9(9) modules(0) pkcs-9(1)}
+
+-- $Revision$
+
+DEFINITIONS IMPLICIT TAGS ::=
+
+BEGIN
+
+-- EXPORTS All --
+-- All types and values defined in this module is exported for use in
+-- other ASN.1 modules.
+
+IMPORTS
+
+informationFramework, authenticationFramework, selectedAttributeTypes,
+ upperBounds , id-at
+ FROM UsefulDefinitions {joint-iso-itu-t ds(5) module(1)
+ usefulDefinitions(0) 3}
+
+ub-name
+ FROM UpperBounds upperBounds
+
+OBJECT-CLASS, ATTRIBUTE, MATCHING-RULE, Attribute, top, objectIdentifierMatch
+ FROM InformationFramework informationFramework
+
+ALGORITHM, Extensions, Time
+ FROM AuthenticationFramework authenticationFramework
+
+DirectoryString, octetStringMatch, caseIgnoreMatch, caseExactMatch,
+ generalizedTimeMatch, integerMatch, serialNumber
+ FROM SelectedAttributeTypes selectedAttributeTypes
+
+ContentInfo, SignerInfo
+ FROM CryptographicMessageSyntax-2009 {iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) pkcs-9(9) smime(16) modules(0) cms(1)}
+
+EncryptedPrivateKeyInfo
+ FROM PKCS-8 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-8(8) modules(1) pkcs-8(1)}
+
+PFX
+ FROM PKCS-12 {iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-12(12) modules(0) pkcs-12(1)}
+
+-- PKCS15Token
+-- FROM PKCS-15 {iso(1) member-body(2) us(840) rsadsi(113549)
+-- pkcs(1) pkcs-15(15) modules(1) pkcs-15(1)}
+;
+
+-- Upper bounds
+pkcs-9-ub-pkcs9String INTEGER ::= 255
+pkcs-9-ub-emailAddress INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-unstructuredName INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-unstructuredAddress INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-challengePassword INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-friendlyName INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-signingDescription INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-match INTEGER ::= pkcs-9-ub-pkcs9String
+pkcs-9-ub-pseudonym INTEGER ::= ub-name
+pkcs-9-ub-placeOfBirth INTEGER ::= ub-name
+
+-- Object Identifiers
+
+pkcs-9 OBJECT IDENTIFIER ::= {iso(1) member-body(2) us(840)
+ rsadsi(113549) pkcs(1) 9}
+
+ -- Main arcs
+pkcs-9-mo OBJECT IDENTIFIER ::= {pkcs-9 0} -- Modules branch
+pkcs-9-oc OBJECT IDENTIFIER ::= {pkcs-9 24} -- Object class branch
+pkcs-9-at OBJECT IDENTIFIER ::= {pkcs-9 25} -- Attribute branch, for new attributes
+pkcs-9-sx OBJECT IDENTIFIER ::= {pkcs-9 26} -- For syntaxes (RFC 2252)
+pkcs-9-mr OBJECT IDENTIFIER ::= {pkcs-9 27} -- Matching rules
+
+ -- Object classes
+pkcs-9-oc-pkcsEntity OBJECT IDENTIFIER ::= {pkcs-9-oc 1}
+pkcs-9-oc-naturalPerson OBJECT IDENTIFIER ::= {pkcs-9-oc 2}
+
+ -- Attributes
+pkcs-9-at-emailAddress OBJECT IDENTIFIER ::= {pkcs-9 1}
+pkcs-9-at-unstructuredName OBJECT IDENTIFIER ::= {pkcs-9 2}
+pkcs-9-at-contentType OBJECT IDENTIFIER ::= {pkcs-9 3}
+pkcs-9-at-messageDigest OBJECT IDENTIFIER ::= {pkcs-9 4}
+pkcs-9-at-signingTime OBJECT IDENTIFIER ::= {pkcs-9 5}
+pkcs-9-at-counterSignature OBJECT IDENTIFIER ::= {pkcs-9 6}
+pkcs-9-at-challengePassword OBJECT IDENTIFIER ::= {pkcs-9 7}
+pkcs-9-at-unstructuredAddress OBJECT IDENTIFIER ::= {pkcs-9 8}
+pkcs-9-at-extendedCertificateAttributes OBJECT IDENTIFIER ::= {pkcs-9 9}
+
+-- Obsolete (?) attribute identifiers, purportedly from "tentative
+-- PKCS #9 draft"
+-- pkcs-9-at-issuerAndSerialNumber OBJECT IDENTIFIER ::= {pkcs-9 10}
+-- pkcs-9-at-passwordCheck OBJECT IDENTIFIER ::= {pkcs-9 11}
+-- pkcs-9-at-publicKey OBJECT IDENTIFIER ::= {pkcs-9 12}
+
+pkcs-9-at-signingDescription OBJECT IDENTIFIER ::= {pkcs-9 13}
+pkcs-9-at-extensionRequest OBJECT IDENTIFIER ::= {pkcs-9 14}
+pkcs-9-at-smimeCapabilities OBJECT IDENTIFIER ::= {pkcs-9 15}
+
+-- Unused (?)
+-- pkcs-9-at-? OBJECT IDENTIFIER ::= {pkcs-9 17}
+-- pkcs-9-at-? OBJECT IDENTIFIER ::= {pkcs-9 18}
+-- pkcs-9-at-? OBJECT IDENTIFIER ::= {pkcs-9 19}
+
+pkcs-9-at-friendlyName OBJECT IDENTIFIER ::= {pkcs-9 20}
+pkcs-9-at-localKeyId OBJECT IDENTIFIER ::= {pkcs-9 21}
+pkcs-9-at-userPKCS12 OBJECT IDENTIFIER ::= {2 16 840 1 113730 3 1 216}
+pkcs-9-at-pkcs15Token OBJECT IDENTIFIER ::= {pkcs-9-at 1}
+pkcs-9-at-encryptedPrivateKeyInfo OBJECT IDENTIFIER ::= {pkcs-9-at 2}
+pkcs-9-at-randomNonce OBJECT IDENTIFIER ::= {pkcs-9-at 3}
+pkcs-9-at-sequenceNumber OBJECT IDENTIFIER ::= {pkcs-9-at 4}
+pkcs-9-at-pkcs7PDU OBJECT IDENTIFIER ::= {pkcs-9-at 5}
+
+ -- IETF PKIX Attribute branch
+ietf-at OBJECT IDENTIFIER ::= {1 3 6 1 5 5 7 9}
+
+pkcs-9-at-dateOfBirth OBJECT IDENTIFIER ::= {ietf-at 1}
+pkcs-9-at-placeOfBirth OBJECT IDENTIFIER ::= {ietf-at 2}
+pkcs-9-at-gender OBJECT IDENTIFIER ::= {ietf-at 3}
+pkcs-9-at-countryOfCitizenship OBJECT IDENTIFIER ::= {ietf-at 4}
+pkcs-9-at-countryOfResidence OBJECT IDENTIFIER ::= {ietf-at 5}
+
+ -- Syntaxes (for use with LDAP accessible directories)
+pkcs-9-sx-pkcs9String OBJECT IDENTIFIER ::= {pkcs-9-sx 1}
+pkcs-9-sx-signingTime OBJECT IDENTIFIER ::= {pkcs-9-sx 2}
+
+ -- Matching rules
+pkcs-9-mr-caseIgnoreMatch OBJECT IDENTIFIER ::= {pkcs-9-mr 1}
+pkcs-9-mr-signingTimeMatch OBJECT IDENTIFIER ::= {pkcs-9-mr 2}
+
+ -- Arcs with attributes defined elsewhere
+smime OBJECT IDENTIFIER ::= {pkcs-9 16}
+ -- Main arc for S/MIME (RFC 2633)
+certTypes OBJECT IDENTIFIER ::= {pkcs-9 22}
+ -- Main arc for certificate types defined in PKCS #12
+crlTypes OBJECT IDENTIFIER ::= {pkcs-9 23}
+ -- Main arc for crl types defined in PKCS #12
+
+ -- Other object identifiers
+id-at-pseudonym OBJECT IDENTIFIER ::= {id-at 65}
+
+-- Useful types
+
+PKCS9String {INTEGER : maxSize} ::= CHOICE {
+ ia5String IA5String (SIZE(1..maxSize)),
+ directoryString DirectoryString {maxSize}
+}
+
+-- Object classes
+
+pkcsEntity OBJECT-CLASS ::= {
+ SUBCLASS OF { top }
+ KIND auxiliary
+ MAY CONTAIN { PKCSEntityAttributeSet }
+ ID pkcs-9-oc-pkcsEntity
+}
+
+naturalPerson OBJECT-CLASS ::= {
+ SUBCLASS OF { top }
+ KIND auxiliary
+ MAY CONTAIN { NaturalPersonAttributeSet }
+ ID pkcs-9-oc-naturalPerson
+}
+
+-- Attribute sets
+
+PKCSEntityAttributeSet ATTRIBUTE ::= {
+ pKCS7PDU |
+ userPKCS12 |
+-- pKCS15Token |
+ encryptedPrivateKeyInfo,
+ ... -- For future extensions
+}
+
+NaturalPersonAttributeSet ATTRIBUTE ::= {
+ emailAddress |
+ unstructuredName |
+ unstructuredAddress |
+ dateOfBirth |
+ placeOfBirth |
+ gender |
+ countryOfCitizenship |
+ countryOfResidence |
+ pseudonym |
+ serialNumber,
+ ... -- For future extensions
+}
+
+-- Attributes
+
+pKCS7PDU ATTRIBUTE ::= {
+ WITH SYNTAX ContentInfo
+ ID pkcs-9-at-pkcs7PDU
+}
+
+userPKCS12 ATTRIBUTE ::= {
+ WITH SYNTAX PFX
+ ID pkcs-9-at-userPKCS12
+}
+
+-- pKCS15Token ATTRIBUTE ::= {
+-- WITH SYNTAX PKCS15Token
+-- ID pkcs-9-at-pkcs15Token
+-- }
+
+encryptedPrivateKeyInfo ATTRIBUTE ::= {
+ WITH SYNTAX EncryptedPrivateKeyInfo
+ ID pkcs-9-at-encryptedPrivateKeyInfo
+}
+
+emailAddress ATTRIBUTE ::= {
+ WITH SYNTAX IA5String (SIZE(1..pkcs-9-ub-emailAddress))
+ EQUALITY MATCHING RULE pkcs9CaseIgnoreMatch
+ ID pkcs-9-at-emailAddress
+}
+
+unstructuredName ATTRIBUTE ::= {
+ WITH SYNTAX PKCS9String {pkcs-9-ub-unstructuredName}
+ EQUALITY MATCHING RULE pkcs9CaseIgnoreMatch
+ ID pkcs-9-at-unstructuredName
+}
+
+unstructuredAddress ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-unstructuredAddress}
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ ID pkcs-9-at-unstructuredAddress
+}
+
+dateOfBirth ATTRIBUTE ::= {
+ WITH SYNTAX GeneralizedTime
+ EQUALITY MATCHING RULE generalizedTimeMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-dateOfBirth
+}
+
+placeOfBirth ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-placeOfBirth}
+ EQUALITY MATCHING RULE caseExactMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-placeOfBirth
+}
+
+gender ATTRIBUTE ::= {
+ WITH SYNTAX PrintableString (SIZE(1) ^ FROM ("M" | "F" | "m" | "f"))
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-gender
+}
+
+countryOfCitizenship ATTRIBUTE ::= {
+ WITH SYNTAX PrintableString (SIZE(2))(CONSTRAINED BY {
+ -- Must be a two-letter country acronym in accordance with
+ -- ISO/IEC 3166 --})
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ ID pkcs-9-at-countryOfCitizenship
+}
+
+countryOfResidence ATTRIBUTE ::= {
+ WITH SYNTAX PrintableString (SIZE(2))(CONSTRAINED BY {
+ -- Must be a two-letter country acronym in accordance with
+ -- ISO/IEC 3166 --})
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ ID pkcs-9-at-countryOfResidence
+}
+
+pseudonym ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-pseudonym}
+ EQUALITY MATCHING RULE caseExactMatch
+ ID id-at-pseudonym
+}
+
+contentType ATTRIBUTE ::= {
+ WITH SYNTAX ContentType
+ EQUALITY MATCHING RULE objectIdentifierMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-contentType
+}
+
+ContentType ::= OBJECT IDENTIFIER
+
+messageDigest ATTRIBUTE ::= {
+ WITH SYNTAX MessageDigest
+ EQUALITY MATCHING RULE octetStringMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-messageDigest
+}
+
+MessageDigest ::= OCTET STRING
+
+signingTime ATTRIBUTE ::= {
+ WITH SYNTAX SigningTime
+ EQUALITY MATCHING RULE signingTimeMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-signingTime
+}
+
+SigningTime ::= Time -- imported from ISO/IEC 9594-8
+
+randomNonce ATTRIBUTE ::= {
+ WITH SYNTAX RandomNonce
+ EQUALITY MATCHING RULE octetStringMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-randomNonce
+}
+
+RandomNonce ::= OCTET STRING (SIZE(4..MAX)) -- At least four bytes long
+
+sequenceNumber ATTRIBUTE ::= {
+ WITH SYNTAX SequenceNumber
+ EQUALITY MATCHING RULE integerMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-sequenceNumber
+}
+
+SequenceNumber ::= INTEGER (1..MAX)
+
+counterSignature ATTRIBUTE ::= {
+ WITH SYNTAX SignerInfo
+ ID pkcs-9-at-counterSignature
+}
+
+challengePassword ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-challengePassword}
+ EQUALITY MATCHING RULE caseExactMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-challengePassword
+}
+
+extensionRequest ATTRIBUTE ::= {
+ WITH SYNTAX ExtensionRequest
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-extensionRequest
+}
+
+ExtensionRequest ::= Extensions
+
+extendedCertificateAttributes ATTRIBUTE ::= {
+ WITH SYNTAX SET OF Attribute
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-extendedCertificateAttributes
+}
+
+friendlyName ATTRIBUTE ::= {
+ WITH SYNTAX BMPString (SIZE(1..pkcs-9-ub-friendlyName))
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-friendlyName
+}
+
+localKeyId ATTRIBUTE ::= {
+ WITH SYNTAX OCTET STRING
+ EQUALITY MATCHING RULE octetStringMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-localKeyId
+}
+
+signingDescription ATTRIBUTE ::= {
+ WITH SYNTAX DirectoryString {pkcs-9-ub-signingDescription}
+ EQUALITY MATCHING RULE caseIgnoreMatch
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-signingDescription
+}
+
+smimeCapabilities ATTRIBUTE ::= {
+ WITH SYNTAX SMIMECapabilities
+ SINGLE VALUE TRUE
+ ID pkcs-9-at-smimeCapabilities
+}
+
+SMIMECapabilities ::= SEQUENCE OF SMIMECapability
+
+SMIMECapability ::= SEQUENCE {
+ algorithm ALGORITHM.&id ({SMIMEv3Algorithms}),
+ parameters ALGORITHM.&Type ({SMIMEv3Algorithms}{@algorithm})
+}
+
+SMIMEv3Algorithms ALGORITHM ::= {...-- See RFC 2633 --}
+
+ -- Matching rules
+
+pkcs9CaseIgnoreMatch MATCHING-RULE ::= {
+ SYNTAX PKCS9String {pkcs-9-ub-match}
+ ID pkcs-9-mr-caseIgnoreMatch
+}
+
+signingTimeMatch MATCHING-RULE ::= {
+ SYNTAX SigningTime
+ ID pkcs-9-mr-signingTimeMatch
+}
+
+END
\ No newline at end of file
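A practical note on compiling this one: PKCS-9 imports PFX from PKCS-12 while PKCS-12 imports friendlyName, localKeyId, certTypes and crlTypes from PKCS-9, so the two cannot be compiled one after the other against each other's .asn1db files; they have to go through the multi-file compilation described in the asn1ct documentation, which merges the listed sources into a single generated module named after the set file. The sketch below assumes a hypothetical set file PKCS.set.asn1 listing PKCS-9.asn1, PKCS-12.asn1 and the modules they import.

    %% Hedged sketch, not part of the patch; 'PKCS' is the hypothetical name
    %% of the merged module produced from PKCS.set.asn1.
    ok = asn1ct:compile("PKCS.set.asn1", [ber]),
    %% The imported X.509 Time CHOICE is an {Alternative, Value} pair:
    {ok, Bin} = 'PKCS':encode('SigningTime', {utcTime, "250101120000Z"}),
    {ok, _} = 'PKCS':decode('SigningTime', Bin).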
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7BodyPartType.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS7BodyPartType.asn1
index 525ee3c5ec..1bcc2281a1 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/PKCS7BodyPartType.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKCS7BodyPartType.asn1
@@ -6,7 +6,7 @@ BEGIN
IMPORTS
-- PKCS#7
ContentInfo
- FROM PKCS7 {iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1)
+ FROM PKCS-7 {iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1)
7 module(0)}
-- module not formally defined in the PKCS#7document, therefore defined in Annex O
-- IPMS Information Objects
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1
new file mode 100644
index 0000000000..fde5bddbf3
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-CommonTypes-2009.asn1
@@ -0,0 +1,166 @@
+ PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+
+ -- ATTRIBUTE
+ --
+ -- Describe the set of data associated with an attribute of some type
+ --
+ -- &id is an OID identifying the attribute
+ -- &Type is the ASN.1 type structure for the attribute; not all
+ -- attributes have a data structure, so this field is optional
+ -- &minCount contains the minimum number of times the attribute can
+ -- occur in an AttributeSet
+ -- &maxCount contains the maximum number of times the attribute can
+ -- appear in an AttributeSet
+ -- Note: this cannot be automatically enforced as the field
+ -- cannot be defaulted to MAX.
+ -- &equality-match contains information about how matching should be
+ -- done
+ --
+ -- Currently we are using two different prefixes for attributes.
+ --
+ -- at- for certificate attributes
+ -- aa- for CMS attributes
+ --
+
+ ATTRIBUTE ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &Type OPTIONAL,
+ &equality-match MATCHING-RULE OPTIONAL,
+ &minCount INTEGER DEFAULT 1,
+ &maxCount INTEGER OPTIONAL
+ } WITH SYNTAX {
+ [TYPE &Type]
+ [EQUALITY MATCHING RULE &equality-match]
+ [COUNTS [MIN &minCount] [MAX &maxCount]]
+ IDENTIFIED BY &id
+ }
+
+ -- Specification of MATCHING-RULE information object class
+ --
+
+ MATCHING-RULE ::= CLASS {
+ &ParentMatchingRules MATCHING-RULE OPTIONAL,
+ &AssertionType OPTIONAL,
+ &uniqueMatchIndicator ATTRIBUTE OPTIONAL,
+ &id OBJECT IDENTIFIER UNIQUE
+ }
+ WITH SYNTAX {
+ [PARENT &ParentMatchingRules]
+ [SYNTAX &AssertionType]
+ [UNIQUE-MATCH-INDICATOR &uniqueMatchIndicator]
+ ID &id
+ }
+
+ -- AttributeSet
+ --
+ -- Used when a set of attributes is to occur.
+ --
+ -- type contains the identifier of the attribute
+ -- values contains a set of values where the structure of the ASN.1
+ -- is defined by the attribute
+ --
+ -- The parameter contains the set of objects describing
+ -- those attributes that can occur in this location.
+ --
+
+ AttributeSet{ATTRIBUTE:AttrSet} ::= SEQUENCE {
+ type ATTRIBUTE.&id({AttrSet}),
+ values SET SIZE (1..MAX) OF ATTRIBUTE.
+ &Type({AttrSet}{@type})
+ }
+
+ -- SingleAttribute
+ --
+ -- Used for a single valued attribute
+ --
+ -- The parameter contains the set of objects describing the
+ -- attributes that can occur in this location
+ --
+
+ SingleAttribute{ATTRIBUTE:AttrSet} ::= SEQUENCE {
+ type ATTRIBUTE.&id({AttrSet}),
+ value ATTRIBUTE.&Type({AttrSet}{@type})
+ }
+
+ -- EXTENSION
+ --
+ -- This class definition is used to describe the association of
+ -- object identifier and ASN.1 type structure for extensions
+ --
+ -- All extensions are prefixed with ext-
+ --
+ -- &id contains the object identifier for the extension
+ -- &ExtnType specifies the ASN.1 type structure for the extension
+ -- &Critical contains the set of legal values for the critical field.
+ -- This is normally {TRUE|FALSE} but in some instances may be
+ -- restricted to just one of these values.
+ --
+
+ EXTENSION ::= CLASS {
+ &id OBJECT IDENTIFIER UNIQUE,
+ &ExtnType,
+ &Critical BOOLEAN DEFAULT {TRUE | FALSE }
+ } WITH SYNTAX {
+ SYNTAX &ExtnType IDENTIFIED BY &id
+ [CRITICALITY &Critical]
+ }
+
+ -- Extensions
+ --
+ -- Used for a sequence of extensions.
+ --
+ -- The parameter contains the set of legal extensions that can
+ -- occur in this sequence.
+ --
+
+ Extensions{EXTENSION:ExtensionSet} ::=
+ SEQUENCE SIZE (1..MAX) OF Extension{{ExtensionSet}}
+
+ -- Extension
+ --
+ -- Used for a single extension
+ --
+ -- The parameter contains the set of legal extensions that can
+ -- occur in this extension.
+ --
+ -- The restriction on the critical field has been commented out
+ -- because the authors are not completely sure it is correct.
+ -- The restriction could be done using custom code rather than
+ -- compiler-generated code, however.
+ --
+
+ Extension{EXTENSION:ExtensionSet} ::= SEQUENCE {
+ extnID EXTENSION.&id({ExtensionSet}),
+ critical BOOLEAN
+ -- (EXTENSION.&Critical({ExtensionSet}{@extnID}))
+ DEFAULT FALSE,
+ extnValue OCTET STRING (CONTAINING
+ EXTENSION.&ExtnType({ExtensionSet}{@extnID}))
+ -- contains the DER encoding of the ASN.1 value
+ -- corresponding to the extension type identified
+ -- by extnID
+ }
+
+ -- Security Category
+ --
+ -- Security categories are used both for specifying clearances and
+ -- for labeling objects. We move this here from RFC 3281 so that
+ -- they will use a common single object class to express this
+ -- information.
+ --
+
+ SECURITY-CATEGORY ::= TYPE-IDENTIFIER
+
+ SecurityCategory{SECURITY-CATEGORY:Supported} ::= SEQUENCE {
+ type [0] IMPLICIT SECURITY-CATEGORY.
+ &id({Supported}),
+ value [1] EXPLICIT SECURITY-CATEGORY.
+ &Type({Supported}{@type})
+ }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1
new file mode 100644
index 0000000000..41cbaea67e
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX-X400Address-2009.asn1
@@ -0,0 +1,300 @@
+ --
+ -- This module is used to isolate all the X.400 naming information.
+ -- There is no reason to expect this to occur in a PKIX certificate.
+ --
+
+ PKIX-X400Address-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-x400address-02(60) }
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+
+ -- X.400 address syntax starts here
+
+ ORAddress ::= SEQUENCE {
+ built-in-standard-attributes BuiltInStandardAttributes,
+ built-in-domain-defined-attributes
+ BuiltInDomainDefinedAttributes OPTIONAL,
+
+ -- see also teletex-domain-defined-attributes
+ extension-attributes ExtensionAttributes OPTIONAL }
+
+ -- Built-in Standard Attributes
+
+ BuiltInStandardAttributes ::= SEQUENCE {
+ country-name CountryName OPTIONAL,
+ administration-domain-name AdministrationDomainName OPTIONAL,
+ network-address [0] IMPLICIT NetworkAddress OPTIONAL,
+ -- see also extended-network-address
+ terminal-identifier [1] IMPLICIT TerminalIdentifier OPTIONAL,
+ private-domain-name [2] PrivateDomainName OPTIONAL,
+ organization-name [3] IMPLICIT OrganizationName OPTIONAL,
+ -- see also teletex-organization-name
+ numeric-user-identifier [4] IMPLICIT NumericUserIdentifier
+ OPTIONAL,
+ personal-name [5] IMPLICIT PersonalName OPTIONAL,
+ -- see also teletex-personal-name
+ organizational-unit-names [6] IMPLICIT OrganizationalUnitNames
+ OPTIONAL }
+ -- see also teletex-organizational-unit-names
+
+ CountryName ::= [APPLICATION 1] CHOICE {
+ x121-dcc-code NumericString
+ (SIZE (ub-country-name-numeric-length)),
+ iso-3166-alpha2-code PrintableString
+ (SIZE (ub-country-name-alpha-length)) }
+
+ AdministrationDomainName ::= [APPLICATION 2] CHOICE {
+ numeric NumericString (SIZE (0..ub-domain-name-length)),
+ printable PrintableString (SIZE (0..ub-domain-name-length)) }
+
+ NetworkAddress ::= X121Address -- see also extended-network-address
+
+ X121Address ::= NumericString (SIZE (1..ub-x121-address-length))
+
+ TerminalIdentifier ::= PrintableString (SIZE
+ (1..ub-terminal-id-length))
+
+ PrivateDomainName ::= CHOICE {
+ numeric NumericString (SIZE (1..ub-domain-name-length)),
+ printable PrintableString (SIZE (1..ub-domain-name-length)) }
+
+ OrganizationName ::= PrintableString
+ (SIZE (1..ub-organization-name-length))
+ -- see also teletex-organization-name
+
+ NumericUserIdentifier ::= NumericString
+ (SIZE (1..ub-numeric-user-id-length))
+
+ PersonalName ::= SET {
+ surname [0] IMPLICIT PrintableString
+ (SIZE (1..ub-surname-length)),
+ given-name [1] IMPLICIT PrintableString
+ (SIZE (1..ub-given-name-length)) OPTIONAL,
+ initials [2] IMPLICIT PrintableString
+ (SIZE (1..ub-initials-length)) OPTIONAL,
+ generation-qualifier [3] IMPLICIT PrintableString
+ (SIZE (1..ub-generation-qualifier-length))
+ OPTIONAL }
+ -- see also teletex-personal-name
+
+ OrganizationalUnitNames ::= SEQUENCE SIZE (1..ub-organizational-units)
+ OF OrganizationalUnitName
+ -- see also teletex-organizational-unit-names
+
+ OrganizationalUnitName ::= PrintableString (SIZE
+ (1..ub-organizational-unit-name-length))
+
+ -- Built-in Domain-defined Attributes
+
+ BuiltInDomainDefinedAttributes ::= SEQUENCE SIZE
+ (1..ub-domain-defined-attributes) OF
+ BuiltInDomainDefinedAttribute
+
+ BuiltInDomainDefinedAttribute ::= SEQUENCE {
+ type PrintableString (SIZE
+ (1..ub-domain-defined-attribute-type-length)),
+ value PrintableString (SIZE
+ (1..ub-domain-defined-attribute-value-length)) }
+
+ -- Extension Attributes
+
+ ExtensionAttributes ::= SET SIZE (1..ub-extension-attributes) OF
+ ExtensionAttribute
+
+ EXTENSION-ATTRIBUTE ::= CLASS {
+ &id INTEGER (0..ub-extension-attributes) UNIQUE,
+ &Type
+ } WITH SYNTAX { &Type IDENTIFIED BY &id }
+
+ ExtensionAttribute ::= SEQUENCE {
+ extension-attribute-type [0] IMPLICIT EXTENSION-ATTRIBUTE.
+ &id({SupportedExtensionAttributes}),
+ extension-attribute-value [1] EXTENSION-ATTRIBUTE.
+ &Type({SupportedExtensionAttributes}
+ {@extension-attribute-type})}
+
+ SupportedExtensionAttributes EXTENSION-ATTRIBUTE ::= {
+ ea-commonName | ea-teletexCommonName | ea-teletexOrganizationName
+ | ea-teletexPersonalName | ea-teletexOrganizationalUnitNames |
+ ea-pDSName | ea-physicalDeliveryCountryName | ea-postalCode |
+ ea-physicalDeliveryOfficeName | ea-physicalDeliveryOfficeNumber |
+ ea-extensionORAddressComponents | ea-physicalDeliveryPersonalName
+ | ea-physicalDeliveryOrganizationName |
+ ea-extensionPhysicalDeliveryAddressComponents |
+ ea-unformattedPostalAddress | ea-streetAddress |
+ ea-postOfficeBoxAddress | ea-posteRestanteAddress |
+ ea-uniquePostalName | ea-localPostalAttributes |
+ ea-extendedNetworkAddress | ea-terminalType |
+ ea-teletexDomainDefinedAttributes, ... }
+
+ -- Extension types and attribute values
+
+ ea-commonName EXTENSION-ATTRIBUTE ::= { PrintableString
+ (SIZE (1..ub-common-name-length)) IDENTIFIED BY 1 }
+
+ ea-teletexCommonName EXTENSION-ATTRIBUTE ::= {TeletexString
+ (SIZE (1..ub-common-name-length)) IDENTIFIED BY 2 }
+
+ ea-teletexOrganizationName EXTENSION-ATTRIBUTE::= { TeletexString
+ (SIZE (1..ub-organization-name-length)) IDENTIFIED BY 3 }
+
+ ea-teletexPersonalName EXTENSION-ATTRIBUTE ::= {SET {
+ surname [0] IMPLICIT TeletexString
+ (SIZE (1..ub-surname-length)),
+ given-name [1] IMPLICIT TeletexString
+ (SIZE (1..ub-given-name-length)) OPTIONAL,
+ initials [2] IMPLICIT TeletexString
+ (SIZE (1..ub-initials-length)) OPTIONAL,
+ generation-qualifier [3] IMPLICIT TeletexString
+ (SIZE (1..ub-generation-qualifier-length))
+ OPTIONAL } IDENTIFIED BY 4 }
+
+ ea-teletexOrganizationalUnitNames EXTENSION-ATTRIBUTE ::=
+ { SEQUENCE SIZE (1..ub-organizational-units) OF
+ TeletexOrganizationalUnitName IDENTIFIED BY 5 }
+
+ TeletexOrganizationalUnitName ::= TeletexString
+ (SIZE (1..ub-organizational-unit-name-length))
+
+ ea-pDSName EXTENSION-ATTRIBUTE ::= {PrintableString
+ (SIZE (1..ub-pds-name-length)) IDENTIFIED BY 7 }
+
+ ea-physicalDeliveryCountryName EXTENSION-ATTRIBUTE ::= { CHOICE {
+ x121-dcc-code NumericString (SIZE
+ (ub-country-name-numeric-length)),
+ iso-3166-alpha2-code PrintableString
+ (SIZE (ub-country-name-alpha-length)) }
+ IDENTIFIED BY 8 }
+
+ ea-postalCode EXTENSION-ATTRIBUTE ::= { CHOICE {
+ numeric-code NumericString (SIZE (1..ub-postal-code-length)),
+ printable-code PrintableString (SIZE (1..ub-postal-code-length)) }
+ IDENTIFIED BY 9 }
+
+ ea-physicalDeliveryOfficeName EXTENSION-ATTRIBUTE ::=
+ { PDSParameter IDENTIFIED BY 10 }
+
+ ea-physicalDeliveryOfficeNumber EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 11 }
+
+ ea-extensionORAddressComponents EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 12 }
+
+ ea-physicalDeliveryPersonalName EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 13}
+
+ ea-physicalDeliveryOrganizationName EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 14 }
+
+ ea-extensionPhysicalDeliveryAddressComponents EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 15 }
+
+ ea-unformattedPostalAddress EXTENSION-ATTRIBUTE ::= { SET {
+ printable-address SEQUENCE SIZE (1..ub-pds-physical-address-lines)
+ OF PrintableString (SIZE (1..ub-pds-parameter-length))
+ OPTIONAL,
+ teletex-string TeletexString
+ (SIZE (1..ub-unformatted-address-length)) OPTIONAL }
+ IDENTIFIED BY 16 }
+
+ ea-streetAddress EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 17 }
+
+ ea-postOfficeBoxAddress EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 18 }
+
+ ea-posteRestanteAddress EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 19 }
+
+ ea-uniquePostalName EXTENSION-ATTRIBUTE ::=
+ { PDSParameter IDENTIFIED BY 20 }
+
+ ea-localPostalAttributes EXTENSION-ATTRIBUTE ::=
+ {PDSParameter IDENTIFIED BY 21 }
+ PDSParameter ::= SET {
+ printable-string PrintableString
+ (SIZE(1..ub-pds-parameter-length)) OPTIONAL,
+ teletex-string TeletexString
+ (SIZE(1..ub-pds-parameter-length)) OPTIONAL }
+
+ ea-extendedNetworkAddress EXTENSION-ATTRIBUTE ::= {
+ CHOICE {
+ e163-4-address SEQUENCE {
+ number [0] IMPLICIT NumericString
+ (SIZE (1..ub-e163-4-number-length)),
+ sub-address [1] IMPLICIT NumericString
+ (SIZE (1..ub-e163-4-sub-address-length)) OPTIONAL
+ },
+ psap-address [0] IMPLICIT PresentationAddress
+ } IDENTIFIED BY 22
+ }
+
+ PresentationAddress ::= SEQUENCE {
+ pSelector [0] EXPLICIT OCTET STRING OPTIONAL,
+ sSelector [1] EXPLICIT OCTET STRING OPTIONAL,
+ tSelector [2] EXPLICIT OCTET STRING OPTIONAL,
+ nAddresses [3] EXPLICIT SET SIZE (1..MAX) OF OCTET STRING }
+
+ ea-terminalType EXTENSION-ATTRIBUTE ::= {INTEGER {
+ telex (3),
+ teletex (4),
+ g3-facsimile (5),
+ g4-facsimile (6),
+ ia5-terminal (7),
+ videotex (8) } (0..ub-integer-options)
+ IDENTIFIED BY 23 }
+
+ -- Extension Domain-defined Attributes
+
+ ea-teletexDomainDefinedAttributes EXTENSION-ATTRIBUTE ::=
+ { SEQUENCE SIZE (1..ub-domain-defined-attributes) OF
+ TeletexDomainDefinedAttribute IDENTIFIED BY 6 }
+
+ TeletexDomainDefinedAttribute ::= SEQUENCE {
+ type TeletexString
+ (SIZE (1..ub-domain-defined-attribute-type-length)),
+ value TeletexString
+ (SIZE (1..ub-domain-defined-attribute-value-length)) }
+
+ -- specifications of Upper Bounds MUST be regarded as mandatory
+ -- from Annex B of ITU-T X.411 Reference Definition of MTS Parameter
+ -- Upper Bounds
+ -- Upper Bounds
+ ub-match INTEGER ::= 128
+ ub-common-name-length INTEGER ::= 64
+ ub-country-name-alpha-length INTEGER ::= 2
+ ub-country-name-numeric-length INTEGER ::= 3
+ ub-domain-defined-attributes INTEGER ::= 4
+ ub-domain-defined-attribute-type-length INTEGER ::= 8
+ ub-domain-defined-attribute-value-length INTEGER ::= 128
+ ub-domain-name-length INTEGER ::= 16
+ ub-extension-attributes INTEGER ::= 256
+ ub-e163-4-number-length INTEGER ::= 15
+ ub-e163-4-sub-address-length INTEGER ::= 40
+ ub-generation-qualifier-length INTEGER ::= 3
+ ub-given-name-length INTEGER ::= 16
+ ub-initials-length INTEGER ::= 5
+ ub-integer-options INTEGER ::= 256
+ ub-numeric-user-id-length INTEGER ::= 32
+ ub-organization-name-length INTEGER ::= 64
+ ub-organizational-unit-name-length INTEGER ::= 32
+ ub-organizational-units INTEGER ::= 4
+ ub-pds-name-length INTEGER ::= 16
+ ub-pds-parameter-length INTEGER ::= 30
+ ub-pds-physical-address-lines INTEGER ::= 6
+ ub-postal-code-length INTEGER ::= 16
+ ub-surname-length INTEGER ::= 40
+ ub-terminal-id-length INTEGER ::= 24
+ ub-unformatted-address-length INTEGER ::= 180
+ ub-x121-address-length INTEGER ::= 16
+
+ -- Note - upper bounds on string types, such as TeletexString, are
+ -- measured in characters. Excepting PrintableString or IA5String, a
+ -- significantly greater number of octets will be required to hold
+ -- such a value. As a minimum, 16 octets or twice the specified
+ -- upper bound, whichever is the larger, should be allowed for
+ -- TeletexString. For UTF8String or UniversalString, at least four
+ -- times the upper bound should be allowed.
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1
new file mode 100644
index 0000000000..b1232fb8f2
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1-PSS-OAEP-Algorithms-2009.asn1
@@ -0,0 +1,308 @@
+ PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-rsa-pkalgs-02(54)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AlgorithmIdentifier{}, ALGORITHM, DIGEST-ALGORITHM, KEY-TRANSPORT,
+ SIGNATURE-ALGORITHM, PUBLIC-KEY, SMIME-CAPS
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ id-sha1, mda-sha1, pk-rsa, RSAPublicKey
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)};
+
+ -- ============================
+ -- Object Set exports
+ -- ============================
+ --
+ -- Define top-level symbols with all of the objects defined for
+ -- export to other modules. These objects would be included as part
+ -- of an Object Set to restrict the set of legal values.
+ --
+
+ PublicKeys PUBLIC-KEY ::= { pk-rsaSSA-PSS | pk-rsaES-OAEP, ... }
+ SignatureAlgs SIGNATURE-ALGORITHM ::= { sa-rsaSSA-PSS, ...}
+ KeyTransportAlgs KEY-TRANSPORT ::= { kta-rsaES-OAEP, ... }
+ HashAlgs DIGEST-ALGORITHM ::= { mda-sha224 | mda-sha256 | mda-sha384
+ | mda-sha512, ... }
+ SMimeCaps SMIME-CAPS ::= {
+ sa-rsaSSA-PSS.&smimeCaps |
+ kta-rsaES-OAEP.&smimeCaps,
+ ...
+ }
+
+ -- =============================
+ -- Algorithm Objects
+ -- =============================
+
+ --
+ -- Public key object for PSS signatures
+ --
+
+ pk-rsaSSA-PSS PUBLIC-KEY ::= {
+ IDENTIFIER id-RSASSA-PSS
+ KEY RSAPublicKey
+ PARAMS TYPE RSASSA-PSS-params ARE optional
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { nonRepudiation, digitalSignature,
+ keyCertSign, cRLSign }
+ }
+
+ --
+ -- Signature algorithm definition for PSS signatures
+ --
+
+ sa-rsaSSA-PSS SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER id-RSASSA-PSS
+ PARAMS TYPE RSASSA-PSS-params ARE required
+ HASHES { mda-sha1 | mda-sha224 | mda-sha256 | mda-sha384
+ | mda-sha512 }
+ PUBLIC-KEYS { pk-rsa | pk-rsaSSA-PSS }
+ SMIME-CAPS { IDENTIFIED BY id-RSASSA-PSS }
+ }
+
+ --
+ -- Signature algorithm definitions for PKCS v1.5 signatures
+ --
+
+ sa-sha224WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha224WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha224 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha224WithRSAEncryption }
+ }
+ sha224WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 14 }
+
+ sa-sha256WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha256WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha256 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha256WithRSAEncryption }
+ }
+ sha256WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 11 }
+
+ sa-sha384WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha384WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha384 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha384WithRSAEncryption }
+ }
+ sha384WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 12 }
+
+ sa-sha512WithRSAEncryption SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha512WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha512 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY sha512WithRSAEncryption }
+ }
+ sha512WithRSAEncryption OBJECT IDENTIFIER ::= { pkcs-1 13 }
+
+ --
+ -- Public key definition for OAEP encryption
+ --
+
+ pk-rsaES-OAEP PUBLIC-KEY ::= {
+ IDENTIFIER id-RSAES-OAEP
+ KEY RSAPublicKey
+ PARAMS TYPE RSAES-OAEP-params ARE optional
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {keyEncipherment, dataEncipherment}
+ }
+
+ --
+ -- Key transport key lock definition for OAEP encryption
+ --
+
+ kta-rsaES-OAEP KEY-TRANSPORT ::= {
+ IDENTIFIER id-RSAES-OAEP
+ PARAMS TYPE RSAES-OAEP-params ARE required
+ PUBLIC-KEYS { pk-rsa | pk-rsaES-OAEP }
+ SMIME-CAPS { TYPE RSAES-OAEP-params IDENTIFIED BY id-RSAES-OAEP}
+ }
+ -- ============================
+ -- Basic object identifiers
+ -- ============================
+
+ pkcs-1 OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 1 }
+
+ -- When rsaEncryption is used in an AlgorithmIdentifier, the
+ -- parameters MUST be present and MUST be NULL.
+ -- rsaEncryption OBJECT IDENTIFIER ::= { pkcs-1 1 }
+
+ -- When id-RSAES-OAEP is used in an AlgorithmIdentifier,
+ -- and the parameters field is present, it MUST be
+ -- RSAES-OAEP-params.
+
+ id-RSAES-OAEP OBJECT IDENTIFIER ::= { pkcs-1 7 }
+
+ -- When id-mgf1 is used in an AlgorithmIdentifier, the parameters
+ -- MUST be present and MUST be a HashAlgorithm.
+
+ id-mgf1 OBJECT IDENTIFIER ::= { pkcs-1 8 }
+
+ -- When id-pSpecified is used in an AlgorithmIdentifier, the
+ -- parameters MUST be an OCTET STRING.
+
+ id-pSpecified OBJECT IDENTIFIER ::= { pkcs-1 9 }
+
+ -- When id-RSASSA-PSS is used in an AlgorithmIdentifier, and the
+ -- parameters field is present, it MUST be RSASSA-PSS-params.
+
+ id-RSASSA-PSS OBJECT IDENTIFIER ::= { pkcs-1 10 }
+
+ -- When the following OIDs are used in an AlgorithmIdentifier, the
+ -- parameters SHOULD be absent, but if the parameters are present,
+ -- they MUST be NULL.
+
+ --
+ -- id-sha1 is imported from RFC 3279. Additionally, the v1.5
+ -- signature algorithms (i.e., rsaWithSHA256) are now solely placed
+ -- in that module.
+ --
+
+ id-sha224 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 4 }
+
+ mda-sha224 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha224
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-sha256 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 1 }
+
+ mda-sha256 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha256
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+ id-sha384 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 2 }
+
+ mda-sha384 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha384
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+ id-sha512 OBJECT IDENTIFIER ::=
+ { joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101)
+ csor(3) nistAlgorithms(4) hashalgs(2) 3 }
+
+ mda-sha512 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha512
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ -- =============
+ -- Constants
+ -- =============
+
+ EncodingParameters ::= OCTET STRING(SIZE(0..MAX))
+
+ nullOctetString EncodingParameters ::= ''H
+
+ nullParameters NULL ::= NULL
+
+ -- =========================
+ -- Algorithm Identifiers
+ -- =========================
+
+ HashAlgorithm ::= AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {HashAlgorithms}}
+
+ HashAlgorithms DIGEST-ALGORITHM ::= {
+ { IDENTIFIER id-sha1 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha224 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha256 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha384 PARAMS TYPE NULL ARE preferredPresent } |
+ { IDENTIFIER id-sha512 PARAMS TYPE NULL ARE preferredPresent }
+ }
+
+ sha1Identifier HashAlgorithm ::= {
+ algorithm id-sha1,
+ parameters NULL : NULL
+ }
+
+ --
+ -- We have a default algorithm - create the value here
+ --
+
+ MaskGenAlgorithm ::= AlgorithmIdentifier{ALGORITHM,
+ {PKCS1MGFAlgorithms}}
+
+ mgf1SHA1 MaskGenAlgorithm ::= {
+ algorithm id-mgf1,
+ parameters HashAlgorithm : sha1Identifier
+ }
+
+ --
+ -- Define the set of mask generation functions
+ --
+ -- If the identifier is id-mgf1, any of the listed hash
+ -- algorithms may be used.
+ --
+
+ PKCS1MGFAlgorithms ALGORITHM ::= {
+ { IDENTIFIER id-mgf1 PARAMS TYPE HashAlgorithm ARE required },
+ ...
+ }
+
+ --
+ -- Define the set of known source algorithms for PSS
+ --
+
+ PSourceAlgorithm ::= AlgorithmIdentifier{ALGORITHM,
+ {PSS-SourceAlgorithms}}
+
+ PSS-SourceAlgorithms ALGORITHM ::= {
+ { IDENTIFIER id-pSpecified PARAMS TYPE EncodingParameters
+ ARE required },
+ ...
+ }
+ pSpecifiedEmpty PSourceAlgorithm ::= {
+ algorithm id-pSpecified,
+ parameters EncodingParameters : nullOctetString
+ }
+
+ -- ===================
+ -- Main structures
+ -- ===================
+
+ -- AlgorithmIdentifier parameters for id-RSASSA-PSS.
+ -- Note that the tags in this Sequence are explicit.
+ -- Note: The hash algorithm in hashAlgorithm and in
+ -- maskGenAlgorithm should be the same.
+
+ RSASSA-PSS-params ::= SEQUENCE {
+ hashAlgorithm [0] HashAlgorithm DEFAULT sha1Identifier,
+ maskGenAlgorithm [1] MaskGenAlgorithm DEFAULT mgf1SHA1,
+ saltLength [2] INTEGER DEFAULT 20,
+ trailerField [3] INTEGER DEFAULT 1
+ }
+
+ -- AlgorithmIdentifier parameters for id-RSAES-OAEP.
+ -- Note that the tags in this Sequence are explicit.
+ -- Note: The hash algorithm in hashFunc and in
+ -- maskGenFunc should be the same.
+
+ RSAES-OAEP-params ::= SEQUENCE {
+ hashFunc [0] HashAlgorithm DEFAULT sha1Identifier,
+ maskGenFunc [1] MaskGenAlgorithm DEFAULT mgf1SHA1,
+ pSourceFunc [2] PSourceAlgorithm DEFAULT
+ pSpecifiedEmpty
+ }
+
+ END
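The module above gives every component of RSASSA-PSS-params a DEFAULT value, so an absent component means SHA-1, MGF1 with SHA-1, a 20-octet salt and a trailer field of 1. Below is a minimal sketch of how these modules might be exercised with the Erlang asn1 application; the set file "PKIX.set.asn" (assumed to list the *.asn1 files added here), the merged module name 'PKIX' that asn1ct derives from the set file, and the tuple layout of the generated record are assumptions for illustration, not anything fixed by this change.

    %% Sketch only; not part of the test suite.
    -module(pss_params_sketch).
    -export([run/0]).

    run() ->
        %% Compile the RFC modules as one set so the inter-module IMPORTS
        %% resolve; asn1ct merges them into a module named after the set file.
        ok = asn1ct:compile("PKIX.set.asn", [ber]),
        %% asn1_DEFAULT tells the encoder to omit a component that has a
        %% DEFAULT, so this value encodes as the empty SEQUENCE.
        Params = {'RSASSA-PSS-params',
                  asn1_DEFAULT, asn1_DEFAULT, asn1_DEFAULT, asn1_DEFAULT},
        {ok, Der} = 'PKIX':encode('RSASSA-PSS-params', Params),
        {ok, Decoded} = 'PKIX':decode('RSASSA-PSS-params', Der),
        {Der, Decoded}.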
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1
new file mode 100644
index 0000000000..613e0e9d2c
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Explicit-2009.asn1
@@ -0,0 +1,415 @@
+ PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-explicit-02(51)}
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+
+ IMPORTS
+
+ Extensions{}, EXTENSION, ATTRIBUTE, SingleAttribute{}
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ AlgorithmIdentifier{}, PUBLIC-KEY, SIGNATURE-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ CertExtensions, CrlExtensions, CrlEntryExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+ SignatureAlgs, PublicKeys
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0) 56}
+
+ SignatureAlgs, PublicKeys
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54)}
+
+ ORAddress
+ FROM PKIX-X400Address-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-x400address-02(60)};
+
+ id-pkix OBJECT IDENTIFIER ::=
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7)}
+
+ -- PKIX arcs
+
+ id-pe OBJECT IDENTIFIER ::= { id-pkix 1 }
+ -- arc for private certificate extensions
+ id-qt OBJECT IDENTIFIER ::= { id-pkix 2 }
+ -- arc for policy qualifier types
+ id-kp OBJECT IDENTIFIER ::= { id-pkix 3 }
+ -- arc for extended key purpose OIDs
+ id-ad OBJECT IDENTIFIER ::= { id-pkix 48 }
+ -- arc for access descriptors
+
+ -- policyQualifierIds for Internet policy qualifiers
+
+ id-qt-cps OBJECT IDENTIFIER ::= { id-qt 1 }
+ -- OID for CPS qualifier
+ id-qt-unotice OBJECT IDENTIFIER ::= { id-qt 2 }
+ -- OID for user notice qualifier
+
+ -- access descriptor definitions
+
+ id-ad-ocsp OBJECT IDENTIFIER ::= { id-ad 1 }
+ id-ad-caIssuers OBJECT IDENTIFIER ::= { id-ad 2 }
+ id-ad-timeStamping OBJECT IDENTIFIER ::= { id-ad 3 }
+ id-ad-caRepository OBJECT IDENTIFIER ::= { id-ad 5 }
+
+ -- attribute data types
+ AttributeType ::= ATTRIBUTE.&id
+
+ -- Replaced by SingleAttribute{}
+ --
+ -- AttributeTypeAndValue ::= SEQUENCE {
+ -- type ATTRIBUTE.&id({SupportedAttributes}),
+ -- value ATTRIBUTE.&Type({SupportedAttributes}{@type}) }
+ --
+
+ -- Suggested naming attributes: Definition of the following
+ -- information object set may be augmented to meet local
+ -- requirements. Note that deleting members of the set may
+ -- prevent interoperability with conforming implementations.
+ -- All attributes are presented in pairs: the AttributeType
+ -- followed by the type definition for the corresponding
+ -- AttributeValue.
+
+ -- Arc for standard naming attributes
+
+ id-at OBJECT IDENTIFIER ::= { joint-iso-ccitt(2) ds(5) 4 }
+
+ -- Naming attributes of type X520name
+
+ id-at-name AttributeType ::= { id-at 41 }
+ at-name ATTRIBUTE ::= { TYPE X520name IDENTIFIED BY id-at-name }
+
+ id-at-surname AttributeType ::= { id-at 4 }
+ at-surname ATTRIBUTE ::= { TYPE X520name IDENTIFIED BY id-at-surname }
+
+ id-at-givenName AttributeType ::= { id-at 42 }
+ at-givenName ATTRIBUTE ::=
+ { TYPE X520name IDENTIFIED BY id-at-givenName }
+
+ id-at-initials AttributeType ::= { id-at 43 }
+ at-initials ATTRIBUTE ::=
+ { TYPE X520name IDENTIFIED BY id-at-initials }
+
+ id-at-generationQualifier AttributeType ::= { id-at 44 }
+ at-generationQualifier ATTRIBUTE ::=
+ { TYPE X520name IDENTIFIED BY id-at-generationQualifier }
+
+ -- Directory string type --
+
+ DirectoryString{INTEGER:maxSize} ::= CHOICE {
+ teletexString TeletexString(SIZE (1..maxSize)),
+ printableString PrintableString(SIZE (1..maxSize)),
+ bmpString BMPString(SIZE (1..maxSize)),
+ universalString UniversalString(SIZE (1..maxSize)),
+ uTF8String UTF8String(SIZE (1..maxSize))
+ }
+
+ X520name ::= DirectoryString {ub-name}
+
+ -- Naming attributes of type X520CommonName
+
+ id-at-commonName AttributeType ::= { id-at 3 }
+
+ at-x520CommonName ATTRIBUTE ::=
+ {TYPE X520CommonName IDENTIFIED BY id-at-commonName }
+
+ X520CommonName ::= DirectoryString {ub-common-name}
+
+ -- Naming attributes of type X520LocalityName
+
+ id-at-localityName AttributeType ::= { id-at 7 }
+
+ at-x520LocalityName ATTRIBUTE ::=
+ { TYPE X520LocalityName IDENTIFIED BY id-at-localityName }
+ X520LocalityName ::= DirectoryString {ub-locality-name}
+
+ -- Naming attributes of type X520StateOrProvinceName
+
+ id-at-stateOrProvinceName AttributeType ::= { id-at 8 }
+
+ at-x520StateOrProvinceName ATTRIBUTE ::=
+ { TYPE DirectoryString {ub-state-name}
+ IDENTIFIED BY id-at-stateOrProvinceName }
+ X520StateOrProvinceName ::= DirectoryString {ub-state-name}
+
+ -- Naming attributes of type X520OrganizationName
+
+ id-at-organizationName AttributeType ::= { id-at 10 }
+
+ at-x520OrganizationName ATTRIBUTE ::=
+ { TYPE DirectoryString {ub-organization-name}
+ IDENTIFIED BY id-at-organizationName }
+ X520OrganizationName ::= DirectoryString {ub-organization-name}
+
+ -- Naming attributes of type X520OrganizationalUnitName
+
+ id-at-organizationalUnitName AttributeType ::= { id-at 11 }
+
+ at-x520OrganizationalUnitName ATTRIBUTE ::=
+ { TYPE DirectoryString {ub-organizational-unit-name}
+ IDENTIFIED BY id-at-organizationalUnitName }
+ X520OrganizationalUnitName ::= DirectoryString
+ {ub-organizational-unit-name}
+
+ -- Naming attributes of type X520Title
+
+ id-at-title AttributeType ::= { id-at 12 }
+
+ at-x520Title ATTRIBUTE ::= { TYPE DirectoryString { ub-title }
+ IDENTIFIED BY id-at-title }
+
+ -- Naming attributes of type X520dnQualifier
+
+ id-at-dnQualifier AttributeType ::= { id-at 46 }
+
+ at-x520dnQualifier ATTRIBUTE ::= { TYPE PrintableString
+ IDENTIFIED BY id-at-dnQualifier }
+
+ -- Naming attributes of type X520countryName (digraph from IS 3166)
+
+ id-at-countryName AttributeType ::= { id-at 6 }
+
+ at-x520countryName ATTRIBUTE ::= { TYPE PrintableString (SIZE (2))
+ IDENTIFIED BY id-at-countryName }
+
+ -- Naming attributes of type X520SerialNumber
+
+ id-at-serialNumber AttributeType ::= { id-at 5 }
+
+ at-x520SerialNumber ATTRIBUTE ::= {TYPE PrintableString
+ (SIZE (1..ub-serial-number)) IDENTIFIED BY id-at-serialNumber }
+
+ -- Naming attributes of type X520Pseudonym
+
+ id-at-pseudonym AttributeType ::= { id-at 65 }
+
+ at-x520Pseudonym ATTRIBUTE ::= { TYPE DirectoryString {ub-pseudonym}
+ IDENTIFIED BY id-at-pseudonym }
+
+ -- Naming attributes of type DomainComponent (from RFC 2247)
+
+ id-domainComponent AttributeType ::=
+ { itu-t(0) data(9) pss(2342) ucl(19200300) pilot(100)
+ pilotAttributeType(1) 25 }
+
+ at-domainComponent ATTRIBUTE ::= {TYPE IA5String
+ IDENTIFIED BY id-domainComponent }
+
+ -- Legacy attributes
+
+ pkcs-9 OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) 9 }
+ id-emailAddress AttributeType ::= { pkcs-9 1 }
+
+ at-emailAddress ATTRIBUTE ::= {TYPE IA5String
+ (SIZE (1..ub-emailaddress-length)) IDENTIFIED BY
+ id-emailAddress }
+
+ -- naming data types --
+
+ Name ::= CHOICE { -- only one possibility for now --
+ rdnSequence RDNSequence }
+
+ RDNSequence ::= SEQUENCE OF RelativeDistinguishedName
+
+ DistinguishedName ::= RDNSequence
+
+ RelativeDistinguishedName ::=
+ SET SIZE (1 .. MAX) OF SingleAttribute { {SupportedAttributes} }
+
+ -- These are the known name elements for a DN
+
+ SupportedAttributes ATTRIBUTE ::= {
+ at-name | at-surname | at-givenName | at-initials |
+ at-generationQualifier | at-x520CommonName |
+ at-x520LocalityName | at-x520StateOrProvinceName |
+ at-x520OrganizationName | at-x520OrganizationalUnitName |
+ at-x520Title | at-x520dnQualifier | at-x520countryName |
+ at-x520SerialNumber | at-x520Pseudonym | at-domainComponent |
+ at-emailAddress, ... }
+
+ --
+ -- Certificate- and CRL-specific structures begin here
+ --
+
+ Certificate ::= SIGNED{TBSCertificate}
+
+ TBSCertificate ::= SEQUENCE {
+ version [0] Version DEFAULT v1,
+ serialNumber CertificateSerialNumber,
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ issuer Name,
+ validity Validity,
+ subject Name,
+ subjectPublicKeyInfo SubjectPublicKeyInfo,
+ ... ,
+ [[2: -- If present, version MUST be v2
+ issuerUniqueID [1] IMPLICIT UniqueIdentifier OPTIONAL,
+ subjectUniqueID [2] IMPLICIT UniqueIdentifier OPTIONAL
+ ]],
+ [[3: -- If present, version MUST be v3 --
+ extensions [3] Extensions{{CertExtensions}} OPTIONAL
+ ]], ... }
+
+ Version ::= INTEGER { v1(0), v2(1), v3(2) }
+
+ CertificateSerialNumber ::= INTEGER
+
+ Validity ::= SEQUENCE {
+ notBefore Time,
+ notAfter Time }
+
+ Time ::= CHOICE {
+ utcTime UTCTime,
+ generalTime GeneralizedTime }
+
+ UniqueIdentifier ::= BIT STRING
+
+ SubjectPublicKeyInfo ::= SEQUENCE {
+ algorithm AlgorithmIdentifier{PUBLIC-KEY,
+ {PublicKeyAlgorithms}},
+ subjectPublicKey BIT STRING }
+
+ -- CRL structures
+
+ CertificateList ::= SIGNED{TBSCertList}
+
+ TBSCertList ::= SEQUENCE {
+ version Version OPTIONAL,
+ -- if present, MUST be v2
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ issuer Name,
+ thisUpdate Time,
+ nextUpdate Time OPTIONAL,
+ revokedCertificates SEQUENCE SIZE (1..MAX) OF SEQUENCE {
+ userCertificate CertificateSerialNumber,
+ revocationDate Time,
+ ... ,
+ [[2: -- if present, version MUST be v2
+ crlEntryExtensions Extensions{{CrlEntryExtensions}}
+ OPTIONAL
+ ]], ...
+ } OPTIONAL,
+ ... ,
+ [[2: -- if present, version MUST be v2
+ crlExtensions [0] Extensions{{CrlExtensions}}
+ OPTIONAL
+ ]], ... }
+
+ -- Version, Time, CertificateSerialNumber, and Extensions were
+ -- defined earlier for use in the certificate structure
+
+ --
+ -- The two object sets below should be expanded to include
+ -- those algorithms which are supported by the system.
+ --
+ -- For example:
+ -- SignatureAlgorithms SIGNATURE-ALGORITHM ::= {
+ -- PKIXAlgs-2008.SignatureAlgs, ...,
+ -- - - RFC 3279 provides the base set
+ -- PKIX1-PSS-OAEP-ALGORITHMS.SignatureAlgs |
+ -- - - RFC 4055 provides extension algs
+ -- OtherModule.SignatureAlgs
+ -- - - RFC XXXX provides additional extension algs
+ -- }
+
+ SignatureAlgorithms SIGNATURE-ALGORITHM ::= {
+ PKIXAlgs-2009.SignatureAlgs, ...,
+ PKIX1-PSS-OAEP-Algorithms-2009.SignatureAlgs }
+
+ PublicKeyAlgorithms PUBLIC-KEY ::= {
+ PKIXAlgs-2009.PublicKeys, ...,
+ PKIX1-PSS-OAEP-Algorithms-2009.PublicKeys}
+
+ -- Upper Bounds
+
+ ub-state-name INTEGER ::= 128
+ ub-organization-name INTEGER ::= 64
+ ub-organizational-unit-name INTEGER ::= 64
+ ub-title INTEGER ::= 64
+ ub-serial-number INTEGER ::= 64
+ ub-pseudonym INTEGER ::= 128
+ ub-emailaddress-length INTEGER ::= 255
+ ub-locality-name INTEGER ::= 128
+ ub-common-name INTEGER ::= 64
+ ub-name INTEGER ::= 32768
+
+ -- Note - upper bounds on string types, such as TeletexString, are
+ -- measured in characters. Excepting PrintableString or IA5String, a
+ -- significantly greater number of octets will be required to hold
+ -- such a value. As a minimum, 16 octets or twice the specified
+ -- upper bound, whichever is the larger, should be allowed for
+ -- TeletexString. For UTF8String or UniversalString, at least four
+ -- times the upper bound should be allowed.
+
+ -- Information object classes used in the definition
+ -- of certificates and CRLs
+
+ -- Parameterized Type SIGNED
+ --
+ -- Three different versions of doing SIGNED:
+ -- 1. Simple and close to the previous version
+ --
+ -- SIGNED{ToBeSigned} ::= SEQUENCE {
+ -- toBeSigned ToBeSigned,
+ -- algorithm AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ -- {SignatureAlgorithms}},
+ -- signature BIT STRING
+ -- }
+
+ -- 2. From Authenticated Framework
+ --
+ -- SIGNED{ToBeSigned} ::= SEQUENCE {
+ -- toBeSigned ToBeSigned,
+ -- COMPONENTS OF SIGNATURE{ToBeSigned}
+ -- }
+ -- SIGNATURE{ToBeSigned} ::= SEQUENCE {
+ -- algorithmIdentifier AlgorithmIdentifier,
+ -- encrypted ENCRYPTED-HASH{ToBeSigned}
+ -- }
+ -- ENCRYPTED-HASH{ToBeSigned} ::=
+ -- BIT STRING
+ -- (CONSTRAINED BY {
+ -- shall be the result of applying a hashing procedure to
+ -- the DER-encoded (see 4.1) octets of a value of
+ -- ToBeSigned and then applying an encipherment procedure
+ -- to those octets
+ -- })
+ --
+ --
+ -- 3. A more complex version, but one that automatically ties
+ -- together both the signature algorithm and the
+ -- signature value for automatic decoding.
+ --
+ SIGNED{ToBeSigned} ::= SEQUENCE {
+ toBeSigned ToBeSigned,
+ algorithmIdentifier SEQUENCE {
+ algorithm SIGNATURE-ALGORITHM.
+ &id({SignatureAlgorithms}),
+ parameters SIGNATURE-ALGORITHM.
+ &Params({SignatureAlgorithms}
+ {@algorithmIdentifier.algorithm}) OPTIONAL
+ },
+ signature BIT STRING (CONTAINING SIGNATURE-ALGORITHM.&Value(
+ {SignatureAlgorithms}
+ {@algorithmIdentifier.algorithm}))
+ }
+
+ END
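PKIX1Explicit-2009 ties the set together: Certificate is defined through the SIGNED{} parameterized type so that the signature algorithm and signature value can be decoded together. A small hedged sketch of decoding a DER certificate through the generated code follows, reusing the hypothetical merged 'PKIX' module from the previous sketch; the input file is assumed to hold raw DER, not PEM.

    -module(cert_decode_sketch).
    -export([decode_file/1]).

    %% Decode a DER-encoded certificate with the code generated from the
    %% module set (see the sketch after PKIX1-PSS-OAEP-Algorithms-2009).
    decode_file(DerFile) ->
        {ok, Der} = file:read_file(DerFile),
        {ok, Cert} = 'PKIX':decode('Certificate', Der),
        %% Cert should be a record-shaped tuple whose first data field is the
        %% TBSCertificate, per the SIGNED{} definition above.
        Cert.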
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1
new file mode 100644
index 0000000000..3651a5249b
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIX1Implicit-2009.asn1
@@ -0,0 +1,447 @@
+ PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ id-pe, id-kp, id-qt-unotice, id-qt-cps, ORAddress, Name,
+ RelativeDistinguishedName, CertificateSerialNumber,
+ DirectoryString{}, SupportedAttributes
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) };
+
+ CertExtensions EXTENSION ::= {
+ ext-AuthorityKeyIdentifier | ext-SubjectKeyIdentifier |
+ ext-KeyUsage | ext-PrivateKeyUsagePeriod |
+ ext-CertificatePolicies | ext-PolicyMappings |
+ ext-SubjectAltName | ext-IssuerAltName |
+ ext-SubjectDirectoryAttributes |
+ ext-BasicConstraints | ext-NameConstraints |
+ ext-PolicyConstraints | ext-ExtKeyUsage |
+ ext-CRLDistributionPoints | ext-InhibitAnyPolicy |
+ ext-FreshestCRL | ext-AuthorityInfoAccess |
+ ext-SubjectInfoAccessSyntax, ... }
+
+ CrlExtensions EXTENSION ::= {
+ ext-AuthorityKeyIdentifier | ext-IssuerAltName |
+ ext-CRLNumber | ext-DeltaCRLIndicator |
+ ext-IssuingDistributionPoint | ext-FreshestCRL, ... }
+
+ CrlEntryExtensions EXTENSION ::= {
+ ext-CRLReason | ext-CertificateIssuer |
+ ext-HoldInstructionCode | ext-InvalidityDate, ... }
+ -- Shared arc for standard certificate and CRL extensions
+
+ id-ce OBJECT IDENTIFIER ::= { joint-iso-ccitt(2) ds(5) 29 }
+
+ -- authority key identifier OID and syntax
+
+ ext-AuthorityKeyIdentifier EXTENSION ::= { SYNTAX
+ AuthorityKeyIdentifier IDENTIFIED BY
+ id-ce-authorityKeyIdentifier }
+ id-ce-authorityKeyIdentifier OBJECT IDENTIFIER ::= { id-ce 35 }
+
+ AuthorityKeyIdentifier ::= SEQUENCE {
+ keyIdentifier [0] KeyIdentifier OPTIONAL,
+ authorityCertIssuer [1] GeneralNames OPTIONAL,
+ authorityCertSerialNumber [2] CertificateSerialNumber OPTIONAL }
+ (WITH COMPONENTS {
+ ...,
+ authorityCertIssuer PRESENT,
+ authorityCertSerialNumber PRESENT
+ } |
+ WITH COMPONENTS {
+ ...,
+ authorityCertIssuer ABSENT,
+ authorityCertSerialNumber ABSENT
+ })
+
+ KeyIdentifier ::= OCTET STRING
+
+ -- subject key identifier OID and syntax
+
+ ext-SubjectKeyIdentifier EXTENSION ::= { SYNTAX
+ KeyIdentifier IDENTIFIED BY id-ce-subjectKeyIdentifier }
+ id-ce-subjectKeyIdentifier OBJECT IDENTIFIER ::= { id-ce 14 }
+
+ -- key usage extension OID and syntax
+
+ ext-KeyUsage EXTENSION ::= { SYNTAX
+ KeyUsage IDENTIFIED BY id-ce-keyUsage }
+ id-ce-keyUsage OBJECT IDENTIFIER ::= { id-ce 15 }
+
+ KeyUsage ::= BIT STRING {
+ digitalSignature (0),
+ nonRepudiation (1), -- recent editions of X.509 have
+ -- renamed this bit to
+ -- contentCommitment
+ keyEncipherment (2),
+ dataEncipherment (3),
+ keyAgreement (4),
+ keyCertSign (5),
+ cRLSign (6),
+ encipherOnly (7),
+ decipherOnly (8)
+ }
+
+ -- private key usage period extension OID and syntax
+
+ ext-PrivateKeyUsagePeriod EXTENSION ::= { SYNTAX
+ PrivateKeyUsagePeriod IDENTIFIED BY id-ce-privateKeyUsagePeriod }
+ id-ce-privateKeyUsagePeriod OBJECT IDENTIFIER ::= { id-ce 16 }
+
+ PrivateKeyUsagePeriod ::= SEQUENCE {
+ notBefore [0] GeneralizedTime OPTIONAL,
+ notAfter [1] GeneralizedTime OPTIONAL }
+ (WITH COMPONENTS {..., notBefore PRESENT } |
+ WITH COMPONENTS {..., notAfter PRESENT })
+
+ -- certificate policies extension OID and syntax
+
+ ext-CertificatePolicies EXTENSION ::= { SYNTAX
+ CertificatePolicies IDENTIFIED BY id-ce-certificatePolicies}
+ id-ce-certificatePolicies OBJECT IDENTIFIER ::= { id-ce 32 }
+
+ CertificatePolicies ::= SEQUENCE SIZE (1..MAX) OF PolicyInformation
+
+ PolicyInformation ::= SEQUENCE {
+ policyIdentifier CertPolicyId,
+ policyQualifiers SEQUENCE SIZE (1..MAX) OF
+ PolicyQualifierInfo OPTIONAL }
+
+ CertPolicyId ::= OBJECT IDENTIFIER
+
+ CERT-POLICY-QUALIFIER ::= TYPE-IDENTIFIER
+
+ PolicyQualifierInfo ::= SEQUENCE {
+ policyQualifierId CERT-POLICY-QUALIFIER.
+ &id({PolicyQualifierId}),
+ qualifier CERT-POLICY-QUALIFIER.
+ &Type({PolicyQualifierId}{@policyQualifierId})}
+
+ -- Implementations that recognize additional policy qualifiers MUST
+ -- augment the following definition for PolicyQualifierId
+
+ PolicyQualifierId CERT-POLICY-QUALIFIER ::=
+ { pqid-cps | pqid-unotice, ... }
+
+ pqid-cps CERT-POLICY-QUALIFIER ::= { CPSuri IDENTIFIED BY id-qt-cps }
+ pqid-unotice CERT-POLICY-QUALIFIER ::= { UserNotice
+ IDENTIFIED BY id-qt-unotice }
+
+ -- CPS pointer qualifier
+
+ CPSuri ::= IA5String
+
+ -- user notice qualifier
+
+ UserNotice ::= SEQUENCE {
+ noticeRef NoticeReference OPTIONAL,
+ explicitText DisplayText OPTIONAL}
+
+ --
+ -- This is not made explicit in the text
+ --
+ -- {WITH COMPONENTS {..., noticeRef PRESENT} |
+ -- WITH COMPONENTS {..., DisplayText PRESENT }}
+
+ NoticeReference ::= SEQUENCE {
+ organization DisplayText,
+ noticeNumbers SEQUENCE OF INTEGER }
+
+ DisplayText ::= CHOICE {
+ ia5String IA5String (SIZE (1..200)),
+ visibleString VisibleString (SIZE (1..200)),
+ bmpString BMPString (SIZE (1..200)),
+ utf8String UTF8String (SIZE (1..200)) }
+
+ -- policy mapping extension OID and syntax
+
+ ext-PolicyMappings EXTENSION ::= { SYNTAX
+ PolicyMappings IDENTIFIED BY id-ce-policyMappings }
+ id-ce-policyMappings OBJECT IDENTIFIER ::= { id-ce 33 }
+
+ PolicyMappings ::= SEQUENCE SIZE (1..MAX) OF SEQUENCE {
+ issuerDomainPolicy CertPolicyId,
+ subjectDomainPolicy CertPolicyId
+ }
+
+ -- subject alternative name extension OID and syntax
+
+ ext-SubjectAltName EXTENSION ::= { SYNTAX
+ GeneralNames IDENTIFIED BY id-ce-subjectAltName }
+ id-ce-subjectAltName OBJECT IDENTIFIER ::= { id-ce 17 }
+
+ GeneralNames ::= SEQUENCE SIZE (1..MAX) OF GeneralName
+
+ GeneralName ::= CHOICE {
+ otherName [0] INSTANCE OF OTHER-NAME,
+ rfc822Name [1] IA5String,
+ dNSName [2] IA5String,
+ x400Address [3] ORAddress,
+ directoryName [4] Name,
+ ediPartyName [5] EDIPartyName,
+ uniformResourceIdentifier [6] IA5String,
+ iPAddress [7] OCTET STRING,
+ registeredID [8] OBJECT IDENTIFIER
+ }
+
+ -- AnotherName replaces OTHER-NAME ::= TYPE-IDENTIFIER, as
+ -- TYPE-IDENTIFIER is not supported in the '88 ASN.1 syntax
+
+ OTHER-NAME ::= TYPE-IDENTIFIER
+
+ EDIPartyName ::= SEQUENCE {
+ nameAssigner [0] DirectoryString {ubMax} OPTIONAL,
+ partyName [1] DirectoryString {ubMax}
+ }
+
+ -- issuer alternative name extension OID and syntax
+
+ ext-IssuerAltName EXTENSION ::= { SYNTAX
+ GeneralNames IDENTIFIED BY id-ce-issuerAltName }
+ id-ce-issuerAltName OBJECT IDENTIFIER ::= { id-ce 18 }
+
+ ext-SubjectDirectoryAttributes EXTENSION ::= { SYNTAX
+ SubjectDirectoryAttributes IDENTIFIED BY
+ id-ce-subjectDirectoryAttributes }
+ id-ce-subjectDirectoryAttributes OBJECT IDENTIFIER ::= { id-ce 9 }
+
+ SubjectDirectoryAttributes ::= SEQUENCE SIZE (1..MAX) OF
+ AttributeSet{{SupportedAttributes}}
+
+ -- basic constraints extension OID and syntax
+
+ ext-BasicConstraints EXTENSION ::= { SYNTAX
+ BasicConstraints IDENTIFIED BY id-ce-basicConstraints }
+ id-ce-basicConstraints OBJECT IDENTIFIER ::= { id-ce 19 }
+
+ BasicConstraints ::= SEQUENCE {
+ cA BOOLEAN DEFAULT FALSE,
+ pathLenConstraint INTEGER (0..MAX) OPTIONAL
+ }
+
+ -- name constraints extension OID and syntax
+ ext-NameConstraints EXTENSION ::= { SYNTAX
+ NameConstraints IDENTIFIED BY id-ce-nameConstraints }
+ id-ce-nameConstraints OBJECT IDENTIFIER ::= { id-ce 30 }
+
+ NameConstraints ::= SEQUENCE {
+ permittedSubtrees [0] GeneralSubtrees OPTIONAL,
+ excludedSubtrees [1] GeneralSubtrees OPTIONAL
+ }
+ --
+ -- This is a constraint in the issued certificates by CAs, but is
+ -- not a requirement on EEs.
+ --
+ -- (WITH COMPONENTS { ..., permittedSubtrees PRESENT} |
+ -- WITH COMPONENTS { ..., excludedSubtrees PRESENT }}
+
+ GeneralSubtrees ::= SEQUENCE SIZE (1..MAX) OF GeneralSubtree
+
+ GeneralSubtree ::= SEQUENCE {
+ base GeneralName,
+ minimum [0] BaseDistance DEFAULT 0,
+ maximum [1] BaseDistance OPTIONAL
+ }
+
+ BaseDistance ::= INTEGER (0..MAX)
+
+ -- policy constraints extension OID and syntax
+
+ ext-PolicyConstraints EXTENSION ::= { SYNTAX
+ PolicyConstraints IDENTIFIED BY id-ce-policyConstraints }
+ id-ce-policyConstraints OBJECT IDENTIFIER ::= { id-ce 36 }
+
+ PolicyConstraints ::= SEQUENCE {
+ requireExplicitPolicy [0] SkipCerts OPTIONAL,
+ inhibitPolicyMapping [1] SkipCerts OPTIONAL }
+ --
+ -- This is a constraint in the issued certificates by CAs,
+ -- but is not a requirement for EEs
+ --
+ -- (WITH COMPONENTS { ..., requireExplicitPolicy PRESENT} |
+ -- WITH COMPONENTS { ..., inhibitPolicyMapping PRESENT})
+
+ SkipCerts ::= INTEGER (0..MAX)
+
+ -- CRL distribution points extension OID and syntax
+
+ ext-CRLDistributionPoints EXTENSION ::= { SYNTAX
+ CRLDistributionPoints IDENTIFIED BY id-ce-cRLDistributionPoints}
+ id-ce-cRLDistributionPoints OBJECT IDENTIFIER ::= {id-ce 31}
+ CRLDistributionPoints ::= SEQUENCE SIZE (1..MAX) OF DistributionPoint
+
+ DistributionPoint ::= SEQUENCE {
+ distributionPoint [0] DistributionPointName OPTIONAL,
+ reasons [1] ReasonFlags OPTIONAL,
+ cRLIssuer [2] GeneralNames OPTIONAL
+ }
+ --
+ -- This is not a requirement in the text, but it seems as if it
+ -- should be
+ --
+ --(WITH COMPONENTS {..., distributionPoint PRESENT} |
+ -- WITH COMPONENTS {..., cRLIssuer PRESENT})
+
+ DistributionPointName ::= CHOICE {
+ fullName [0] GeneralNames,
+ nameRelativeToCRLIssuer [1] RelativeDistinguishedName
+ }
+
+ ReasonFlags ::= BIT STRING {
+ unused (0),
+ keyCompromise (1),
+ cACompromise (2),
+ affiliationChanged (3),
+ superseded (4),
+ cessationOfOperation (5),
+ certificateHold (6),
+ privilegeWithdrawn (7),
+ aACompromise (8)
+ }
+
+ -- extended key usage extension OID and syntax
+
+ ext-ExtKeyUsage EXTENSION ::= { SYNTAX
+ ExtKeyUsageSyntax IDENTIFIED BY id-ce-extKeyUsage }
+ id-ce-extKeyUsage OBJECT IDENTIFIER ::= {id-ce 37}
+
+ ExtKeyUsageSyntax ::= SEQUENCE SIZE (1..MAX) OF KeyPurposeId
+
+ KeyPurposeId ::= OBJECT IDENTIFIER
+
+ -- permit unspecified key uses
+
+ anyExtendedKeyUsage OBJECT IDENTIFIER ::= { id-ce-extKeyUsage 0 }
+
+ -- extended key purpose OIDs
+
+ id-kp-serverAuth OBJECT IDENTIFIER ::= { id-kp 1 }
+ id-kp-clientAuth OBJECT IDENTIFIER ::= { id-kp 2 }
+ id-kp-codeSigning OBJECT IDENTIFIER ::= { id-kp 3 }
+ id-kp-emailProtection OBJECT IDENTIFIER ::= { id-kp 4 }
+ id-kp-timeStamping OBJECT IDENTIFIER ::= { id-kp 8 }
+ id-kp-OCSPSigning OBJECT IDENTIFIER ::= { id-kp 9 }
+
+ -- inhibit any policy OID and syntax
+
+ ext-InhibitAnyPolicy EXTENSION ::= {SYNTAX
+ SkipCerts IDENTIFIED BY id-ce-inhibitAnyPolicy }
+ id-ce-inhibitAnyPolicy OBJECT IDENTIFIER ::= { id-ce 54 }
+
+ -- freshest (delta)CRL extension OID and syntax
+
+ ext-FreshestCRL EXTENSION ::= {SYNTAX
+ CRLDistributionPoints IDENTIFIED BY id-ce-freshestCRL }
+ id-ce-freshestCRL OBJECT IDENTIFIER ::= { id-ce 46 }
+
+ -- authority info access
+
+ ext-AuthorityInfoAccess EXTENSION ::= { SYNTAX
+ AuthorityInfoAccessSyntax IDENTIFIED BY
+ id-pe-authorityInfoAccess }
+ id-pe-authorityInfoAccess OBJECT IDENTIFIER ::= { id-pe 1 }
+
+ AuthorityInfoAccessSyntax ::=
+ SEQUENCE SIZE (1..MAX) OF AccessDescription
+
+ AccessDescription ::= SEQUENCE {
+ accessMethod OBJECT IDENTIFIER,
+ accessLocation GeneralName }
+
+ -- subject info access
+
+ ext-SubjectInfoAccessSyntax EXTENSION ::= { SYNTAX
+ SubjectInfoAccessSyntax IDENTIFIED BY id-pe-subjectInfoAccess }
+ id-pe-subjectInfoAccess OBJECT IDENTIFIER ::= { id-pe 11 }
+
+ SubjectInfoAccessSyntax ::=
+ SEQUENCE SIZE (1..MAX) OF AccessDescription
+
+ -- CRL number extension OID and syntax
+
+ ext-CRLNumber EXTENSION ::= {SYNTAX
+ INTEGER (0..MAX) IDENTIFIED BY id-ce-cRLNumber }
+ id-ce-cRLNumber OBJECT IDENTIFIER ::= { id-ce 20 }
+
+ CRLNumber ::= INTEGER (0..MAX)
+ -- issuing distribution point extension OID and syntax
+
+ ext-IssuingDistributionPoint EXTENSION ::= { SYNTAX
+ IssuingDistributionPoint IDENTIFIED BY
+ id-ce-issuingDistributionPoint }
+ id-ce-issuingDistributionPoint OBJECT IDENTIFIER ::= { id-ce 28 }
+
+ IssuingDistributionPoint ::= SEQUENCE {
+ distributionPoint [0] DistributionPointName OPTIONAL,
+ onlyContainsUserCerts [1] BOOLEAN DEFAULT FALSE,
+ onlyContainsCACerts [2] BOOLEAN DEFAULT FALSE,
+ onlySomeReasons [3] ReasonFlags OPTIONAL,
+ indirectCRL [4] BOOLEAN DEFAULT FALSE,
+ onlyContainsAttributeCerts [5] BOOLEAN DEFAULT FALSE
+ }
+ -- at most one of onlyContainsUserCerts, onlyContainsCACerts,
+ -- or onlyContainsAttributeCerts may be set to TRUE.
+
+ ext-DeltaCRLIndicator EXTENSION ::= { SYNTAX
+ CRLNumber IDENTIFIED BY id-ce-deltaCRLIndicator }
+ id-ce-deltaCRLIndicator OBJECT IDENTIFIER ::= { id-ce 27 }
+
+ -- CRL reasons extension OID and syntax
+
+ ext-CRLReason EXTENSION ::= { SYNTAX
+ CRLReason IDENTIFIED BY id-ce-cRLReasons }
+ id-ce-cRLReasons OBJECT IDENTIFIER ::= { id-ce 21 }
+
+ CRLReason ::= ENUMERATED {
+ unspecified (0),
+ keyCompromise (1),
+ cACompromise (2),
+ affiliationChanged (3),
+ superseded (4),
+ cessationOfOperation (5),
+ certificateHold (6),
+ removeFromCRL (8),
+ privilegeWithdrawn (9),
+ aACompromise (10)
+ }
+
+ -- certificate issuer CRL entry extension OID and syntax
+
+ ext-CertificateIssuer EXTENSION ::= { SYNTAX
+ GeneralNames IDENTIFIED BY id-ce-certificateIssuer }
+ id-ce-certificateIssuer OBJECT IDENTIFIER ::= { id-ce 29 }
+
+ -- hold instruction extension OID and syntax
+ ext-HoldInstructionCode EXTENSION ::= { SYNTAX
+ OBJECT IDENTIFIER IDENTIFIED BY id-ce-holdInstructionCode }
+ id-ce-holdInstructionCode OBJECT IDENTIFIER ::= { id-ce 23 }
+
+ -- ANSI x9 holdinstructions
+
+ holdInstruction OBJECT IDENTIFIER ::=
+ {joint-iso-itu-t(2) member-body(2) us(840) x9cm(10040) 2}
+ id-holdinstruction-none OBJECT IDENTIFIER ::=
+ {holdInstruction 1} -- deprecated
+ id-holdinstruction-callissuer OBJECT IDENTIFIER ::=
+ {holdInstruction 2}
+ id-holdinstruction-reject OBJECT IDENTIFIER ::=
+ {holdInstruction 3}
+
+ -- invalidity date CRL entry extension OID and syntax
+
+ ext-InvalidityDate EXTENSION ::= { SYNTAX
+ GeneralizedTime IDENTIFIED BY id-ce-invalidityDate }
+ id-ce-invalidityDate OBJECT IDENTIFIER ::= { id-ce 24 }
+ -- Upper bounds
+ ubMax INTEGER ::= 32768
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1
new file mode 100644
index 0000000000..d58bcb5b19
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAlgs-2009.asn1
@@ -0,0 +1,528 @@
+ PKIXAlgs-2009 { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56) }
+
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ PUBLIC-KEY, SIGNATURE-ALGORITHM, DIGEST-ALGORITHM, SMIME-CAPS
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ mda-sha224, mda-sha256, mda-sha384, mda-sha512
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54)} ;
+
+ --
+ -- Public Key (pk-) Algorithms
+ --
+
+ PublicKeys PUBLIC-KEY ::= {
+ pk-rsa |
+ pk-dsa |
+ pk-dh |
+ pk-kea,
+ ...,
+ pk-ec |
+ pk-ecDH |
+ pk-ecMQV
+ }
+
+ --
+ -- Signature Algorithms (sa-)
+ --
+
+ SignatureAlgs SIGNATURE-ALGORITHM ::= {
+ sa-rsaWithMD2 |
+ sa-rsaWithMD5 |
+ sa-rsaWithSHA1 |
+ sa-dsaWithSHA1 |
+ sa-ecdsaWithSHA1,
+ ..., -- Extensible
+ sa-dsaWithSHA224 |
+ sa-dsaWithSHA256 |
+ sa-ecdsaWithSHA224 |
+ sa-ecdsaWithSHA256 |
+ sa-ecdsaWithSHA384 |
+ sa-ecdsaWithSHA512
+ }
+
+ --
+ -- S/MIME CAPS for algorithms in this document
+ --
+ -- For all of the algorithms laid out in this document, the
+ -- parameters field for the S/MIME capabilities is defined as
+ -- ABSENT as there are no specific values that need to be known
+ -- by the receiver for negotiation.
+
+ --
+
+ SMimeCaps SMIME-CAPS ::= {
+ sa-rsaWithMD2.&smimeCaps |
+ sa-rsaWithMD5.&smimeCaps |
+ sa-rsaWithSHA1.&smimeCaps |
+ sa-dsaWithSHA1.&smimeCaps |
+ sa-dsaWithSHA224.&smimeCaps |
+ sa-dsaWithSHA256.&smimeCaps |
+ sa-ecdsaWithSHA1.&smimeCaps |
+ sa-ecdsaWithSHA224.&smimeCaps |
+ sa-ecdsaWithSHA256.&smimeCaps |
+ sa-ecdsaWithSHA384.&smimeCaps |
+ sa-ecdsaWithSHA512.&smimeCaps,
+ ... }
+
+ -- RSA PK Algorithm, Parameters, and Keys
+
+ pk-rsa PUBLIC-KEY ::= {
+ IDENTIFIER rsaEncryption
+ KEY RSAPublicKey
+ PARAMS TYPE NULL ARE absent
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {digitalSignature, nonRepudiation,
+ keyEncipherment, dataEncipherment, keyCertSign, cRLSign}
+ }
+
+ rsaEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 1 }
+
+ RSAPublicKey ::= SEQUENCE {
+ modulus INTEGER, -- n
+ publicExponent INTEGER -- e
+ }
+
+ -- DSA PK Algorithm, Parameters, and Keys
+
+ pk-dsa PUBLIC-KEY ::= {
+ IDENTIFIER id-dsa
+ KEY DSAPublicKey
+ PARAMS TYPE DSA-Params ARE inheritable
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { digitalSignature, nonRepudiation, keyCertSign,
+ cRLSign }
+ }
+
+ id-dsa OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) x9-57(10040) x9algorithm(4) 1 }
+
+ DSA-Params ::= SEQUENCE {
+ p INTEGER,
+ q INTEGER,
+ g INTEGER
+ }
+
+ DSAPublicKey ::= INTEGER -- public key, y
+
+ -- Diffie-Hellman PK Algorithm, Parameters, and Keys
+
+ pk-dh PUBLIC-KEY ::= {
+ IDENTIFIER dhpublicnumber
+ KEY DHPublicKey
+ PARAMS TYPE DomainParameters ARE inheritable
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly }
+ }
+
+ dhpublicnumber OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-x942(10046)
+ number-type(2) 1 }
+
+ DomainParameters ::= SEQUENCE {
+ p INTEGER, -- odd prime, p=jq +1
+ g INTEGER, -- generator, g
+ q INTEGER, -- factor of p-1
+ j INTEGER OPTIONAL, -- subgroup factor, j>= 2
+ validationParams ValidationParams OPTIONAL
+ }
+
+ ValidationParams ::= SEQUENCE {
+ seed BIT STRING,
+ pgenCounter INTEGER
+ }
+
+ DiffieHellmanPublicNumber ::= INTEGER -- according to http://wikisec.free.fr/crypto/crypto.html
+
+ DHPublicKey ::= INTEGER -- public key, y = g^x mod p
+
+ -- KEA PK Algorithm and Parameters
+
+ pk-kea PUBLIC-KEY ::= {
+ IDENTIFIER id-keyExchangeAlgorithm
+ -- key is not encoded --
+ PARAMS TYPE KEA-Params-Id ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE {keyAgreement, encipherOnly, decipherOnly }
+ }
+ id-keyExchangeAlgorithm OBJECT IDENTIFIER ::= {
+ joint-iso-itu-t(2) country(16) us(840) organization(1)
+ gov(101) dod(2) infosec(1) algorithms(1) 22 }
+
+ KEA-Params-Id ::= OCTET STRING
+
+ -- Elliptic Curve (EC) Signatures: Unrestricted Algorithms
+ -- (Section 2.1.1 of RFC 5480)
+ --
+ -- EC Unrestricted Algorithm ID -- -- this is used for ECDSA
+
+ pk-ec PUBLIC-KEY ::= {
+ IDENTIFIER id-ecPublicKey
+ KEY ECPoint
+ PARAMS TYPE ECParameters ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { digitalSignature, nonRepudiation, keyAgreement,
+ keyCertSign, cRLSign }
+ }
+
+ ECPoint ::= OCTET STRING -- see RFC 5480 for syntax and restrictions
+
+ id-ecPublicKey OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 }
+
+ -- Elliptic Curve (EC) Signatures: Restricted Algorithms
+ -- (Section 2.1.2 of RFC 5480)
+ --
+ -- EC Diffie-Hellman Algorithm ID
+
+ pk-ecDH PUBLIC-KEY ::= {
+ IDENTIFIER id-ecDH
+ KEY ECPoint
+ PARAMS TYPE ECParameters ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { keyAgreement, encipherOnly, decipherOnly }
+ }
+
+ id-ecDH OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) schemes(1)
+ ecdh(12) }
+
+ -- EC Menezes-Qu-Vanstone Algorithm ID
+
+ pk-ecMQV PUBLIC-KEY ::= {
+ IDENTIFIER id-ecMQV
+ KEY ECPoint
+ PARAMS TYPE ECParameters ARE required
+ -- Private key format not in this module --
+ CERT-KEY-USAGE { keyAgreement, encipherOnly, decipherOnly }
+ }
+
+ id-ecMQV OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) schemes(1)
+ ecmqv(13) }
+
+ -- Parameters and Keys for both Restricted and Unrestricted EC
+
+ ECParameters ::= CHOICE {
+ namedCurve CURVE.&id({NamedCurve})
+ -- implicitCurve NULL
+ -- implicitCurve MUST NOT be used in PKIX
+ -- specifiedCurve SpecifiedCurve
+ -- specifiedCurve MUST NOT be used in PKIX
+ -- Details for specifiedCurve can be found in [X9.62]
+ -- Any future additions to this CHOICE should be coordinated
+ -- with ANSI X.9.
+ }
+ -- If you need to be able to decode ANSI X.9 parameter structures,
+ -- uncomment the implicitCurve and specifiedCurve above, and also
+ -- uncomment the following:
+ --(WITH COMPONENTS {namedCurve PRESENT})
+
+ -- Sec 2.1.1.1 Named Curve
+
+ CURVE ::= CLASS { &id OBJECT IDENTIFIER UNIQUE }
+ WITH SYNTAX { ID &id }
+
+ NamedCurve CURVE ::= {
+ { ID secp192r1 } | { ID sect163k1 } | { ID sect163r2 } |
+ { ID secp224r1 } | { ID sect233k1 } | { ID sect233r1 } |
+ { ID secp256r1 } | { ID sect283k1 } | { ID sect283r1 } |
+ { ID secp384r1 } | { ID sect409k1 } | { ID sect409r1 } |
+ { ID secp521r1 } | { ID sect571k1 } | { ID sect571r1 },
+ ... -- Extensible
+ }
+
+ -- Note in [X9.62] the curves are referred to as 'ansiX9' as
+ -- opposed to 'sec'. For example, secp192r1 is the same curve as
+ -- ansix9p192r1.
+
+ -- Note that in [PKI-ALG] the secp192r1 curve was referred to as
+ -- prime192v1 and the secp256r1 curve was referred to as
+ -- prime256v1.
+
+ -- Note that [FIPS186-3] refers to secp192r1 as P-192,
+ -- secp224r1 as P-224, secp256r1 as P-256, secp384r1 as P-384,
+ -- and secp521r1 as P-521.
+
+ secp192r1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) curves(3)
+ prime(1) 1 }
+
+ sect163k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 1 }
+
+ sect163r2 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 15 }
+
+ secp224r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 33 }
+
+ sect233k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 26 }
+
+ sect233r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 27 }
+
+ secp256r1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) curves(3)
+ prime(1) 7 }
+
+ sect283k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 16 }
+
+ sect283r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 17 }
+
+ secp384r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 34 }
+
+ sect409k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 36 }
+
+ sect409r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 37 }
+
+ secp521r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 35 }
+
+ sect571k1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 38 }
+
+ sect571r1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) certicom(132) curve(0) 39 }
+
+ -- RSA with MD-2
+
+ sa-rsaWithMD2 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER md2WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-md2 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY md2WithRSAEncryption }
+ }
+
+ md2WithRSAEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 2 }
+
+ -- RSA with MD-5
+
+ sa-rsaWithMD5 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER md5WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-md5 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS { IDENTIFIED BY md5WithRSAEncryption }
+ }
+
+ md5WithRSAEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 4 }
+
+ -- RSA with SHA-1
+
+ sa-rsaWithSHA1 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER sha1WithRSAEncryption
+ PARAMS TYPE NULL ARE required
+ HASHES { mda-sha1 }
+ PUBLIC-KEYS { pk-rsa }
+ SMIME-CAPS {IDENTIFIED BY sha1WithRSAEncryption }
+ }
+
+ sha1WithRSAEncryption OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1)
+ pkcs-1(1) 5 }
+
+ -- DSA with SHA-1
+
+ sa-dsaWithSHA1 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER dsa-with-sha1
+ VALUE DSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha1 }
+ PUBLIC-KEYS { pk-dsa }
+ SMIME-CAPS { IDENTIFIED BY dsa-with-sha1 }
+ }
+
+ dsa-with-sha1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) x9-57(10040) x9algorithm(4) 3 }
+
+ -- DSA with SHA-224
+
+ sa-dsaWithSHA224 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER dsa-with-sha224
+ VALUE DSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha224 }
+ PUBLIC-KEYS { pk-dsa }
+ SMIME-CAPS { IDENTIFIED BY dsa-with-sha224 }
+ }
+
+ dsa-with-sha224 OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) country(16) us(840) organization(1) gov(101)
+ csor(3) algorithms(4) id-dsa-with-sha2(3) 1 }
+
+ -- DSA with SHA-256
+
+ sa-dsaWithSHA256 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER dsa-with-sha256
+ VALUE DSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha256 }
+ PUBLIC-KEYS { pk-dsa }
+ SMIME-CAPS { IDENTIFIED BY dsa-with-sha256 }
+ }
+
+ dsa-with-sha256 OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) country(16) us(840) organization(1) gov(101)
+ csor(3) algorithms(4) id-dsa-with-sha2(3) 2 }
+
+ -- ECDSA with SHA-1
+
+ sa-ecdsaWithSHA1 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA1
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha1 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS {IDENTIFIED BY ecdsa-with-SHA1 }
+ }
+
+ ecdsa-with-SHA1 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045)
+ signatures(4) 1 }
+
+ -- ECDSA with SHA-224
+
+ sa-ecdsaWithSHA224 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA224
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha224 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA224 }
+ }
+
+ ecdsa-with-SHA224 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 1 }
+
+ -- ECDSA with SHA-256
+
+ sa-ecdsaWithSHA256 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA256
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha256 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA256 }
+ }
+
+ ecdsa-with-SHA256 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 2 }
+
+ -- ECDSA with SHA-384
+
+ sa-ecdsaWithSHA384 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA384
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha384 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA384 }
+ }
+ ecdsa-with-SHA384 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 3 }
+
+ -- ECDSA with SHA-512
+
+ sa-ecdsaWithSHA512 SIGNATURE-ALGORITHM ::= {
+ IDENTIFIER ecdsa-with-SHA512
+ VALUE ECDSA-Sig-Value
+ PARAMS TYPE NULL ARE absent
+ HASHES { mda-sha512 }
+ PUBLIC-KEYS { pk-ec }
+ SMIME-CAPS { IDENTIFIED BY ecdsa-with-SHA512 }
+ }
+
+ ecdsa-with-SHA512 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) ansi-X9-62(10045) signatures(4)
+ ecdsa-with-SHA2(3) 4 }
+
+ --
+ -- Signature Values
+ --
+
+ -- DSA
+
+ DSA-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
+
+ -- ECDSA
+
+ ECDSA-Sig-Value ::= SEQUENCE {
+ r INTEGER,
+ s INTEGER
+ }
+
+ --
+ -- Message Digest Algorithms (mda-)
+ --
+
+ HashAlgs DIGEST-ALGORITHM ::= {
+ mda-md2 |
+ mda-md5 |
+ mda-sha1,
+ ... -- Extensible
+ }
+ -- MD-2
+
+ mda-md2 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-md2
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-md2 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549)
+ digestAlgorithm(2) 2 }
+
+ -- MD-5
+
+ mda-md5 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-md5
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-md5 OBJECT IDENTIFIER ::= {
+ iso(1) member-body(2) us(840) rsadsi(113549)
+ digestAlgorithm(2) 5 }
+
+ -- SHA-1
+
+ mda-sha1 DIGEST-ALGORITHM ::= {
+ IDENTIFIER id-sha1
+ PARAMS TYPE NULL ARE preferredAbsent
+ }
+
+ id-sha1 OBJECT IDENTIFIER ::= {
+ iso(1) identified-organization(3) oiw(14) secsig(3)
+ algorithm(2) 26 }
+
+ END
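A minimal sketch of how a fixture such as PKIXAlgs-2009 can be exercised with the Erlang asn1 compiler, assuming its imported modules (AlgorithmInformation-2009, PKIX1-PSS-OAEP-Algorithms-2009) have already been compiled in the same directory so that their .asn1db files can be found; the value and options below are illustrative and not taken from the test suite:

    %% Compile the module for BER and round-trip a DSA-Sig-Value.
    %% SEQUENCE values may be given as plain record tuples.
    ok = asn1ct:compile('PKIXAlgs-2009', [ber]),
    Sig = {'DSA-Sig-Value', 12345, 67890},
    {ok, Der} = 'PKIXAlgs-2009':encode('DSA-Sig-Value', Sig),
    {ok, Sig} = 'PKIXAlgs-2009':decode('DSA-Sig-Value', Der)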
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1
new file mode 100644
index 0000000000..3ab074643f
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXAttributeCertificate-2009.asn1
@@ -0,0 +1,292 @@
+ PKIXAttributeCertificate-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, Extensions{}, SecurityCategory{},
+ EXTENSION, ATTRIBUTE, SECURITY-CATEGORY
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, DIGEST-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ -- IMPORTed module OIDs MAY change if [PKIXPROF] changes
+ -- PKIX Certificate Extensions
+
+ CertificateSerialNumber, UniqueIdentifier, id-pkix, id-pe, id-kp,
+ id-ad, id-at, SIGNED{}, SignatureAlgorithms
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ GeneralName, GeneralNames, id-ce, ext-AuthorityKeyIdentifier,
+ ext-AuthorityInfoAccess, ext-CRLDistributionPoints
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ ContentInfo
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549)
+ pkcs(1) pkcs-9(9) smime(16) modules(0) id-mod-cms-2004-02(41) };
+ -- Define the set of extensions that can appear.
+ -- Some of these are imported from PKIX Cert
+
+ AttributeCertExtensions EXTENSION ::= {
+ ext-auditIdentity | ext-targetInformation |
+ ext-AuthorityKeyIdentifier | ext-AuthorityInfoAccess |
+ ext-CRLDistributionPoints | ext-noRevAvail | ext-ac-proxying |
+ ext-aaControls, ... }
+
+ ext-auditIdentity EXTENSION ::= { SYNTAX
+ OCTET STRING IDENTIFIED BY id-pe-ac-auditIdentity}
+
+ ext-targetInformation EXTENSION ::= { SYNTAX
+ Targets IDENTIFIED BY id-ce-targetInformation }
+
+ ext-noRevAvail EXTENSION ::= { SYNTAX
+ NULL IDENTIFIED BY id-ce-noRevAvail}
+
+ ext-ac-proxying EXTENSION ::= { SYNTAX
+ ProxyInfo IDENTIFIED BY id-pe-ac-proxying}
+
+ ext-aaControls EXTENSION ::= { SYNTAX
+ AAControls IDENTIFIED BY id-pe-aaControls}
+
+ -- Define the set of attributes used here
+
+ AttributesDefined ATTRIBUTE ::= { at-authenticationInfo |
+ at-accesIdentity | at-chargingIdentity | at-group |
+ at-role | at-clearance | at-encAttrs, ...}
+
+ at-authenticationInfo ATTRIBUTE ::= { TYPE SvceAuthInfo
+ IDENTIFIED BY id-aca-authenticationInfo}
+
+ at-accesIdentity ATTRIBUTE ::= { TYPE SvceAuthInfo
+ IDENTIFIED BY id-aca-accessIdentity}
+
+ at-chargingIdentity ATTRIBUTE ::= { TYPE IetfAttrSyntax
+ IDENTIFIED BY id-aca-chargingIdentity}
+
+ at-group ATTRIBUTE ::= { TYPE IetfAttrSyntax
+ IDENTIFIED BY id-aca-group}
+
+ at-role ATTRIBUTE ::= { TYPE RoleSyntax
+ IDENTIFIED BY id-at-role}
+
+ at-clearance ATTRIBUTE ::= { TYPE Clearance
+ IDENTIFIED BY id-at-clearance}
+ at-clearance-RFC3281 ATTRIBUTE ::= {TYPE Clearance-rfc3281
+ IDENTIFIED BY id-at-clearance-rfc3281 }
+
+ at-encAttrs ATTRIBUTE ::= { TYPE ContentInfo
+ IDENTIFIED BY id-aca-encAttrs}
+
+ --
+ -- OIDs used by Attribute Certificate Extensions
+ --
+
+ id-pe-ac-auditIdentity OBJECT IDENTIFIER ::= { id-pe 4 }
+ id-pe-aaControls OBJECT IDENTIFIER ::= { id-pe 6 }
+ id-pe-ac-proxying OBJECT IDENTIFIER ::= { id-pe 10 }
+ id-ce-targetInformation OBJECT IDENTIFIER ::= { id-ce 55 }
+ id-ce-noRevAvail OBJECT IDENTIFIER ::= { id-ce 56 }
+
+ --
+ -- OIDs used by Attribute Certificate Attributes
+ --
+
+ id-aca OBJECT IDENTIFIER ::= { id-pkix 10 }
+
+ id-aca-authenticationInfo OBJECT IDENTIFIER ::= { id-aca 1 }
+ id-aca-accessIdentity OBJECT IDENTIFIER ::= { id-aca 2 }
+ id-aca-chargingIdentity OBJECT IDENTIFIER ::= { id-aca 3 }
+ id-aca-group OBJECT IDENTIFIER ::= { id-aca 4 }
+ -- { id-aca 5 } is reserved
+ id-aca-encAttrs OBJECT IDENTIFIER ::= { id-aca 6 }
+
+ id-at-role OBJECT IDENTIFIER ::= { id-at 72}
+ id-at-clearance OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) ds(5) attributeType(4) clearance (55) }
+
+ -- Uncomment the following declaration and comment the above line if
+ -- using the id-at-clearance attribute as defined in [RFC3281]
+ -- id-at-clearance ::= id-at-clearance-3281
+
+ id-at-clearance-rfc3281 OBJECT IDENTIFIER ::= {
+ joint-iso-ccitt(2) ds(5) module(1) selected-attribute-types(5)
+ clearance (55) }
+
+ --
+ -- The syntax of an Attribute Certificate
+ --
+
+ AttributeCertificate ::= SIGNED{AttributeCertificateInfo}
+
+ AttributeCertificateInfo ::= SEQUENCE {
+ version AttCertVersion, -- version is v2
+ holder Holder,
+ issuer AttCertIssuer,
+ signature AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ serialNumber CertificateSerialNumber,
+ attrCertValidityPeriod AttCertValidityPeriod,
+ attributes SEQUENCE OF
+ AttributeSet{{AttributesDefined}},
+ issuerUniqueID UniqueIdentifier OPTIONAL,
+ extensions Extensions{{AttributeCertExtensions}} OPTIONAL
+ }
+
+ AttCertVersion ::= INTEGER { v2(1) }
+
+ Holder ::= SEQUENCE {
+ baseCertificateID [0] IssuerSerial OPTIONAL,
+ -- the issuer and serial number of
+ -- the holder's Public Key Certificate
+ entityName [1] GeneralNames OPTIONAL,
+ -- the name of the claimant or role
+ objectDigestInfo [2] ObjectDigestInfo OPTIONAL
+ -- used to directly authenticate the
+ -- holder, for example, an executable
+ }
+
+ ObjectDigestInfo ::= SEQUENCE {
+ digestedObjectType ENUMERATED {
+ publicKey (0),
+ publicKeyCert (1),
+ otherObjectTypes (2) },
+ -- otherObjectTypes MUST NOT
+ -- be used in this profile
+ otherObjectTypeID OBJECT IDENTIFIER OPTIONAL,
+ digestAlgorithm AlgorithmIdentifier{DIGEST-ALGORITHM, {...}},
+ objectDigest BIT STRING
+ }
+
+ AttCertIssuer ::= CHOICE {
+ v1Form GeneralNames, -- MUST NOT be used in this
+ -- profile
+ v2Form [0] V2Form -- v2 only
+ }
+
+ V2Form ::= SEQUENCE {
+ issuerName GeneralNames OPTIONAL,
+ baseCertificateID [0] IssuerSerial OPTIONAL,
+ objectDigestInfo [1] ObjectDigestInfo OPTIONAL
+ -- issuerName MUST be present in this profile
+ -- baseCertificateID and objectDigestInfo MUST
+ -- NOT be present in this profile
+ }
+
+ IssuerSerial ::= SEQUENCE {
+ issuer GeneralNames,
+ serial CertificateSerialNumber,
+ issuerUID UniqueIdentifier OPTIONAL
+ }
+
+ AttCertValidityPeriod ::= SEQUENCE {
+ notBeforeTime GeneralizedTime,
+ notAfterTime GeneralizedTime
+ }
+
+ --
+ -- Syntax used by Attribute Certificate Extensions
+ --
+
+ Targets ::= SEQUENCE OF Target
+
+ Target ::= CHOICE {
+ targetName [0] GeneralName,
+ targetGroup [1] GeneralName,
+ targetCert [2] TargetCert
+ }
+
+ TargetCert ::= SEQUENCE {
+ targetCertificate IssuerSerial,
+ targetName GeneralName OPTIONAL,
+ certDigestInfo ObjectDigestInfo OPTIONAL
+ }
+
+ AAControls ::= SEQUENCE {
+ pathLenConstraint INTEGER (0..MAX) OPTIONAL,
+ permittedAttrs [0] AttrSpec OPTIONAL,
+ excludedAttrs [1] AttrSpec OPTIONAL,
+ permitUnSpecified BOOLEAN DEFAULT TRUE
+ }
+
+ AttrSpec::= SEQUENCE OF OBJECT IDENTIFIER
+
+ ProxyInfo ::= SEQUENCE OF Targets
+
+ --
+ -- Syntax used by Attribute Certificate Attributes
+ --
+ IetfAttrSyntax ::= SEQUENCE {
+ policyAuthority[0] GeneralNames OPTIONAL,
+ values SEQUENCE OF CHOICE {
+ octets OCTET STRING,
+ oid OBJECT IDENTIFIER,
+ string UTF8String
+ }
+ }
+
+ SvceAuthInfo ::= SEQUENCE {
+ service GeneralName,
+ ident GeneralName,
+ authInfo OCTET STRING OPTIONAL
+ }
+
+ RoleSyntax ::= SEQUENCE {
+ roleAuthority [0] GeneralNames OPTIONAL,
+ roleName [1] GeneralName
+ }
+
+ Clearance ::= SEQUENCE {
+ policyId OBJECT IDENTIFIER,
+ classList ClassList DEFAULT {unclassified},
+ securityCategories SET OF SecurityCategory
+ {{SupportedSecurityCategories}} OPTIONAL
+ }
+
+ -- Uncomment the following lines to support deprecated clearance
+ -- syntax and comment out previous Clearance.
+
+ -- Clearance ::= Clearance-rfc3281
+
+ Clearance-rfc3281 ::= SEQUENCE {
+ policyId [0] OBJECT IDENTIFIER,
+ classList [1] ClassList DEFAULT {unclassified},
+ securityCategories [2] SET OF SecurityCategory-rfc3281
+ {{SupportedSecurityCategories}} OPTIONAL
+ }
+
+ ClassList ::= BIT STRING {
+ unmarked (0),
+ unclassified (1),
+ restricted (2),
+ confidential (3),
+ secret (4),
+ topSecret (5)
+ }
+ SupportedSecurityCategories SECURITY-CATEGORY ::= { ... }
+
+ SecurityCategory-rfc3281{SECURITY-CATEGORY:Supported} ::= SEQUENCE {
+ type [0] IMPLICIT SECURITY-CATEGORY.
+ &id({Supported}),
+ value [1] EXPLICIT SECURITY-CATEGORY.
+ &Type({Supported}{@type})
+ }
+
+ ACClearAttrs ::= SEQUENCE {
+ acIssuer GeneralName,
+ acSerial INTEGER,
+ attrs SEQUENCE OF AttributeSet{{AttributesDefined}}
+ }
+
+ END
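As with the other RFC modules, a small hedged example of encoding one of this module's standalone types through the generated Erlang code (assuming the full set of imported modules has been compiled with [ber]; GeneralizedTime values are passed as strings):

    %% Round-trip an AttCertValidityPeriod; both times are GeneralizedTime strings.
    Period = {'AttCertValidityPeriod', "20240101000000Z", "20250101000000Z"},
    {ok, Der} = 'PKIXAttributeCertificate-2009':encode('AttCertValidityPeriod', Period),
    {ok, _Decoded} = 'PKIXAttributeCertificate-2009':decode('AttCertValidityPeriod', Der)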
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1
new file mode 100644
index 0000000000..968a142f28
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCMP-2009.asn1
@@ -0,0 +1,495 @@
+ PKIXCMP-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-cmp2000-02(50) }
+ DEFINITIONS EXPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, ALGORITHM,
+ DIGEST-ALGORITHM, MAC-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ Certificate, CertificateList
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ GeneralName, KeyIdentifier
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ CertTemplate, PKIPublicationInfo, EncryptedValue, CertId,
+ CertReqMessages
+ FROM PKIXCRMF-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55) }
+ -- see also the behavioral clarifications to CRMF codified in
+ -- Appendix C of this specification
+
+ CertificationRequest
+ FROM PKCS-10
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkcs10-2009(69)}
+ -- (specified in RFC 2986 with 1993 ASN.1 syntax and IMPLICIT
+ -- tags). Alternatively, implementers may directly include
+ -- the [PKCS10] syntax in this module
+ ;
+
+ -- the rest of the module contains locally defined OIDs and
+ -- constructs
+
+ CMPCertificate ::= CHOICE { x509v3PKCert Certificate, ... }
+ -- This syntax, while bits-on-the-wire compatible with the
+ -- standard X.509 definition of "Certificate", allows the
+ -- possibility of future certificate types (such as X.509
+ -- attribute certificates, WAP WTLS certificates, or other kinds
+ -- of certificates) within this certificate management protocol,
+ -- should a need ever arise to support such generality. Those
+ -- implementations that do not foresee a need to ever support
+ -- other certificate types MAY, if they wish, comment out the
+ -- above structure and "uncomment" the following one prior to
+ -- compiling this ASN.1 module. (Note that interoperability
+ -- with implementations that don't do this will be unaffected by
+ -- this change.)
+
+ -- CMPCertificate ::= Certificate
+
+ PKIMessage ::= SEQUENCE {
+ header PKIHeader,
+ body PKIBody,
+ protection [0] PKIProtection OPTIONAL,
+ extraCerts [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate
+ OPTIONAL }
+
+ PKIMessages ::= SEQUENCE SIZE (1..MAX) OF PKIMessage
+
+ PKIHeader ::= SEQUENCE {
+ pvno INTEGER { cmp1999(1), cmp2000(2) },
+ sender GeneralName,
+ -- identifies the sender
+ recipient GeneralName,
+ -- identifies the intended recipient
+ messageTime [0] GeneralizedTime OPTIONAL,
+ -- time of production of this message (used when sender
+ -- believes that the transport will be "suitable"; i.e.,
+ -- that the time will still be meaningful upon receipt)
+ protectionAlg [1] AlgorithmIdentifier{ALGORITHM, {...}}
+ OPTIONAL,
+ -- algorithm used for calculation of protection bits
+ senderKID [2] KeyIdentifier OPTIONAL,
+ recipKID [3] KeyIdentifier OPTIONAL,
+ -- to identify specific keys used for protection
+ transactionID [4] OCTET STRING OPTIONAL,
+ -- identifies the transaction; i.e., this will be the same in
+ -- corresponding request, response, certConf, and PKIConf
+ -- messages
+ senderNonce [5] OCTET STRING OPTIONAL,
+ recipNonce [6] OCTET STRING OPTIONAL,
+ -- nonces used to provide replay protection, senderNonce
+ -- is inserted by the creator of this message; recipNonce
+ -- is a nonce previously inserted in a related message by
+ -- the intended recipient of this message
+ freeText [7] PKIFreeText OPTIONAL,
+ -- this may be used to indicate context-specific instructions
+ -- (this field is intended for human consumption)
+ generalInfo [8] SEQUENCE SIZE (1..MAX) OF
+ InfoTypeAndValue OPTIONAL
+ -- this may be used to convey context-specific information
+ -- (this field not primarily intended for human consumption)
+ }
+
+ PKIFreeText ::= SEQUENCE SIZE (1..MAX) OF UTF8String
+ -- text encoded as UTF-8 String [RFC3629] (note: each
+ -- UTF8String MAY include an [RFC3066] language tag
+ -- to indicate the language of the contained text;
+ -- see [RFC2482] for details)
+
+ PKIBody ::= CHOICE { -- message-specific body elements
+ ir [0] CertReqMessages, --Initialization Request
+ ip [1] CertRepMessage, --Initialization Response
+ cr [2] CertReqMessages, --Certification Request
+ cp [3] CertRepMessage, --Certification Response
+ p10cr [4] CertificationRequest, --imported from [PKCS10]
+ popdecc [5] POPODecKeyChallContent, --pop Challenge
+ popdecr [6] POPODecKeyRespContent, --pop Response
+ kur [7] CertReqMessages, --Key Update Request
+ kup [8] CertRepMessage, --Key Update Response
+ krr [9] CertReqMessages, --Key Recovery Request
+ krp [10] KeyRecRepContent, --Key Recovery Response
+ rr [11] RevReqContent, --Revocation Request
+ rp [12] RevRepContent, --Revocation Response
+ ccr [13] CertReqMessages, --Cross-Cert. Request
+ ccp [14] CertRepMessage, --Cross-Cert. Response
+ ckuann [15] CAKeyUpdAnnContent, --CA Key Update Ann.
+ cann [16] CertAnnContent, --Certificate Ann.
+ rann [17] RevAnnContent, --Revocation Ann.
+ crlann [18] CRLAnnContent, --CRL Announcement
+ pkiconf [19] PKIConfirmContent, --Confirmation
+ nested [20] NestedMessageContent, --Nested Message
+ genm [21] GenMsgContent, --General Message
+ genp [22] GenRepContent, --General Response
+ error [23] ErrorMsgContent, --Error Message
+ certConf [24] CertConfirmContent, --Certificate confirm
+ pollReq [25] PollReqContent, --Polling request
+ pollRep [26] PollRepContent --Polling response
+ }
+
+ PKIProtection ::= BIT STRING
+
+ ProtectedPart ::= SEQUENCE {
+ header PKIHeader,
+ body PKIBody }
+
+ id-PasswordBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ usa(840) nt(113533) nsn(7) algorithms(66) 13 }
+ PBMParameter ::= SEQUENCE {
+ salt OCTET STRING,
+ -- note: implementations MAY wish to limit acceptable sizes
+ -- of this string to values appropriate for their environment
+ -- in order to reduce the risk of denial-of-service attacks
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}},
+ -- AlgId for a One-Way Function (SHA-1 recommended)
+ iterationCount INTEGER,
+ -- number of times the OWF is applied
+ -- note: implementations MAY wish to limit acceptable sizes
+ -- of this integer to values appropriate for their environment
+ -- in order to reduce the risk of denial-of-service attacks
+ mac AlgorithmIdentifier{MAC-ALGORITHM, {...}}
+ -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC [PKCS11],
+ -- or HMAC [RFC2104, RFC2202])
+ }
+
+ id-DHBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ usa(840) nt(113533) nsn(7) algorithms(66) 30 }
+ DHBMParameter ::= SEQUENCE {
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}},
+ -- AlgId for a One-Way Function (SHA-1 recommended)
+ mac AlgorithmIdentifier{MAC-ALGORITHM, {...}}
+ -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC [PKCS11],
+ -- or HMAC [RFC2104, RFC2202])
+ }
+
+ PKIStatus ::= INTEGER {
+ accepted (0),
+ -- you got exactly what you asked for
+ grantedWithMods (1),
+ -- you got something like what you asked for; the
+ -- requester is responsible for ascertaining the differences
+ rejection (2),
+ -- you don't get it, more information elsewhere in the message
+ waiting (3),
+ -- the request body part has not yet been processed; expect to
+ -- hear more later (note: proper handling of this status
+ -- response MAY use the polling req/rep PKIMessages specified
+ -- in Section 5.3.22; alternatively, polling in the underlying
+ -- transport layer MAY have some utility in this regard)
+ revocationWarning (4),
+ -- this message contains a warning that a revocation is
+ -- imminent
+ revocationNotification (5),
+ -- notification that a revocation has occurred
+ keyUpdateWarning (6)
+ -- update already done for the oldCertId specified in
+ -- CertReqMsg
+ }
+
+ PKIFailureInfo ::= BIT STRING {
+ -- since we can fail in more than one way!
+ -- More codes may be added in the future if/when required.
+ badAlg (0),
+ -- unrecognized or unsupported Algorithm Identifier
+ badMessageCheck (1),
+ -- integrity check failed (e.g., signature did not verify)
+ badRequest (2),
+ -- transaction not permitted or supported
+ badTime (3),
+ -- messageTime was not sufficiently close to the system time,
+ -- as defined by local policy
+ badCertId (4),
+ -- no certificate could be found matching the provided criteria
+ badDataFormat (5),
+ -- the data submitted has the wrong format
+ wrongAuthority (6),
+ -- the authority indicated in the request is different from the
+ -- one creating the response token
+ incorrectData (7),
+ -- the requester's data is incorrect (for notary services)
+ missingTimeStamp (8),
+ -- when the timestamp is missing but should be there
+ -- (by policy)
+ badPOP (9),
+ -- the proof-of-possession failed
+ certRevoked (10),
+ -- the certificate has already been revoked
+ certConfirmed (11),
+ -- the certificate has already been confirmed
+ wrongIntegrity (12),
+ -- invalid integrity, password based instead of signature or
+ -- vice versa
+ badRecipientNonce (13),
+ -- invalid recipient nonce, either missing or wrong value
+ timeNotAvailable (14),
+ -- the TSA's time source is not available
+ unacceptedPolicy (15),
+ -- the requested TSA policy is not supported by the TSA
+ unacceptedExtension (16),
+ -- the requested extension is not supported by the TSA
+ addInfoNotAvailable (17),
+ -- the additional information requested could not be
+ -- understood or is not available
+ badSenderNonce (18),
+ -- invalid sender nonce, either missing or wrong size
+ badCertTemplate (19),
+ -- invalid cert. template or missing mandatory information
+ signerNotTrusted (20),
+ -- signer of the message unknown or not trusted
+ transactionIdInUse (21),
+ -- the transaction identifier is already in use
+ unsupportedVersion (22),
+ -- the version of the message is not supported
+ notAuthorized (23),
+ -- the sender was not authorized to make the preceding
+ -- request or perform the preceding action
+ systemUnavail (24),
+ -- the request cannot be handled due to system unavailability
+ systemFailure (25),
+ -- the request cannot be handled due to system failure
+ duplicateCertReq (26)
+ -- certificate cannot be issued because a duplicate
+ -- certificate already exists
+ }
+
+ PKIStatusInfo ::= SEQUENCE {
+ status PKIStatus,
+ statusString PKIFreeText OPTIONAL,
+ failInfo PKIFailureInfo OPTIONAL }
+
+ OOBCert ::= CMPCertificate
+
+ OOBCertHash ::= SEQUENCE {
+ hashAlg [0] AlgorithmIdentifier{DIGEST-ALGORITHM, {...}}
+ OPTIONAL,
+ certId [1] CertId OPTIONAL,
+ hashVal BIT STRING
+ -- hashVal is calculated over the DER encoding of the
+ -- self-signed certificate with the identifier certID.
+ }
+
+ POPODecKeyChallContent ::= SEQUENCE OF Challenge
+ -- One Challenge per encryption key certification request (in the
+ -- same order as these requests appear in CertReqMessages).
+
+ Challenge ::= SEQUENCE {
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM, {...}}
+ OPTIONAL,
+ -- MUST be present in the first Challenge; MAY be omitted in
+ -- any subsequent Challenge in POPODecKeyChallContent (if
+ -- omitted, then the owf used in the immediately preceding
+ -- Challenge is to be used).
+ witness OCTET STRING,
+ -- the result of applying the one-way function (owf) to a
+ -- randomly-generated INTEGER, A. [Note that a different
+ -- INTEGER MUST be used for each Challenge.]
+ challenge OCTET STRING
+ -- the encryption (under the public key for which the cert.
+ -- request is being made) of Rand, where Rand is specified as
+ -- Rand ::= SEQUENCE {
+ -- int INTEGER,
+ -- - the randomly-generated INTEGER A (above)
+ -- sender GeneralName
+ -- - the sender's name (as included in PKIHeader)
+ -- }
+ }
+
+ POPODecKeyRespContent ::= SEQUENCE OF INTEGER
+ -- One INTEGER per encryption key certification request (in the
+ -- same order as these requests appear in CertReqMessages). The
+ -- retrieved INTEGER A (above) is returned to the sender of the
+ -- corresponding Challenge.
+
+ CertRepMessage ::= SEQUENCE {
+ caPubs [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate
+ OPTIONAL,
+ response SEQUENCE OF CertResponse }
+
+ CertResponse ::= SEQUENCE {
+ certReqId INTEGER,
+ -- to match this response with the corresponding request (a value
+ -- of -1 is to be used if certReqId is not specified in the
+ -- corresponding request)
+ status PKIStatusInfo,
+ certifiedKeyPair CertifiedKeyPair OPTIONAL,
+ rspInfo OCTET STRING OPTIONAL
+ -- analogous to the id-regInfo-utf8Pairs string defined
+ -- for regInfo in CertReqMsg [RFC4211]
+ }
+
+ CertifiedKeyPair ::= SEQUENCE {
+ certOrEncCert CertOrEncCert,
+ privateKey [0] EncryptedValue OPTIONAL,
+ -- see [RFC4211] for comment on encoding
+ publicationInfo [1] PKIPublicationInfo OPTIONAL }
+
+ CertOrEncCert ::= CHOICE {
+ certificate [0] CMPCertificate,
+ encryptedCert [1] EncryptedValue }
+ KeyRecRepContent ::= SEQUENCE {
+ status PKIStatusInfo,
+ newSigCert [0] CMPCertificate OPTIONAL,
+ caCerts [1] SEQUENCE SIZE (1..MAX) OF
+ CMPCertificate OPTIONAL,
+ keyPairHist [2] SEQUENCE SIZE (1..MAX) OF
+ CertifiedKeyPair OPTIONAL }
+
+ RevReqContent ::= SEQUENCE OF RevDetails
+
+ RevDetails ::= SEQUENCE {
+ certDetails CertTemplate,
+ -- allows requester to specify as much as they can about
+ -- the cert. for which revocation is requested
+ -- (e.g., for cases in which serialNumber is not available)
+ crlEntryDetails Extensions{{...}} OPTIONAL
+ -- requested crlEntryExtensions
+ }
+
+ RevRepContent ::= SEQUENCE {
+ status SEQUENCE SIZE (1..MAX) OF PKIStatusInfo,
+ -- in same order as was sent in RevReqContent
+ revCerts [0] SEQUENCE SIZE (1..MAX) OF CertId OPTIONAL,
+ -- IDs for which revocation was requested
+ -- (same order as status)
+ crls [1] SEQUENCE SIZE (1..MAX) OF CertificateList OPTIONAL
+ -- the resulting CRLs (there may be more than one)
+ }
+
+ CAKeyUpdAnnContent ::= SEQUENCE {
+ oldWithNew CMPCertificate, -- old pub signed with new priv
+ newWithOld CMPCertificate, -- new pub signed with old priv
+ newWithNew CMPCertificate -- new pub signed with new priv
+ }
+
+ CertAnnContent ::= CMPCertificate
+
+ RevAnnContent ::= SEQUENCE {
+ status PKIStatus,
+ certId CertId,
+ willBeRevokedAt GeneralizedTime,
+ badSinceDate GeneralizedTime,
+ crlDetails Extensions{{...}} OPTIONAL
+ -- extra CRL details (e.g., crl number, reason, location, etc.)
+ }
+
+ CRLAnnContent ::= SEQUENCE OF CertificateList
+ PKIConfirmContent ::= NULL
+
+ NestedMessageContent ::= PKIMessages
+
+ INFO-TYPE-AND-VALUE ::= TYPE-IDENTIFIER
+
+ InfoTypeAndValue ::= SEQUENCE {
+ infoType INFO-TYPE-AND-VALUE.
+ &id({SupportedInfoSet}),
+ infoValue INFO-TYPE-AND-VALUE.
+ &Type({SupportedInfoSet}{@infoType}) }
+
+ SupportedInfoSet INFO-TYPE-AND-VALUE ::= { ... }
+
+ -- Example InfoTypeAndValue contents include, but are not limited
+ -- to, the following (uncomment in this ASN.1 module and use as
+ -- appropriate for a given environment):
+ --
+ -- id-it-caProtEncCert OBJECT IDENTIFIER ::= {id-it 1}
+ -- CAProtEncCertValue ::= CMPCertificate
+ -- id-it-signKeyPairTypes OBJECT IDENTIFIER ::= {id-it 2}
+ -- SignKeyPairTypesValue ::= SEQUENCE OF
+ -- AlgorithmIdentifier{{...}}
+ -- id-it-encKeyPairTypes OBJECT IDENTIFIER ::= {id-it 3}
+ -- EncKeyPairTypesValue ::= SEQUENCE OF
+ -- AlgorithmIdentifier{{...}}
+ -- id-it-preferredSymmAlg OBJECT IDENTIFIER ::= {id-it 4}
+ -- PreferredSymmAlgValue ::= AlgorithmIdentifier{{...}}
+ -- id-it-caKeyUpdateInfo OBJECT IDENTIFIER ::= {id-it 5}
+ -- CAKeyUpdateInfoValue ::= CAKeyUpdAnnContent
+ -- id-it-currentCRL OBJECT IDENTIFIER ::= {id-it 6}
+ -- CurrentCRLValue ::= CertificateList
+ -- id-it-unsupportedOIDs OBJECT IDENTIFIER ::= {id-it 7}
+ -- UnsupportedOIDsValue ::= SEQUENCE OF OBJECT IDENTIFIER
+ -- id-it-keyPairParamReq OBJECT IDENTIFIER ::= {id-it 10}
+ -- KeyPairParamReqValue ::= OBJECT IDENTIFIER
+ -- id-it-keyPairParamRep OBJECT IDENTIFIER ::= {id-it 11}
 -- KeyPairParamRepValue ::= AlgorithmIdentifier
+ -- id-it-revPassphrase OBJECT IDENTIFIER ::= {id-it 12}
+ -- RevPassphraseValue ::= EncryptedValue
+ -- id-it-implicitConfirm OBJECT IDENTIFIER ::= {id-it 13}
+ -- ImplicitConfirmValue ::= NULL
+ -- id-it-confirmWaitTime OBJECT IDENTIFIER ::= {id-it 14}
+ -- ConfirmWaitTimeValue ::= GeneralizedTime
+ -- id-it-origPKIMessage OBJECT IDENTIFIER ::= {id-it 15}
+ -- OrigPKIMessageValue ::= PKIMessages
+ -- id-it-suppLangTags OBJECT IDENTIFIER ::= {id-it 16}
+ -- SuppLangTagsValue ::= SEQUENCE OF UTF8String
+ --
+ -- where
+ --
+ -- id-pkix OBJECT IDENTIFIER ::= {
+ -- iso(1) identified-organization(3)
+ -- dod(6) internet(1) security(5) mechanisms(5) pkix(7)}
+ -- and
+ -- id-it OBJECT IDENTIFIER ::= {id-pkix 4}
+ --
+ --
+ -- This construct MAY also be used to define new PKIX Certificate
+ -- Management Protocol request and response messages, or general-
+ -- purpose (e.g., announcement) messages for future needs or for
+ -- specific environments.
+
+ GenMsgContent ::= SEQUENCE OF InfoTypeAndValue
+
+ -- May be sent by EE, RA, or CA (depending on message content).
+ -- The OPTIONAL infoValue parameter of InfoTypeAndValue will
+ -- typically be omitted for some of the examples given above.
+ -- The receiver is free to ignore any contained OBJECT IDs that it
+ -- does not recognize. If sent from EE to CA, the empty set
+ -- indicates that the CA may send
+ -- any/all information that it wishes.
+
+ GenRepContent ::= SEQUENCE OF InfoTypeAndValue
+ -- Receiver MAY ignore any contained OIDs that it does not
+ -- recognize.
+
+ ErrorMsgContent ::= SEQUENCE {
+ pKIStatusInfo PKIStatusInfo,
+ errorCode INTEGER OPTIONAL,
+ -- implementation-specific error codes
+ errorDetails PKIFreeText OPTIONAL
+ -- implementation-specific error details
+ }
+
+ CertConfirmContent ::= SEQUENCE OF CertStatus
+
+ CertStatus ::= SEQUENCE {
+ certHash OCTET STRING,
+ -- the hash of the certificate, using the same hash algorithm
+ -- as is used to create and verify the certificate signature
+ certReqId INTEGER,
+ -- to match this confirmation with the corresponding req/rep
+ statusInfo PKIStatusInfo OPTIONAL }
+
+ PollReqContent ::= SEQUENCE OF SEQUENCE {
+ certReqId INTEGER }
+
+ PollRepContent ::= SEQUENCE OF SEQUENCE {
+ certReqId INTEGER,
+ checkAfter INTEGER, -- time in seconds
+ reason PKIFreeText OPTIONAL }
+
+ END
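For illustration only (not from the test suite), a PKIStatusInfo value can be built with the named-INTEGER and named-bit notations that the Erlang asn1 backend accepts, assuming PKIXCMP-2009 and its imports have been compiled with [ber]; asn1_NOVALUE marks the absent OPTIONAL field:

    %% A rejection status with the badRequest failure bit set and no statusString.
    Status = {'PKIStatusInfo', rejection, asn1_NOVALUE, [badRequest]},
    {ok, Der} = 'PKIXCMP-2009':encode('PKIStatusInfo', Status),
    {ok, _Decoded} = 'PKIXCMP-2009':decode('PKIStatusInfo', Der)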
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1
new file mode 100644
index 0000000000..1c0b780499
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/PKIXCRMF-2009.asn1
@@ -0,0 +1,409 @@
+ PKIXCRMF-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-crmf2005-02(55)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ AttributeSet{}, Extensions{}, EXTENSION, ATTRIBUTE,
+ SingleAttribute{}
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkixCommon-02(57) }
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, ALGORITHM,
+ DIGEST-ALGORITHM, MAC-ALGORITHM, PUBLIC-KEY
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ Version, Name, Time, SubjectPublicKeyInfo, UniqueIdentifier, id-pkix,
+ SignatureAlgorithms
+ FROM PKIX1Explicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51)}
+
+ GeneralName, CertExtensions
+ FROM PKIX1Implicit-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59)}
+
+ EnvelopedData, CONTENT-TYPE
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41)}
+ maca-hMAC-SHA1
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37) }
+
+ mda-sha1
+ FROM PKIXAlgs-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56) } ;
+
+ -- arc for Internet X.509 PKI protocols and their components
+
+ id-pkip OBJECT IDENTIFIER ::= { id-pkix 5 }
+
+ id-smime OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 16 }
+
+ id-ct OBJECT IDENTIFIER ::= { id-smime 1 } -- content types
+
+ -- Core definitions for this module
+
+ CertReqMessages ::= SEQUENCE SIZE (1..MAX) OF CertReqMsg
+
+ CertReqMsg ::= SEQUENCE {
+ certReq CertRequest,
+ popo ProofOfPossession OPTIONAL,
+ -- content depends upon key type
+ regInfo SEQUENCE SIZE(1..MAX) OF
+ SingleAttribute{{RegInfoSet}} OPTIONAL }
+
+ CertRequest ::= SEQUENCE {
+ certReqId INTEGER,
+ -- ID for matching request and reply
+ certTemplate CertTemplate,
+ -- Selected fields of cert to be issued
+ controls Controls OPTIONAL }
+ -- Attributes affecting issuance
+
+ CertTemplate ::= SEQUENCE {
+ version [0] Version OPTIONAL,
+ serialNumber [1] INTEGER OPTIONAL,
+ signingAlg [2] AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}} OPTIONAL,
+ issuer [3] Name OPTIONAL,
+ validity [4] OptionalValidity OPTIONAL,
+ subject [5] Name OPTIONAL,
+ publicKey [6] SubjectPublicKeyInfo OPTIONAL,
+ issuerUID [7] UniqueIdentifier OPTIONAL,
+ subjectUID [8] UniqueIdentifier OPTIONAL,
+ extensions [9] Extensions{{CertExtensions}} OPTIONAL }
+
+ OptionalValidity ::= SEQUENCE {
+ notBefore [0] Time OPTIONAL,
+ notAfter [1] Time OPTIONAL } -- at least one MUST be present
+
+ Controls ::= SEQUENCE SIZE(1..MAX) OF SingleAttribute
+ {{RegControlSet}}
+
+ ProofOfPossession ::= CHOICE {
+ raVerified [0] NULL,
+ -- used if the RA has already verified that the requester is in
+ -- possession of the private key
+ signature [1] POPOSigningKey,
+ keyEncipherment [2] POPOPrivKey,
+ keyAgreement [3] POPOPrivKey }
+
+ POPOSigningKey ::= SEQUENCE {
+ poposkInput [0] POPOSigningKeyInput OPTIONAL,
+ algorithmIdentifier AlgorithmIdentifier{SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ signature BIT STRING }
+ -- The signature (using "algorithmIdentifier") is on the
+ -- DER-encoded value of poposkInput. NOTE: If the CertReqMsg
+ -- certReq CertTemplate contains the subject and publicKey values,
+ -- then poposkInput MUST be omitted and the signature MUST be
+ -- computed over the DER-encoded value of CertReqMsg certReq. If
+ -- the CertReqMsg certReq CertTemplate does not contain both the
+ -- public key and subject values (i.e., if it contains only one
+ -- of these, or neither), then poposkInput MUST be present and
+ -- MUST be signed.
+
+ POPOSigningKeyInput ::= SEQUENCE {
+ authInfo CHOICE {
+ sender [0] GeneralName,
+ -- used only if an authenticated identity has been
+ -- established for the sender (e.g., a DN from a
+ -- previously-issued and currently-valid certificate)
+ publicKeyMAC PKMACValue },
+ -- used if no authenticated GeneralName currently exists for
+ -- the sender; publicKeyMAC contains a password-based MAC
+ -- on the DER-encoded value of publicKey
+ publicKey SubjectPublicKeyInfo } -- from CertTemplate
+
+ PKMACValue ::= SEQUENCE {
+ algId AlgorithmIdentifier{MAC-ALGORITHM,
+ {Password-MACAlgorithms}},
+ value BIT STRING }
+
+ --
+ -- Define the currently only acceptable MAC algorithm to be used
+ -- for the PKMACValue structure
+ --
+
+ id-PasswordBasedMac OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ usa(840) nt(113533) nsn(7) algorithms(66) 13 }
+
+ Password-MACAlgorithms MAC-ALGORITHM ::= {
+ {IDENTIFIER id-PasswordBasedMac
+ PARAMS TYPE PBMParameter ARE required
+ IS-KEYED-MAC TRUE
+ }, ...
+ }
+
+ PBMParameter ::= SEQUENCE {
+ salt OCTET STRING,
+ owf AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {DigestAlgorithms}},
+ -- AlgId for a One-Way Function (SHA-1 recommended)
+ iterationCount INTEGER,
+ -- number of times the OWF is applied
+ mac AlgorithmIdentifier{MAC-ALGORITHM,
+ {MACAlgorithms}}
+ -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC, or HMAC
+ }
+
+ DigestAlgorithms DIGEST-ALGORITHM ::= {
+ mda-sha1, ...
+ }
+
+ MACAlgorithms MAC-ALGORITHM ::= {
+ -- The modules containing the ASN.1 for the DES and 3DES MAC
+ -- algorithms have not been updated at the time that this is
+ -- being published. Users of this module should define the
+ -- appropriate MAC-ALGORITHM objects and uncomment the
+ -- following lines if they support these MAC algorithms.
+ -- maca-des-mac | maca-3des-mac --
+ maca-hMAC-SHA1,
+ ...
+ }
+
+ POPOPrivKey ::= CHOICE {
+ thisMessage [0] BIT STRING, -- Deprecated
+ -- possession is proven in this message (which contains
+ -- the private key itself (encrypted for the CA))
+ subsequentMessage [1] SubsequentMessage,
+ -- possession will be proven in a subsequent message
+ dhMAC [2] BIT STRING, -- Deprecated
+ agreeMAC [3] PKMACValue,
+ encryptedKey [4] EnvelopedData }
+ -- for keyAgreement (only), possession is proven in this message
+ -- (which contains a MAC (over the DER-encoded value of the
+ -- certReq parameter in CertReqMsg, which MUST include both
+ -- subject and publicKey) based on a key derived from the end
+ -- entity's private DH key and the CA's public DH key);
+
+ SubsequentMessage ::= INTEGER {
+ encrCert (0),
+ -- requests that resulting certificate be encrypted for the
+ -- end entity (following which, POP will be proven in a
+ -- confirmation message)
+ challengeResp (1) }
+ -- requests that CA engage in challenge-response exchange with
+ -- end entity in order to prove private key possession
+
+ --
+ -- id-ct-encKeyWithID content type used as the content type for the
+ -- EnvelopedData in POPOPrivKey.
+ -- It contains both a private key and an identifier for key escrow
+ -- agents to check against recovery requestors.
+ --
+
+ ct-encKeyWithID CONTENT-TYPE ::=
+ { EncKeyWithID IDENTIFIED BY id-ct-encKeyWithID }
+
+ id-ct-encKeyWithID OBJECT IDENTIFIER ::= {id-ct 21}
+
+ EncKeyWithID ::= SEQUENCE {
+ privateKey PrivateKeyInfo,
+ identifier CHOICE {
+ string UTF8String,
+ generalName GeneralName
+ } OPTIONAL
+ }
+
+ PrivateKeyInfo ::= SEQUENCE {
+ version INTEGER,
+ privateKeyAlgorithm AlgorithmIdentifier{PUBLIC-KEY, {...}},
+ privateKey OCTET STRING,
+ -- Structure of public key is in PUBLIC-KEY.&PrivateKey
+ attributes [0] IMPLICIT Attributes OPTIONAL
+ }
+
+ Attributes ::= SET OF AttributeSet{{PrivateKeyAttributes}}
+ PrivateKeyAttributes ATTRIBUTE ::= {...}
+
+ --
+ -- 6. Registration Controls in CRMF
+ --
+
+ id-regCtrl OBJECT IDENTIFIER ::= { id-pkip 1 }
+
+ RegControlSet ATTRIBUTE ::= {
+ regCtrl-regToken | regCtrl-authenticator |
+ regCtrl-pkiPublicationInfo | regCtrl-pkiArchiveOptions |
+ regCtrl-oldCertID | regCtrl-protocolEncrKey, ... }
+
+ --
+ -- 6.1. Registration Token Control
+ --
+
+ regCtrl-regToken ATTRIBUTE ::=
+ { TYPE RegToken IDENTIFIED BY id-regCtrl-regToken }
+
+ id-regCtrl-regToken OBJECT IDENTIFIER ::= { id-regCtrl 1 }
+
+ RegToken ::= UTF8String
+
+ --
+ -- 6.2. Authenticator Control
+ --
+
+ regCtrl-authenticator ATTRIBUTE ::=
+ { TYPE Authenticator IDENTIFIED BY id-regCtrl-authenticator }
+
+ id-regCtrl-authenticator OBJECT IDENTIFIER ::= { id-regCtrl 2 }
+
+ Authenticator ::= UTF8String
+
+ --
+ -- 6.3. Publication Information Control
+ --
+
+ regCtrl-pkiPublicationInfo ATTRIBUTE ::=
+ { TYPE PKIPublicationInfo IDENTIFIED BY
+ id-regCtrl-pkiPublicationInfo }
+
+ id-regCtrl-pkiPublicationInfo OBJECT IDENTIFIER ::= { id-regCtrl 3 }
+
+ PKIPublicationInfo ::= SEQUENCE {
+ action INTEGER {
+ dontPublish (0),
+ pleasePublish (1) },
+ pubInfos SEQUENCE SIZE (1..MAX) OF SinglePubInfo OPTIONAL }
+ -- pubInfos MUST NOT be present if action is "dontPublish"
+ -- (if action is "pleasePublish" and pubInfos is omitted,
+ -- "dontCare" is assumed)
+
+ SinglePubInfo ::= SEQUENCE {
+ pubMethod INTEGER {
+ dontCare (0),
+ x500 (1),
+ web (2),
+ ldap (3) },
+ pubLocation GeneralName OPTIONAL }
+
+ --
+ -- 6.4. Archive Options Control
+ --
+
+ regCtrl-pkiArchiveOptions ATTRIBUTE ::=
+ { TYPE PKIArchiveOptions IDENTIFIED BY
+ id-regCtrl-pkiArchiveOptions }
+
+ id-regCtrl-pkiArchiveOptions OBJECT IDENTIFIER ::= { id-regCtrl 4 }
+
+ PKIArchiveOptions ::= CHOICE {
+ encryptedPrivKey [0] EncryptedKey,
+ -- the actual value of the private key
+ keyGenParameters [1] KeyGenParameters,
+ -- parameters that allow the private key to be re-generated
+ archiveRemGenPrivKey [2] BOOLEAN }
+ -- set to TRUE if sender wishes receiver to archive the private
+ -- key of a key pair that the receiver generates in response to
+ -- this request; set to FALSE if no archive is desired.
+
+ EncryptedKey ::= CHOICE {
+ encryptedValue EncryptedValue, -- Deprecated
+ envelopedData [0] EnvelopedData }
+ -- The encrypted private key MUST be placed in the envelopedData
+ -- encryptedContentInfo encryptedContent OCTET STRING.
+
+ --
+ -- We skipped doing the full constraints here since this structure
+ -- has been deprecated in favor of EnvelopedData
+ --
+
+ EncryptedValue ::= SEQUENCE {
+ intendedAlg [0] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL,
+ -- the intended algorithm for which the value will be used
+ symmAlg [1] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL,
+ -- the symmetric algorithm used to encrypt the value
+ encSymmKey [2] BIT STRING OPTIONAL,
+ -- the (encrypted) symmetric key used to encrypt the value
+ keyAlg [3] AlgorithmIdentifier{ALGORITHM, {...}} OPTIONAL,
+ -- algorithm used to encrypt the symmetric key
+ valueHint [4] OCTET STRING OPTIONAL,
+ -- a brief description or identifier of the encValue content
+ -- (may be meaningful only to the sending entity, and used only
+ -- if EncryptedValue might be re-examined by the sending entity
+ -- in the future)
+ encValue BIT STRING }
+ -- the encrypted value itself
+ -- When EncryptedValue is used to carry a private key (as opposed to
+ -- a certificate), implementations MUST support the encValue field
+ -- containing an encrypted PrivateKeyInfo as defined in [PKCS11],
+ -- section 12.11. If encValue contains some other format/encoding
+ -- for the private key, the first octet of valueHint MAY be used
+ -- to indicate the format/encoding (but note that the possible values
+ -- of this octet are not specified at this time). In all cases, the
+ -- intendedAlg field MUST be used to indicate at least the OID of
+ -- the intended algorithm of the private key, unless this information
+ -- is known a priori to both sender and receiver by some other means.
+
+ KeyGenParameters ::= OCTET STRING
+
+ --
+ -- 6.5. OldCert ID Control
+ --
+
+ regCtrl-oldCertID ATTRIBUTE ::=
+ { TYPE OldCertId IDENTIFIED BY id-regCtrl-oldCertID }
+
+ id-regCtrl-oldCertID OBJECT IDENTIFIER ::= { id-regCtrl 5 }
+
+ OldCertId ::= CertId
+
+ CertId ::= SEQUENCE {
+ issuer GeneralName,
+ serialNumber INTEGER }
+
+ --
+ -- 6.6. Protocol Encryption Key Control
+ --
+
+ regCtrl-protocolEncrKey ATTRIBUTE ::=
+ { TYPE ProtocolEncrKey IDENTIFIED BY id-regCtrl-protocolEncrKey }
+ id-regCtrl-protocolEncrKey OBJECT IDENTIFIER ::= { id-regCtrl 6 }
+
+ ProtocolEncrKey ::= SubjectPublicKeyInfo
+
+ --
+ -- 7. Registration Info in CRMF
+ --
+
+ id-regInfo OBJECT IDENTIFIER ::= { id-pkip 2 }
+
+ RegInfoSet ATTRIBUTE ::=
+ { regInfo-utf8Pairs | regInfo-certReq }
+
+ --
+ -- 7.1. utf8Pairs RegInfo Control
+ --
+
+ regInfo-utf8Pairs ATTRIBUTE ::=
+ { TYPE UTF8Pairs IDENTIFIED BY id-regInfo-utf8Pairs }
+
+ id-regInfo-utf8Pairs OBJECT IDENTIFIER ::= { id-regInfo 1 }
+ --with syntax
+ UTF8Pairs ::= UTF8String
+
+ --
+ -- 7.2. certReq RegInfo Control
+ --
+
+ regInfo-certReq ATTRIBUTE ::=
+ { TYPE CertReq IDENTIFIED BY id-regInfo-certReq }
+
+ id-regInfo-certReq OBJECT IDENTIFIER ::= { id-regInfo 2 }
+ --with syntax
+ CertReq ::= CertRequest
+
+ END
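A last illustrative sketch under the same assumptions as above (module and imports compiled with [ber]): CHOICE values such as the GeneralName inside CertId are written as {Alternative, Value} pairs in the generated Erlang API:

    %% Encode a CertId whose issuer is an rfc822Name GeneralName.
    OldCert = {'CertId', {rfc822Name, "ca@example.com"}, 4711},
    {ok, Der} = 'PKIXCRMF-2009':encode('CertId', OldCert),
    {ok, _Decoded} = 'PKIXCRMF-2009':decode('CertId', Der)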
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Protected-Part-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Protected-Part-Descriptors.asn1
index 5512f1590b..5512f1590b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Protected-Part-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Protected-Part-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/ProtocolObjectIdentifiers.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/ProtocolObjectIdentifiers.asn1
index d6e88a2e47..d6e88a2e47 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/ProtocolObjectIdentifiers.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/ProtocolObjectIdentifiers.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Coding-Attributes.asn1
index 258c5f0b23..258c5f0b23 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Presentation-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Presentation-Attributes.asn1
index c8f3a2ff33..c8f3a2ff33 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Presentation-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Presentation-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Profile-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Profile-Attributes.asn1
index 365144ff35..365144ff35 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Raster-Gr-Profile-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Raster-Gr-Profile-Attributes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Reliable-Transfer-APDU.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Reliable-Transfer-APDU.asn1
index d00570b7e7..d00570b7e7 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Reliable-Transfer-APDU.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Reliable-Transfer-APDU.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Abstract-Syntaxes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Abstract-Syntaxes.asn1
index 4a59cc403b..4a59cc403b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Abstract-Syntaxes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Abstract-Syntaxes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Generic-ROS-PDUs.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Generic-ROS-PDUs.asn1
index e55ea3c05e..e55ea3c05e 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Generic-ROS-PDUs.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Generic-ROS-PDUs.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects-extensions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects-extensions.asn1
index 671cf0e780..671cf0e780 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects-extensions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects-extensions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects.asn1
index b497e4126b..b497e4126b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Information-Objects.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Information-Objects.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Realizations.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Realizations.asn1
index 73b49c8d7a..73b49c8d7a 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Realizations.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Realizations.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Useful-Definitions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Useful-Definitions.asn1
index e526ff4600..e526ff4600 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Remote-Operations-Useful-Definitions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Remote-Operations-Useful-Definitions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1
new file mode 100644
index 0000000000..f74f76ff7c
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SCVP-2009.asn1
@@ -0,0 +1,608 @@
+ SCVP-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-scvp-02(52) }
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ Extensions{}, EXTENSION, ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57) }
+
+ AlgorithmIdentifier{}, SIGNATURE-ALGORITHM, PUBLIC-KEY, KEY-AGREE,
+ DIGEST-ALGORITHM, KEY-DERIVATION, MAC-ALGORITHM
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ Certificate, CertificateList, CertificateSerialNumber,
+ SignatureAlgorithms, SubjectPublicKeyInfo
+ FROM PKIX1Explicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-explicit-02(51) }
+
+ GeneralNames, GeneralName, KeyUsage, KeyPurposeId
+ FROM PKIX1Implicit-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkix1-implicit-02(59) }
+
+ AttributeCertificate
+ FROM PKIXAttributeCertificate-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-attribute-cert-02(47) }
+
+ OCSPResponse
+ FROM OCSP-2009
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-ocsp-02(48) }
+
+ ContentInfo, CONTENT-TYPE
+ FROM CryptographicMessageSyntax-2009
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41) }
+
+ mda-sha1
+ FROM PKIXAlgs-2009
+ { iso(1) identified-organization(3) dod(6)
+ internet(1) security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56) } ;
+
+ ContentTypes CONTENT-TYPE ::= {ct-scvp-certValRequest |
+ ct-scvp-certValResponse | ct-scvp-valPolRequest |
+ ct-scvp-valPolResponse, ... }
+
+ id-ct OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs9(9)
+ id-smime(16) 1 }
+
+ ct-scvp-certValRequest CONTENT-TYPE ::=
+ { CVRequest IDENTIFIED BY id-ct-scvp-certValRequest }
+
+ id-ct-scvp-certValRequest OBJECT IDENTIFIER ::= { id-ct 10 }
+
+ -- SCVP Certificate Validation Request
+
+ CVRequest ::= SEQUENCE {
+ cvRequestVersion INTEGER DEFAULT 1,
+ query Query,
+ requestorRef [0] GeneralNames OPTIONAL,
+ requestNonce [1] OCTET STRING OPTIONAL,
+ requestorName [2] GeneralName OPTIONAL,
+ responderName [3] GeneralName OPTIONAL,
+ requestExtensions [4] Extensions{{RequestExtensions}}
+ OPTIONAL,
+ signatureAlg [5] AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}}
+ OPTIONAL,
+ hashAlg [6] OBJECT IDENTIFIER OPTIONAL,
+ requestorText [7] UTF8String (SIZE (1..256)) OPTIONAL
+ }
+
+ -- Set of signature algorithms is coming from RFC 5280
+ -- SignatureAlgorithms SIGNATURE-ALGORITHM ::= {...}
+
+ -- Add supported request extensions here; all new items should
+ -- be added after the extension marker
+
+ RequestExtensions EXTENSION ::= {...}
+
+ Query ::= SEQUENCE {
+ queriedCerts CertReferences,
+ checks CertChecks,
+ wantBack [1] WantBack OPTIONAL,
+ validationPolicy ValidationPolicy,
+ responseFlags ResponseFlags OPTIONAL,
+ serverContextInfo [2] OCTET STRING OPTIONAL,
+ validationTime [3] GeneralizedTime OPTIONAL,
+ intermediateCerts [4] CertBundle OPTIONAL,
+ revInfos [5] RevocationInfos OPTIONAL,
+ producedAt [6] GeneralizedTime OPTIONAL,
+ queryExtensions [7] Extensions{{QueryExtensions}} OPTIONAL
+ }
+
+ -- Add supported query extensions here; all new items should be added
+ -- after the extension marker
+
+ QueryExtensions EXTENSION ::= {...}
+
+ CertReferences ::= CHOICE {
+ pkcRefs [0] SEQUENCE SIZE (1..MAX) OF PKCReference,
+ acRefs [1] SEQUENCE SIZE (1..MAX) OF ACReference
+ }
+
+ CertReference::= CHOICE {
+ pkc PKCReference,
+ ac ACReference
+ }
+
+ PKCReference ::= CHOICE {
+ cert [0] Certificate,
+ pkcRef [1] SCVPCertID
+ }
+
+ ACReference ::= CHOICE {
+ attrCert [2] AttributeCertificate,
+ acRef [3] SCVPCertID
+ }
+
+ HashAlgorithm ::= AlgorithmIdentifier{DIGEST-ALGORITHM,
+ {mda-sha1, ...}}
+
+ SCVPCertID ::= SEQUENCE {
+ certHash OCTET STRING,
+ issuerSerial SCVPIssuerSerial,
+ hashAlgorithm HashAlgorithm
+ DEFAULT { algorithm mda-sha1.&id }
+ }
+
+ SCVPIssuerSerial ::= SEQUENCE {
+ issuer GeneralNames,
+ serialNumber CertificateSerialNumber
+ }
+
+ ValidationPolicy ::= SEQUENCE {
+ validationPolRef ValidationPolRef,
+ validationAlg [0] ValidationAlg OPTIONAL,
+ userPolicySet [1] SEQUENCE SIZE (1..MAX) OF OBJECT
+ IDENTIFIER OPTIONAL,
+ inhibitPolicyMapping [2] BOOLEAN OPTIONAL,
+ requireExplicitPolicy [3] BOOLEAN OPTIONAL,
+ inhibitAnyPolicy [4] BOOLEAN OPTIONAL,
+ trustAnchors [5] TrustAnchors OPTIONAL,
+ keyUsages [6] SEQUENCE OF KeyUsage OPTIONAL,
+ extendedKeyUsages [7] SEQUENCE OF KeyPurposeId OPTIONAL,
+ specifiedKeyUsages [8] SEQUENCE OF KeyPurposeId OPTIONAL
+ }
+
+ CertChecks ::= SEQUENCE SIZE (1..MAX) OF
+ OBJECT IDENTIFIER (CertCheckSet | ACertCheckSet, ... )
+
+ WantBack ::= SEQUENCE SIZE (1..MAX) OF
+ WANT-BACK.&id ({AllWantBacks})
+
+ POLICY ::= ATTRIBUTE
+
+ ValidationPolRefSet POLICY ::= {
+ svp-defaultValPolicy, ...
+ }
+
+ ValidationPolRef ::= SEQUENCE {
+ valPolId POLICY.&id,
+ valPolParams POLICY.&Type OPTIONAL
+ }
+
+ ValidationAlgSet POLICY ::= {
+ svp-basicValAlg, ...
+ }
+
+ ValidationAlg ::= SEQUENCE {
+ valAlgId POLICY.&id,
+ parameters POLICY.&Type OPTIONAL
+ }
+
+ NameValidationAlgSet POLICY ::= {
+ svp-nameValAlg, ...
+ }
+
+ NameValidationAlgParams ::= SEQUENCE {
+ nameCompAlgId OBJECT IDENTIFIER (NameCompAlgSet, ... ),
+ validationNames GeneralNames
+ }
+
+ TrustAnchors ::= SEQUENCE SIZE (1..MAX) OF PKCReference
+ KeyAgreePublicKey ::= SEQUENCE {
+ algorithm AlgorithmIdentifier{KEY-AGREE,
+ {SupportedKeyAgreePublicKeys}},
+ publicKey BIT STRING,
+ macAlgorithm AlgorithmIdentifier{MAC-ALGORITHM,
+ {SupportedMACAlgorithms}},
+ kDF AlgorithmIdentifier{KEY-DERIVATION,
+ {SupportedKeyDerivationFunctions}}
+ OPTIONAL
+ }
+
+ SupportedKeyAgreePublicKeys KEY-AGREE ::= {...}
+ SupportedMACAlgorithms MAC-ALGORITHM ::= {...}
+ SupportedKeyDerivationFunctions KEY-DERIVATION ::= {...}
+
+ ResponseFlags ::= SEQUENCE {
+ fullRequestInResponse [0] BOOLEAN DEFAULT FALSE,
+ responseValidationPolByRef [1] BOOLEAN DEFAULT TRUE,
+ protectResponse [2] BOOLEAN DEFAULT TRUE,
+ cachedResponse [3] BOOLEAN DEFAULT TRUE
+ }
+
+ CertBundle ::= SEQUENCE SIZE (1..MAX) OF Certificate
+
+ RevocationInfos ::= SEQUENCE SIZE (1..MAX) OF RevocationInfo
+
+ RevocationInfo ::= CHOICE {
+ crl [0] CertificateList,
+ delta-crl [1] CertificateList,
+ ocsp [2] OCSPResponse,
+ other [3] OtherRevInfo
+ }
+
+ REV-INFO ::= TYPE-IDENTIFIER
+
+ OtherRevInfo ::= SEQUENCE {
+ riType REV-INFO.&id,
+ riValue REV-INFO.&Type
+ }
+
+ -- SCVP Certificate Validation Response
+
+ ct-scvp-certValResponse CONTENT-TYPE ::=
+ { CVResponse IDENTIFIED BY id-ct-scvp-certValResponse }
+
+ id-ct-scvp-certValResponse OBJECT IDENTIFIER ::= { id-ct 11 }
+
+ CVResponse ::= SEQUENCE {
+ cvResponseVersion INTEGER,
+ serverConfigurationID INTEGER,
+ producedAt GeneralizedTime,
+ responseStatus ResponseStatus,
+ respValidationPolicy [0] RespValidationPolicy OPTIONAL,
+ requestRef [1] RequestReference OPTIONAL,
+ requestorRef [2] GeneralNames OPTIONAL,
+ requestorName [3] GeneralNames OPTIONAL,
+ replyObjects [4] ReplyObjects OPTIONAL,
+ respNonce [5] OCTET STRING OPTIONAL,
+ serverContextInfo [6] OCTET STRING OPTIONAL,
+ cvResponseExtensions [7] Extensions{{CVResponseExtensions}}
+ OPTIONAL,
+ requestorText [8] UTF8String (SIZE (1..256)) OPTIONAL
+ }
+
+ -- This document defines no extensions
+ CVResponseExtensions EXTENSION ::= {...}
+
+ ResponseStatus ::= SEQUENCE {
+ statusCode CVStatusCode DEFAULT okay,
+ errorMessage UTF8String OPTIONAL
+ }
+
+ CVStatusCode ::= ENUMERATED {
+ okay (0),
+ skipUnrecognizedItems (1),
+ tooBusy (10),
+ invalidRequest (11),
+ internalError (12),
+ badStructure (20),
+ unsupportedVersion (21),
+ abortUnrecognizedItems (22),
+ unrecognizedSigKey (23),
+ badSignatureOrMAC (24),
+ unableToDecode (25),
+ notAuthorized (26),
+ unsupportedChecks (27),
+ unsupportedWantBacks (28),
+ unsupportedSignatureOrMAC (29),
+ invalidSignatureOrMAC (30),
+ protectedResponseUnsupported (31),
+ unrecognizedResponderName (32),
+ relayingLoop (40),
+ unrecognizedValPol (50),
+ unrecognizedValAlg (51),
+ fullRequestInResponseUnsupported (52),
+ fullPolResponseUnsupported (53),
+ inhibitPolicyMappingUnsupported (54),
+ requireExplicitPolicyUnsupported (55),
+ inhibitAnyPolicyUnsupported (56),
+ validationTimeUnsupported (57),
+ unrecognizedCritQueryExt (63),
+ unrecognizedCritRequestExt (64),
+ ...
+ }
+
+ RespValidationPolicy ::= ValidationPolicy
+
+ RequestReference ::= CHOICE {
+ requestHash [0] HashValue, -- hash of CVRequest
+ fullRequest [1] CVRequest }
+
+ HashValue ::= SEQUENCE {
+ algorithm HashAlgorithm
+ DEFAULT { algorithm mda-sha1.&id },
+ value OCTET STRING }
+
+ ReplyObjects ::= SEQUENCE SIZE (1..MAX) OF CertReply
+
+ CertReply ::= SEQUENCE {
+ cert CertReference,
+ replyStatus ReplyStatus DEFAULT success,
+ replyValTime GeneralizedTime,
+ replyChecks ReplyChecks,
+ replyWantBacks ReplyWantBacks,
+ validationErrors [0] SEQUENCE SIZE (1..MAX) OF
+ OBJECT IDENTIFIER ( BasicValidationErrorSet |
+ NameValidationErrorSet,
+ ... ) OPTIONAL,
+ nextUpdate [1] GeneralizedTime OPTIONAL,
+ certReplyExtensions [2] Extensions{{...}} OPTIONAL
+ }
+
+ ReplyStatus ::= ENUMERATED {
+ success (0),
+ malformedPKC (1),
+ malformedAC (2),
+ unavailableValidationTime (3),
+ referenceCertHashFail (4),
+ certPathConstructFail (5),
+ certPathNotValid (6),
+ certPathNotValidNow (7),
+ wantBackUnsatisfied (8)
+ }
+ ReplyChecks ::= SEQUENCE OF ReplyCheck
+
+ ReplyCheck ::= SEQUENCE {
+ check OBJECT IDENTIFIER (CertCheckSet | ACertCheckSet, ... ),
+ status INTEGER DEFAULT 0
+ }
+
+ ReplyWantBacks ::= SEQUENCE OF ReplyWantBack
+
+ ReplyWantBack::= SEQUENCE {
+ wb WANT-BACK.&id({AllWantBacks}),
+ value OCTET STRING
+ (CONTAINING WANT-BACK.&Type({AllWantBacks}{@wb}))
+ }
+
+ WANT-BACK ::= TYPE-IDENTIFIER
+
+ AllWantBacks WANT-BACK ::= {
+ WantBackSet | ACertWantBackSet | AnyWantBackSet, ...
+ }
+
+ CertBundles ::= SEQUENCE SIZE (1..MAX) OF CertBundle
+
+ RevInfoWantBack ::= SEQUENCE {
+ revocationInfo RevocationInfos,
+ extraCerts CertBundle OPTIONAL
+ }
+
+ SCVPResponses ::= SEQUENCE OF ContentInfo
+
+ -- SCVP Validation Policies Request
+
+ ct-scvp-valPolRequest CONTENT-TYPE ::=
+ { ValPolRequest IDENTIFIED BY id-ct-scvp-valPolRequest }
+
+ id-ct-scvp-valPolRequest OBJECT IDENTIFIER ::= { id-ct 12 }
+
+ ValPolRequest ::= SEQUENCE {
+ vpRequestVersion INTEGER DEFAULT 1,
+ requestNonce OCTET STRING
+ }
+
+ -- SCVP Validation Policies Response
+
+ ct-scvp-valPolResponse CONTENT-TYPE ::=
+ { ValPolResponse IDENTIFIED BY id-ct-scvp-valPolResponse }
+
+ id-ct-scvp-valPolResponse OBJECT IDENTIFIER ::= { id-ct 13 }
+ ValPolResponse ::= SEQUENCE {
+ vpResponseVersion INTEGER,
+ maxCVRequestVersion INTEGER,
+ maxVPRequestVersion INTEGER,
+ serverConfigurationID INTEGER,
+ thisUpdate GeneralizedTime,
+ nextUpdate GeneralizedTime OPTIONAL,
+ supportedChecks CertChecks,
+ supportedWantBacks WantBack,
+ validationPolicies SEQUENCE OF OBJECT IDENTIFIER,
+ validationAlgs SEQUENCE OF OBJECT IDENTIFIER,
+ authPolicies SEQUENCE OF AuthPolicy,
+ responseTypes ResponseTypes,
+ defaultPolicyValues RespValidationPolicy,
+ revocationInfoTypes RevocationInfoTypes,
+ signatureGeneration SEQUENCE OF AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ signatureVerification SEQUENCE OF AlgorithmIdentifier
+ {SIGNATURE-ALGORITHM,
+ {SignatureAlgorithms}},
+ hashAlgorithms SEQUENCE SIZE (1..MAX) OF
+ OBJECT IDENTIFIER,
+ serverPublicKeys SEQUENCE OF KeyAgreePublicKey
+ OPTIONAL,
+ clockSkew INTEGER DEFAULT 10,
+ requestNonce OCTET STRING OPTIONAL
+ }
+
+ ResponseTypes ::= ENUMERATED {
+ cached-only (0),
+ non-cached-only (1),
+ cached-and-non-cached (2)
+ }
+
+ RevocationInfoTypes ::= BIT STRING {
+ fullCRLs (0),
+ deltaCRLs (1),
+ indirectCRLs (2),
+ oCSPResponses (3)
+ }
+
+ AuthPolicy ::= OBJECT IDENTIFIER
+
+ -- SCVP Check Identifiers
+
+ id-stc OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 17 }
+
+ CertCheckSet OBJECT IDENTIFIER ::= {
+ id-stc-build-pkc-path | id-stc-build-valid-pkc-path |
+ id-stc-build-status-checked-pkc-path, ... }
+
+ id-stc-build-pkc-path OBJECT IDENTIFIER ::= { id-stc 1 }
+ id-stc-build-valid-pkc-path OBJECT IDENTIFIER ::= { id-stc 2 }
+ id-stc-build-status-checked-pkc-path
+ OBJECT IDENTIFIER ::= { id-stc 3 }
+
+ ACertCheckSet OBJECT IDENTIFIER ::= {
+ id-stc-build-aa-path | id-stc-build-valid-aa-path |
+ id-stc-build-status-checked-aa-path |
+ id-stc-status-check-ac-and-build-status-checked-aa-path
+ }
+
+ id-stc-build-aa-path OBJECT IDENTIFIER ::= { id-stc 4 }
+ id-stc-build-valid-aa-path OBJECT IDENTIFIER ::= { id-stc 5 }
+ id-stc-build-status-checked-aa-path
+ OBJECT IDENTIFIER ::= { id-stc 6 }
+ id-stc-status-check-ac-and-build-status-checked-aa-path
+ OBJECT IDENTIFIER ::= { id-stc 7 }
+
+ -- SCVP WantBack Identifiers
+
+ id-swb OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 18 }
+
+ WantBackSet WANT-BACK ::= {
+ swb-pkc-cert | swb-pkc-best-cert-path |
+ swb-pkc-revocation-info | swb-pkc-public-key-info |
+ swb-pkc-all-cert-paths | swb-pkc-ee-revocation-info |
+ swb-pkc-CAs-revocation-info
+ }
+
+ ACertWantBackSet WANT-BACK ::= {
+ swb-ac-cert | swb-aa-cert-path |
+ swb-aa-revocation-info | swb-ac-revocation-info
+ }
+
+ AnyWantBackSet WANT-BACK ::= { swb-relayed-responses }
+
+ swb-pkc-best-cert-path WANT-BACK ::=
+ { CertBundle IDENTIFIED BY id-swb-pkc-best-cert-path }
+ id-swb-pkc-best-cert-path OBJECT IDENTIFIER ::= { id-swb 1 }
+ swb-pkc-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-pkc-revocation-info }
+ id-swb-pkc-revocation-info OBJECT IDENTIFIER ::= { id-swb 2 }
+
+ swb-pkc-public-key-info WANT-BACK ::=
+ { SubjectPublicKeyInfo IDENTIFIED BY id-swb-pkc-public-key-info }
+ id-swb-pkc-public-key-info OBJECT IDENTIFIER ::= { id-swb 4 }
+
+ swb-aa-cert-path WANT-BACK ::=
+ {CertBundle IDENTIFIED BY id-swb-aa-cert-path }
+ id-swb-aa-cert-path OBJECT IDENTIFIER ::= { id-swb 5 }
+
+ swb-aa-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-aa-revocation-info }
+ id-swb-aa-revocation-info OBJECT IDENTIFIER ::= { id-swb 6 }
+
+ swb-ac-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-ac-revocation-info }
+ id-swb-ac-revocation-info OBJECT IDENTIFIER ::= { id-swb 7 }
+
+ swb-relayed-responses WANT-BACK ::=
+ {SCVPResponses IDENTIFIED BY id-swb-relayed-responses }
+
+ id-swb-relayed-responses OBJECT IDENTIFIER ::= { id-swb 9 }
+
+ swb-pkc-all-cert-paths WANT-BACK ::=
+ {CertBundles IDENTIFIED BY id-swb-pkc-all-cert-paths }
+ id-swb-pkc-all-cert-paths OBJECT IDENTIFIER ::= { id-swb 12}
+
+ swb-pkc-ee-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-pkc-ee-revocation-info }
+ id-swb-pkc-ee-revocation-info OBJECT IDENTIFIER ::= { id-swb 13}
+
+ swb-pkc-CAs-revocation-info WANT-BACK ::=
+ { RevInfoWantBack IDENTIFIED BY id-swb-pkc-CAs-revocation-info }
+ id-swb-pkc-CAs-revocation-info OBJECT IDENTIFIER ::= { id-swb 14}
+
+ swb-pkc-cert WANT-BACK ::=
+ { Certificate IDENTIFIED BY id-swb-pkc-cert }
+ id-swb-pkc-cert OBJECT IDENTIFIER ::= { id-swb 10}
+
+ swb-ac-cert WANT-BACK ::=
+ { AttributeCertificate IDENTIFIED BY id-swb-ac-cert }
+ id-swb-ac-cert OBJECT IDENTIFIER ::= { id-swb 11}
+
+ -- SCVP Validation Policy and Algorithm Identifiers
+
+ id-svp OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 19 }
+
+ svp-defaultValPolicy POLICY ::=
+ { IDENTIFIED BY id-svp-defaultValPolicy }
+
+ id-svp-defaultValPolicy OBJECT IDENTIFIER ::= { id-svp 1 }
+
+ -- SCVP Basic Validation Algorithm Identifier
+
+ svp-basicValAlg POLICY ::= {IDENTIFIED BY id-svp-basicValAlg }
+
+ id-svp-basicValAlg OBJECT IDENTIFIER ::= { id-svp 3 }
+
+ -- SCVP Basic Validation Algorithm Errors
+
+ id-bvae OBJECT IDENTIFIER ::= id-svp-basicValAlg
+
+ BasicValidationErrorSet OBJECT IDENTIFIER ::= {
+ id-bvae-expired | id-bvae-not-yet-valid |
+ id-bvae-wrongTrustAnchor | id-bvae-noValidCertPath |
+ id-bvae-revoked | id-bvae-invalidKeyPurpose |
+ id-bvae-invalidKeyUsage | id-bvae-invalidCertPolicy
+ }
+
+ id-bvae-expired OBJECT IDENTIFIER ::= { id-bvae 1 }
+ id-bvae-not-yet-valid OBJECT IDENTIFIER ::= { id-bvae 2 }
+ id-bvae-wrongTrustAnchor OBJECT IDENTIFIER ::= { id-bvae 3 }
+ id-bvae-noValidCertPath OBJECT IDENTIFIER ::= { id-bvae 4 }
+ id-bvae-revoked OBJECT IDENTIFIER ::= { id-bvae 5 }
+ id-bvae-invalidKeyPurpose OBJECT IDENTIFIER ::= { id-bvae 9 }
+ id-bvae-invalidKeyUsage OBJECT IDENTIFIER ::= { id-bvae 10 }
+ id-bvae-invalidCertPolicy OBJECT IDENTIFIER ::= { id-bvae 11 }
+
+ -- SCVP Name Validation Algorithm Identifier
+
+ svp-nameValAlg POLICY ::=
+ {TYPE NameValidationAlgParams IDENTIFIED BY id-svp-nameValAlg }
+
+ id-svp-nameValAlg OBJECT IDENTIFIER ::= { id-svp 2 }
+
+ -- SCVP Name Validation Algorithm DN comparison algorithm
+
+ NameCompAlgSet OBJECT IDENTIFIER ::= {
+ id-nva-dnCompAlg
+ }
+
+ id-nva-dnCompAlg OBJECT IDENTIFIER ::= { id-svp 4 }
+ -- SCVP Name Validation Algorithm Errors
+
+ id-nvae OBJECT IDENTIFIER ::= id-svp-nameValAlg
+
+ NameValidationErrorSet OBJECT IDENTIFIER ::= {
+ id-nvae-name-mismatch | id-nvae-no-name | id-nvae-unknown-alg |
+ id-nvae-bad-name | id-nvae-bad-name-type | id-nvae-mixed-names
+ }
+
+ id-nvae-name-mismatch OBJECT IDENTIFIER ::= { id-nvae 1 }
+ id-nvae-no-name OBJECT IDENTIFIER ::= { id-nvae 2 }
+ id-nvae-unknown-alg OBJECT IDENTIFIER ::= { id-nvae 3 }
+ id-nvae-bad-name OBJECT IDENTIFIER ::= { id-nvae 4 }
+ id-nvae-bad-name-type OBJECT IDENTIFIER ::= { id-nvae 5 }
+ id-nvae-mixed-names OBJECT IDENTIFIER ::= { id-nvae 6 }
+
+ -- SCVP Extended Key Usage Key Purpose Identifiers
+
+ id-kp OBJECT IDENTIFIER ::=
+ { iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) 3 }
+
+ SvcpExtKeyUsageSet OBJECT IDENTIFIER ::= {
+ id-kp-scvpServer | id-kp-scvpClient
+ }
+
+ id-kp-scvpServer OBJECT IDENTIFIER ::= { id-kp 15 }
+
+ id-kp-scvpClient OBJECT IDENTIFIER ::= { id-kp 16 }
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1 b/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1
new file mode 100644
index 0000000000..2bd2aaa435
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SecureMimeMessageV3dot1-2009.asn1
@@ -0,0 +1,122 @@
+ SecureMimeMessageV3dot1-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-msg-v3dot1-02(39)}
+ DEFINITIONS IMPLICIT TAGS ::=
+ BEGIN
+ IMPORTS
+
+ SMIME-CAPS, SMIMECapabilities{}
+ FROM AlgorithmInformation-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-algorithmInformation-02(58)}
+
+ ATTRIBUTE
+ FROM PKIX-CommonTypes-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0) id-mod-pkixCommon-02(57)}
+
+ SubjectKeyIdentifier, IssuerAndSerialNumber, RecipientKeyIdentifier
+ FROM CryptographicMessageSyntax-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cms-2004-02(41)}
+
+ rc2-cbc, SMimeCaps
+ FROM CryptographicMessageSyntaxAlgorithms-2009
+ {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) modules(0) id-mod-cmsalg-2001-02(37)}
+
+ SMimeCaps
+ FROM PKIXAlgs-2009
+ {iso(1) identified-organization(3) dod(6) internet(1) security(5)
+ mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-algorithms2008-02(56)}
+
+ SMimeCaps
+ FROM PKIX1-PSS-OAEP-Algorithms-2009
+ {iso(1) identified-organization(3) dod(6) internet(1)
+ security(5) mechanisms(5) pkix(7) id-mod(0)
+ id-mod-pkix1-rsa-pkalgs-02(54)};
+
+ SMimeAttributeSet ATTRIBUTE ::=
+ { aa-smimeCapabilities | aa-encrypKeyPref, ... }
+
+ -- id-aa is the arc with all new authenticated and unauthenticated
+ -- attributes produced by the S/MIME Working Group
+
+ id-aa OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) usa(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ smime(16) attributes(2)}
+
+ -- The S/MIME Capabilities attribute provides a method of broadcasting
+ -- the symmetric capabilities understood. Algorithms SHOULD be ordered
+ -- by preference and grouped by type
+
+ aa-smimeCapabilities ATTRIBUTE ::=
+ { TYPE SMIMECapabilities{{SMimeCapsSet}} IDENTIFIED BY
+ smimeCapabilities }
+ smimeCapabilities OBJECT IDENTIFIER ::=
+ { iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ 15 }
+
+ SMimeCapsSet SMIME-CAPS ::=
+ { cap-preferBinaryInside | cap-RC2CBC |
+ PKIXAlgs-2009.SMimeCaps |
+ CryptographicMessageSyntaxAlgorithms-2009.SMimeCaps |
+ PKIX1-PSS-OAEP-Algorithms-2009.SMimeCaps, ... }
+
+ -- Encryption Key Preference provides a method of broadcasting the
+ -- preferred encryption certificate.
+
+ aa-encrypKeyPref ATTRIBUTE ::=
+ { TYPE SMIMEEncryptionKeyPreference
+ IDENTIFIED BY id-aa-encrypKeyPref }
+
+ id-aa-encrypKeyPref OBJECT IDENTIFIER ::= {id-aa 11}
+
+ SMIMEEncryptionKeyPreference ::= CHOICE {
+ issuerAndSerialNumber [0] IssuerAndSerialNumber,
+ receipentKeyId [1] RecipientKeyIdentifier,
+ subjectAltKeyIdentifier [2] SubjectKeyIdentifier
+ }
+
+ -- receipentKeyId is spelt incorrectly, but kept for historical
+ -- reasons.
+
+ id-smime OBJECT IDENTIFIER ::= { iso(1) member-body(2)
+ us(840) rsadsi(113549) pkcs(1) pkcs9(9) 16 }
+
+ id-cap OBJECT IDENTIFIER ::= { id-smime 11 }
+
+ -- The preferBinaryInside indicates an ability to receive messages
+ -- with binary encoding inside the CMS wrapper
+
+ cap-preferBinaryInside SMIME-CAPS ::=
+ { -- No value -- IDENTIFIED BY id-cap-preferBinaryInside }
+
+ id-cap-preferBinaryInside OBJECT IDENTIFIER ::= { id-cap 1 }
+
+ -- The following list OIDs to be used with S/MIME V3
+
+ -- Signature Algorithms Not Found in [RFC3370]
+ --
+ -- md2WithRSAEncryption OBJECT IDENTIFIER ::=
+ -- {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-1(1)
+ -- 2}
+ --
+ -- Other Signed Attributes
+ --
+ -- signingTime OBJECT IDENTIFIER ::=
+ -- {iso(1) member-body(2) us(840) rsadsi(113549) pkcs(1) pkcs-9(9)
+ -- 5}
+ -- See [RFC5652] for a description of how to encode the attribute
+ -- value.
+
+ cap-RC2CBC SMIME-CAPS ::=
+ { TYPE SMIMECapabilitiesParametersForRC2CBC
+ IDENTIFIED BY rc2-cbc}
+
+ SMIMECapabilitiesParametersForRC2CBC ::= INTEGER (40 | 128, ...)
+ -- (RC2 Key Length (number of bits))
+
+ END
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SelectedAttributeTypes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SelectedAttributeTypes.asn1
index 07bba30690..07bba30690 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/SelectedAttributeTypes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SelectedAttributeTypes.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SeseAPDUs.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SeseAPDUs.asn1
index 2917122e94..2917122e94 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/SeseAPDUs.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SeseAPDUs.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/SpkmGssTokens.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/SpkmGssTokens.asn1
index 02205bd64c..02205bd64c 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/SpkmGssTokens.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/SpkmGssTokens.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Style-Descriptors.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Style-Descriptors.asn1
index 8f033eab6f..8f033eab6f 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Style-Descriptors.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Style-Descriptors.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Subprofiles.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Subprofiles.asn1
index bfcd0b5dbc..bfcd0b5dbc 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Subprofiles.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Subprofiles.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Temporal-Relationships.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Temporal-Relationships.asn1
index 9633995e3b..9633995e3b 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Temporal-Relationships.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Temporal-Relationships.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Text-Units.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Text-Units.asn1
index ccc64a52f5..ccc64a52f5 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Text-Units.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Text-Units.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/UpperBounds.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/UpperBounds.asn1
index c97c83a569..c97c83a569 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/UpperBounds.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/UpperBounds.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/UsefulDefinitions.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/UsefulDefinitions.asn1
index d9601bb7d0..d9601bb7d0 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/UsefulDefinitions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/UsefulDefinitions.asn1
diff --git a/lib/asn1/test/asn1_SUITE_data/x420/Videotex-Coding-Attributes.asn b/lib/asn1/test/asn1_SUITE_data/rfcs/Videotex-Coding-Attributes.asn1
index 18e51cbc0d..18e51cbc0d 100644
--- a/lib/asn1/test/asn1_SUITE_data/x420/Videotex-Coding-Attributes.asn
+++ b/lib/asn1/test/asn1_SUITE_data/rfcs/Videotex-Coding-Attributes.asn1
diff --git a/lib/asn1/test/asn1_test_lib.erl b/lib/asn1/test/asn1_test_lib.erl
index da07cd1118..a5f46046ff 100644
--- a/lib/asn1/test/asn1_test_lib.erl
+++ b/lib/asn1/test/asn1_test_lib.erl
@@ -21,6 +21,7 @@
-export([compile/3,compile_all/3,compile_erlang/3,
hex_to_bin/1,
+ match_value/2,
parallel/0,
roundtrip/3,roundtrip/4,roundtrip_enc/3,roundtrip_enc/4]).
@@ -106,6 +107,24 @@ compile_erlang(Mod, Config, Options) ->
hex_to_bin(S) ->
<< <<(hex2num(C)):4>> || C <- S, C =/= $\s >>.
+%% match_value(Pattern, Value) -> ok.
+%% Match Pattern against Value. If the Pattern contains '_' in any
+%% position, the corresponding position in the Value can be
+%% anything. Generate an exception if the Pattern and Value don't
+%% match.
+
+match_value('_', _) ->
+ ok;
+match_value([H1|T1], [H2|T2]) ->
+ match_value(H1, H2),
+ match_value(T1, T2);
+match_value(T1, T2) when tuple_size(T1) =:= tuple_size(T2) ->
+ match_value_tuple(1, T1, T2);
+match_value(Same, Same) ->
+ ok;
+match_value(V1, V2) ->
+ error({nomatch,V1,V2}).
+
roundtrip(Mod, Type, Value) ->
roundtrip(Mod, Type, Value, Value).
@@ -132,6 +151,12 @@ hex2num(C) when $0 =< C, C =< $9 -> C - $0;
hex2num(C) when $A =< C, C =< $F -> C - $A + 10;
hex2num(C) when $a =< C, C =< $f -> C - $a + 10.
+match_value_tuple(I, T1, T2) when I =< tuple_size(T1) ->
+ match_value(element(I, T1), element(I, T2)),
+ match_value_tuple(I+1, T1, T2);
+match_value_tuple(_, _, _) ->
+ ok.
+
test_ber_indefinite(Mod, Type, Encoded, ExpectedValue) ->
case Mod:encoding_rule() of
ber ->
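An illustrative usage sketch of the new match_value/2 helper (not part of the patch; the sample tuples below are hypothetical test values):

    %% Illustrative only. '_' in the pattern matches any term in that
    %% position; everything else must match exactly.
    ok = asn1_test_lib:match_value({'Seq',42,'_'}, {'Seq',42,<<1,2,3>>}),
    %% A mismatch raises error({nomatch,PatternPart,ValuePart}) for the
    %% first offending position, e.g. {nomatch,41,42} here:
    {'EXIT',{{nomatch,41,42},_}} =
        (catch asn1_test_lib:match_value({'Seq',41,'_'}, {'Seq',42,<<1,2,3>>})).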
diff --git a/lib/asn1/test/error_SUITE.erl b/lib/asn1/test/error_SUITE.erl
index 1edd60f7c8..a9893b91cc 100644
--- a/lib/asn1/test/error_SUITE.erl
+++ b/lib/asn1/test/error_SUITE.erl
@@ -19,9 +19,12 @@
-module(error_SUITE).
-export([suite/0,all/0,groups/0,
- already_defined/1,bitstrings/1,enumerated/1,
- imports/1,instance_of/1,integers/1,objects/1,
- parameterization/1,values/1]).
+ already_defined/1,bitstrings/1,
+ classes/1,constraints/1,constructed/1,enumerated/1,
+ imports_exports/1,instance_of/1,integers/1,objects/1,
+ object_field_extraction/1,oids/1,rel_oids/1,
+ object_sets/1,parameterization/1,
+ syntax/1,table_constraints/1,tags/1,values/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -34,12 +37,22 @@ groups() ->
[{p,parallel(),
[already_defined,
bitstrings,
+ classes,
+ constraints,
+ constructed,
enumerated,
- imports,
+ imports_exports,
instance_of,
integers,
objects,
+ object_field_extraction,
+ object_sets,
+ oids,
+ rel_oids,
parameterization,
+ syntax,
+ table_constraints,
+ tags,
values]}].
parallel() ->
@@ -94,6 +107,46 @@ bitstrings(Config) ->
]} = run(P, Config),
ok.
+classes(Config) ->
+ M = 'Classes',
+ P = {M,
+ <<"Classes DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " LowerCase ::= CLASS { &id INTEGER UNIQUE }\n"
+ " CL ::= CLASS { &id INTEGER UNIQUE DEFAULT 42}\n"
+ "END\n">>},
+ {error,
+ [{structured_error,{M,2},asn1ct_check,
+ {illegal_class_name,'LowerCase'}},
+ {structured_error,{M,3},asn1ct_check,
+ {unique_and_default,id}}
+ ]} = run(P, Config),
+ ok.
+
+constraints(Config) ->
+ M = 'Constraints',
+ P = {M,
+ <<"Constraints DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " II-1 ::= INTEGER (holder-1.&obj)\n"
+ " II-2 ::= INTEGER ('1234'H<..20)\n"
+ " II-3 ::= INTEGER (1..<\"abc\")\n"
+ " II-4 ::= INTEGER (10..1)\n"
+
+ " HOLDER ::= CLASS {\n"
+ " &obj HOLDER OPTIONAL\n"
+ " }\n"
+
+ " holder-1 HOLDER ::= { &obj holder-2 }\n"
+ " holder-2 HOLDER ::= { }\n"
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,2},asn1ct_check,illegal_value},
+ {structured_error,{M,3},asn1ct_check,illegal_integer_value},
+ {structured_error,{M,4},asn1ct_check,illegal_integer_value},
+ {structured_error,{M,5},asn1ct_check,reversed_range}
+ ]} = run(P, Config),
+ ok.
+
enumerated(Config) ->
M = 'Enumerated',
P = {M,
@@ -111,38 +164,77 @@ enumerated(Config) ->
" S2 ::= SEQUENCE {\n"
" e2 EnumExt DEFAULT xyz\n"
" }\n"
+
+ " BadEnum1 ::= ENUMERATED {a, b, c, b }\n"
+ " BadEnum2 ::= ENUMERATED {a(1), b(2), b(3) }\n"
+ " BadEnum3 ::= ENUMERATED {a(1), b(1) }\n"
+ " BadEnum4 ::= ENUMERATED {a, b, ..., c(0) }\n"
+ " BadEnum5 ::= ENUMERATED {a, b, ..., c(10), d(5) }\n"
"END\n">>},
{error,
[
- {structured_error,{'Enumerated',3},asn1ct_check,{undefined,d}},
- {structured_error,{'Enumerated',5},asn1ct_check,{undefined,z}},
- {structured_error,{'Enumerated',10},asn1ct_check,{undefined,aa}},
- {structured_error,{'Enumerated',13},asn1ct_check,{undefined,xyz}}
+ {structured_error,{M,3},asn1ct_check,{undefined,d}},
+ {structured_error,{M,5},asn1ct_check,{undefined,z}},
+ {structured_error,{M,6},asn1ct_check,{undefined,aa}},
+ {structured_error,{M,12},asn1ct_check,{undefined,xyz}},
+ {structured_error,{M,15},asn1ct_check,
+ {enum_illegal_redefinition,b}},
+ {structured_error,{M,16},asn1ct_check,
+ {enum_illegal_redefinition,b}},
+ {structured_error,{M,17},asn1ct_check,
+ {enum_reused_value,b,1}},
+ {structured_error,{M,18},asn1ct_check,
+ {enum_reused_value,c,0}},
+ {structured_error,{M,19},asn1ct_check,
+ {enum_not_ascending,d,5,10}}
]
} = run(P, Config),
ok.
-imports(Config) ->
+imports_exports(Config) ->
Ext = 'ExternalModule',
ExtP = {Ext,
<<"ExternalModule DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "IMPORTS\n"
+ " Int, NonExistingImport FROM ImportsFrom;\n"
+
+ " Existing ::= INTEGER\n"
"END\n">>},
- ok = run(ExtP, Config),
+ {error,
+ [{structured_error,
+ {Ext,3},
+ asn1ct_check,
+ {undefined_import,'NonExistingImport',
+ 'ImportsFrom'}}]} = run(ExtP, Config),
M = 'Imports',
P = {M,
<<"Imports DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
- "IMPORTS NotDefined FROM ExternalModule\n"
- "X FROM UndefinedModule objid\n"
- "Y, Z FROM UndefinedModule2;\n"
+ "EXPORTS\n"
+ " T, UndefinedType;\n"
+
+ "IMPORTS\n"
+ " NotDefined, Existing, Int, NonExistingImport\n"
+ " FROM ExternalModule\n"
+ " X FROM UndefinedModule objid\n"
+ " Y, Z FROM UndefinedModule2;\n"
+
"objid OBJECT IDENTIFIER ::= {joint-iso-ccitt(2) remote-operations(4)\n"
" notation(0)}\n"
+ "T ::= INTEGER\n"
"END\n">>},
- {error,[{structured_error,{M,2},asn1ct_check,
- {undefined_import,'NotDefined','ExternalModule'}},
- {structured_error,{M,3},asn1ct_check,{undefined_import,'X','UndefinedModule'}},
- {structured_error,{M,4},asn1ct_check,{undefined_import,'Y','UndefinedModule2'}},
- {structured_error,{M,4},asn1ct_check,{undefined_import,'Z','UndefinedModule2'}}
+ {error,[{structured_error,{M,3},asn1ct_check,
+ {undefined_export, 'UndefinedType'}},
+ {structured_error,{M,5},asn1ct_check,
+ {undefined_import,'NonExistingImport',Ext}},
+ {structured_error,{M,5},asn1ct_check,
+ {undefined_import,'NotDefined',Ext}},
+ {structured_error,{M,7},asn1ct_check,
+ {undefined_import,'X','UndefinedModule'}},
+ {structured_error,{M,8},asn1ct_check,
+ {undefined_import,'Y','UndefinedModule2'}},
+ {structured_error,{M,8},asn1ct_check,
+ {undefined_import,'Z','UndefinedModule2'}}
]} = run(P, Config),
ok.
@@ -170,11 +262,14 @@ integers(Config) ->
" Int1 ::= INTEGER {a(1), a(1)}\n"
" Int2 ::= INTEGER {a(1), b(2), a(3)}\n"
" Int3 ::= INTEGER {x(1), y(1)}\n"
+ " i0 INTEGER ::= 1\n"
+ " Int4 ::= INTEGER {x(i0), y(undef) }\n"
"END\n">>},
{error,
[{structured_error,{M,2},asn1ct_check,{namelist_redefinition,a}},
{structured_error,{M,3},asn1ct_check,{namelist_redefinition,a}},
- {structured_error,{M,4},asn1ct_check,{value_reused,1}}
+ {structured_error,{M,4},asn1ct_check,{value_reused,1}},
+ {structured_error,{M,6},asn1ct_check,{undefined,undef}}
]} = run(P, Config),
ok.
@@ -188,6 +283,11 @@ objects(Config) ->
" obj3 CL ::= { &Data OCTET STRING }\n"
" obj4 SMALL ::= { &code 42 }\n"
" InvalidSet CL ::= { obj1 }\n"
+ " obj5 CL ::= {}\n"
+ " ErrSet ::= PT{ {PT{inst}}}\n"
+ " obj6 CL ::= 7\n"
+ " obj7 CL ::= int\n"
+ " obj8 NON-CLASS ::= { &id 1 }\n"
" CL ::= CLASS {\n"
" &code INTEGER UNIQUE,\n"
@@ -203,6 +303,12 @@ objects(Config) ->
" &code INTEGER UNIQUE,\n"
" &i INTEGER\n"
" }\n"
+
+ " PT{SMALL:Small} ::= SEQUENCE { a SMALL.&code ({Small}) }\n"
+ " inst SMALL ::= {&code 42, &i 4711}\n"
+
+ " int INTEGER ::= 42\n"
+ " NON-CLASS ::= SEQUENCE { a BOOLEAN }\n"
"END\n">>},
{error,
[
@@ -216,24 +322,490 @@ objects(Config) ->
{structured_error,{M,5},asn1ct_check,
{missing_mandatory_fields,[i],obj4}},
{structured_error,{M,6},asn1ct_check,
- {invalid_fields,[wrong],'InvalidSet'}}
+ {invalid_fields,[wrong],'InvalidSet'}},
+ {structured_error,{M,7},asn1ct_check,
+ {missing_mandatory_fields,
+ ['Data','Set','VarTypeValue',code,enum,object,
+ vartypevalue],obj5}},
+ {structured_error,{M,8},asn1ct_check,invalid_objectset},
+ {structured_error,{M,9},asn1ct_check,illegal_object},
+ {structured_error,{M,10},asn1ct_check,illegal_object},
+ {structured_error,{M,11},asn1ct_check,illegal_object}
+ ]
+ } = run(P, Config),
+ ok.
+
+object_field_extraction(Config) ->
+ M = 'ObjectFieldExtraction',
+ P = {M,
+ <<"ObjectFieldExtraction DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+
+ " DataObjSet DATA-CLASS ::= {\n"
+ " holder-object-1.&int,\n"
+ " ...\n"
+ " }\n"
+
+ " DataObjSetNoExt DATA-CLASS ::= {\n"
+ " holder-object-1.&int\n"
+ " }\n"
+
+ " holder-object-1 HOLDER-CLASS ::= {\n"
+ " &int 42\n"
+ " }\n"
+
+ " HOLDER-CLASS ::= CLASS {\n"
+ " &int INTEGER\n"
+ " }\n"
+
+ " DATA-CLASS ::= CLASS {\n"
+ " &id INTEGER\n"
+ " }\n"
+
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,2},asn1ct_check,illegal_object},
+ {structured_error,{M,6},asn1ct_check,illegal_object}
+ ]
+ } = run(P, Config),
+ ok.
+
+object_sets(Config) ->
+ M = 'ObjectSets',
+ P = {M, <<"ObjectSets DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "TEST-UNIQ ::= CLASS { &id INTEGER UNIQUE, &test INTEGER }\n"
+ "UniqSet TEST-UNIQ ::= { { &id 1, &test 1 } | {&id 1, &test 2} }\n"
+
+ "DOUBLE-UNIQ ::= CLASS { &id1 INTEGER UNIQUE,"
+ " &id INTEGER UNIQUE }\n"
+ "DoubleSet DOUBLE-UNIQ ::= { {&id1 1, &id2 2} }\n"
+ "END\n">>},
+ {error,
+ [{structured_error,{M,3},asn1ct_check,{non_unique_object,1}},
+ {structured_error,{M,5},asn1ct_check,multiple_uniqs}
+ ]
+ } = run(P, Config),
+ ok.
+
+oids(Config) ->
+ M = 'OIDS',
+ P = {M,<<"OIDS DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "CONTAINER ::= CLASS { &id OBJECT IDENTIFIER UNIQUE,\n"
+ " &int INTEGER OPTIONAL,\n"
+ " &seq SEQUENCE { a INTEGER } OPTIONAL\n"
+ "}\n"
+
+ "-- This is line 6.\n"
+ "object-1 CONTAINER ::= { &id {1 2 3}, &int 42 }\n"
+ "object-2 CONTAINER ::= { &id {1 999}, &int 0 }\n"
+ "object-3 CONTAINER ::= { &id {1 2}, &seq { a 42 } }\n"
+ "oid-1 OBJECT IDENTIFIER ::= object-1.&int\n"
+ "oid-2 OBJECT IDENTIFIER ::= object-2.&id\n"
+ "oid-3 OBJECT IDENTIFIER ::= object-3.&seq\n"
+ "-- This is line 13.\n"
+
+ "oid-5 OBJECT IDENTIFIER ::= { a 42, b 19 }\n"
+
+ "oid-6 OBJECT IDENTIFIER ::= int\n"
+ "int INTEGER ::= 42\n"
+
+ "oid-7 OBJECT IDENTIFIER ::= seq\n"
+ "seq SEQUENCE { x INTEGER } ::= { x 11 }\n"
+
+ "oid-8 OBJECT IDENTIFIER ::= os\n"
+ "os OCTET STRING ::= '1234'H\n"
+
+ "oid-9 OBJECT IDENTIFIER ::= { 1 os }\n"
+
+ "oid-10 OBJECT IDENTIFIER ::= { 1 invalid }\n"
+
+ "-- This is line 23.\n"
+ "oid-11 OBJECT IDENTIFIER ::= { 0 legal-oid }\n"
+ "legal-oid OBJECT IDENTIFIER ::= {1 2 3}\n"
+
+ "bad-root-1 OBJECT IDENTIFIER ::= {99}\n"
+ "bad-root-2 OBJECT IDENTIFIER ::= {0 42}\n"
+
+ "oid-object-ref-1 OBJECT IDENTIFIER ::= object-1\n"
+ "oid-object-ref-2 OBJECT IDENTIFIER ::= { object-1 19 } \n"
+
+ "oid-int OBJECT IDENTIFIER ::= 42\n"
+ "oid-sequence OBJECT IDENTIFIER ::= {a 42, b 35}\n"
+
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,8},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,10},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,11},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,12},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,14},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,15},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,17},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,19},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,21},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,22},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,24},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,26},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,27},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,28},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,29},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,30},asn1ct_check,{illegal_oid,o_id}},
+ {structured_error,{M,31},asn1ct_check,{illegal_oid,o_id}}
]
} = run(P, Config),
ok.
+rel_oids(Config) ->
+ M = 'REL-OIDS',
+ P = {M,<<"REL-OIDS DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "legal-oid OBJECT IDENTIFIER ::= {1 2}\n"
+ "legal-roid RELATIVE-OID ::= {1 2}\n"
+ "CONTAINER ::= CLASS { &oid OBJECT IDENTIFIER OPTIONAL,\n"
+ " &int INTEGER OPTIONAL,\n"
+ " &seq SEQUENCE { a INTEGER } OPTIONAL\n"
+ "}\n"
+ "object-1 CONTAINER ::= { &oid {1 2 3},\n"
+ " &int 42,\n",
+ " &seq {a 42}\n"
+ " }\n"
+
+ "wrong-type-rel-oid-1 RELATIVE-OID ::= legal-oid\n"
+ "wrong-type-rel-oid-2 RELATIVE-OID ::= object-1.&oid\n"
+ "wrong-type-rel-oid-3 RELATIVE-OID ::= object-1.&int\n"
+ "wrong-type-rel-oid-4 RELATIVE-OID ::= object-1.&seq\n"
+ "wrong-type-rel-oid-5 RELATIVE-OID ::= object-1.&undef\n"
+
+ "oid-bad-first OBJECT IDENTIFIER ::= {legal-roid 3}\n"
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,12},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,13},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,14},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,15},asn1ct_check,{illegal_oid,rel_oid}},
+ {structured_error,{M,16},asn1ct_check,{undefined_field,undef}},
+ {structured_error,{M,17},asn1ct_check,{illegal_oid,o_id}}
+ ]
+ } = run(P, Config),
+ ok.
+
+
parameterization(Config) ->
M = 'Parameterization',
P = {M,
<<"Parameterization DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
" NotUppercase{lowercase} ::= INTEGER (lowercase)\n"
+
+ " P{T1,T2} ::= SEQUENCE { a T1, b T2 }\n"
+ " S ::= P{OCTET STRING}\n"
+
+ " Seq ::= SEQUENCE { a INTEGER }\n"
+ " Sbad ::= Seq{INTEGER}\n"
+
+ "END\n">>},
+ {error,
+ [{structured_error,{M,2},asn1ct_check,
+ {illegal_typereference,lowercase}},
+ {structured_error,{M,4},asn1ct_check,
+ param_wrong_number_of_arguments},
+ {structured_error,{M,6},asn1ct_check,
+ {param_bad_type, 'Seq'}}
+ ]
+ } = run(P, Config),
+ ok.
+
+
+constructed(Config) ->
+ M = 'Const',
+ P = {M,
+ <<"Const DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " Seq1 ::= SEQUENCE {a INTEGER, b BIT STRING, a BOOLEAN}\n"
+ " Ch ::= CHOICE {a INTEGER, b BIT STRING, a BOOLEAN}\n"
+ " Seq2 ::= SEQUENCE {COMPONENTS OF Ch}\n"
+ " CL ::= CLASS { &id INTEGER UNIQUE, &Type }\n"
+ " Seq3 ::= SEQUENCE { id CL.&id, d CL.&foo }\n"
+
+ " Seq4 ::= SEQUENCE { a INTEGER, z INTEGER OPTIONAL, b Set1 }\n"
+ " Set1 ::= SET { c BOOLEAN, d INTEGER }\n"
+ " s1 Seq4 ::= {a 42, b {c TRUE, zz 4711}}\n"
+ " s2 Seq4 ::= {a 42, b {c TRUE, d FALSE}}\n"
+ " s3 Seq4 ::= {a 42, b {c TRUE}}\n"
+ " s4 Seq4 ::= {a 42, b {c TRUE, d 4711}, zz 4712}\n"
+ " s5 Seq4 ::= {a 42}\n"
+ " s6 Seq4 ::= {a 42, zz 4712, b {c TRUE, d 4711}}\n"
"END\n">>},
{error,
- [{structured_error,{'Parameterization',2},asn1ct_check,
- {illegal_typereference,lowercase}}
- ]
- } = run(P, Config),
+ [{structured_error,{M,2},asn1ct_check,{duplicate_identifier,a}},
+ {structured_error,{M,3},asn1ct_check,{duplicate_identifier,a}},
+ {structured_error,{M,4},asn1ct_check,{illegal_COMPONENTS_OF,'Ch'}},
+ {structured_error,{M,6},asn1ct_check,{illegal_object_field,foo}},
+
+ {structured_error,{M,9},asn1ct_check,{illegal_id,zz}},
+ {structured_error,{M,10},asn1ct_check,illegal_integer_value},
+ {structured_error,{M,11},asn1ct_check,{missing_id,d}},
+ {structured_error,{M,12},asn1ct_check,{illegal_id,zz}},
+ {structured_error,{M,13},asn1ct_check,{missing_id,b}},
+ {structured_error,{M,14},asn1ct_check,{illegal_id,zz}}
+ ]
+ } = run(P, Config),
+ ok.
+
+syntax(Config) ->
+ M = 'Syntax',
+ P = {M,
+ <<"Syntax DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " obj1 CL ::= { WRONG }\n"
+ " obj2 CL ::= { CODE 42 AGAIN WRONG }\n"
+ " obj3 CL ::= { INTEGER }\n"
+ " obj4 CL ::= { BIT STRING }\n"
+ " obj5 CL ::= { , }\n"
+ " obj6 CL ::= { CODE , }\n"
+ " obj7 CL ::= { CODE \"abc\" }\n"
+ " obj8 CL ::= { CODE }\n"
+ " obj9 CL ::= { CODE 42 ENUM}\n"
+ " obj10 CL ::= { CODE 42 ENUM BIT STRING}\n"
+
+ " obj11 CL ::= { CODE 42 TYPE 13}\n"
+ " obj12 CL ::= { CODE 42 TYPE d}\n"
+ " obj13 CL ::= { CODE 42 TYPE bs-value}\n"
+
+ " bad-syntax-1 BAD-SYNTAX-1 ::= { BAD 42 }\n"
+
+ " obj14 CL ::= { CODE 42 OBJ-SET integer }\n"
+ " obj15 CL ::= { CODE 42 OBJ-SET { A B } }\n"
+ " obj16 CL ::= { CODE 42 OBJ-SET SEQUENCE { an INTEGER } }\n"
+
+ " obj17 CL ::= { CODE 42 OID {seqtag 42} }\n"
+ " obj18 CL ::= { CODE 42 OID {seqtag 42, seqtag-again 43} }\n"
+ " obj19 CL ::= { CODE 42 OID {one 1 two 2} }\n"
+
+ " BAD-SYNTAX-1 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " BAD &bad\n"
+ " }\n"
+
+ " BAD-SYNTAX-2 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " BAD &Bad\n"
+ " }\n"
+
+ " BAD-SYNTAX-3 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " [ID &code]\n"
+ " }\n"
+
+ " BAD-SYNTAX-4 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " ID\n"
+ " }\n"
+
+ " BAD-SYNTAX-5 ::= CLASS {\n"
+ " &code INTEGER UNIQUE,\n"
+ " &Type\n"
+ " } WITH SYNTAX {\n"
+ " ID\n"
+ " }\n"
+
+ " BAD-SYNTAX-6 ::= CLASS {\n"
+ " &code INTEGER UNIQUE\n"
+ " } WITH SYNTAX {\n"
+ " ID &code, &code\n"
+ " }\n"
+
+ " BAD-SYNTAX-7 ::= CLASS {\n"
+ " &code INTEGER UNIQUE,\n"
+ " &Type\n"
+ " } WITH SYNTAX {\n"
+ " ID &Type, &code, &code, &Type\n"
+ " }\n"
+
+ " CL ::= CLASS {\n"
+ " &code INTEGER UNIQUE,\n"
+ " &enum ENUMERATED { a, b, c} OPTIONAL,\n"
+ " &Type OPTIONAL,\n"
+ " &ObjSet CL OPTIONAL,\n"
+ " &oid OBJECT IDENTIFIER OPTIONAL\n"
+ " } WITH SYNTAX {\n"
+ " CODE &code [ENUM &enum] [TYPE &Type] [OBJ-SET &ObjSet]\n"
+ " [OID &oid]\n"
+ " }\n"
+
+ " bs-value BIT STRING ::= '1011'B\n"
+
+ " integer INTEGER ::= 42\n"
+ "END\n">>},
+ {error,
+ [
+ {structured_error,{M,2},asn1ct_check,
+ {syntax_nomatch,"WRONG"}},
+ {structured_error,{M,3},asn1ct_check,
+ {syntax_nomatch,"AGAIN"}},
+ {structured_error,{M,4},asn1ct_check,
+ {syntax_nomatch,"INTEGER"}},
+ {structured_error,{M,5},asn1ct_check,
+ {syntax_nomatch,"BIT STRING"}},
+ {structured_error,{M,6},asn1ct_check,
+ {syntax_nomatch,"\",\""}},
+ {structured_error,{M,7},asn1ct_check,
+ {syntax_nomatch,"\",\""}},
+ {structured_error,{M,8},asn1ct_check,
+ {syntax_nomatch,"\"abc\""}},
+ {structured_error,{M,9},asn1ct_check,
+ syntax_nomatch},
+ {structured_error,{M,10},asn1ct_check,
+ syntax_nomatch},
+ {structured_error,{M,11},asn1ct_check,
+ {syntax_nomatch,"BIT STRING"}},
+ {structured_error,{M,12},asn1ct_check,
+ {syntax_nomatch,"13"}},
+ {structured_error,{M,13},asn1ct_check,
+ {syntax_nomatch,"d"}},
+ {structured_error,{M,14},asn1ct_check,
+ {syntax_nomatch,"bs-value"}},
+ {structured_error,{M,15},asn1ct_check,
+ {syntax_undefined_field,bad}},
+ {structured_error,{M,16},asn1ct_check,
+ {syntax_nomatch,"integer"}},
+ {structured_error,{M,17},asn1ct_check,
+ {syntax_nomatch,"\"A B\""}},
+ {structured_error,{M,18},asn1ct_check,
+ {syntax_nomatch,"SEQUENCE"}},
+ {structured_error,{M,19},asn1ct_check,
+ {syntax_nomatch,"\"seqtag 42\""}},
+ {structured_error,{M,20},asn1ct_check,
+ {syntax_nomatch,"\"seqtag 42 seqtag-again 43\""}},
+ {structured_error,{M,21},asn1ct_check,
+ {syntax_nomatch,"\"one 1 two 2\""}},
+ {structured_error,{M,22},asn1ct_check,
+ {syntax_undefined_field,bad}},
+ {structured_error,{M,27},asn1ct_check,
+ {syntax_undefined_field,'Bad'}},
+ {structured_error,{M,32},asn1ct_check,
+ {syntax_mandatory_in_optional_group,code}},
+ {structured_error,{M,37},asn1ct_check,
+ {syntax_missing_mandatory_fields,[code]}},
+ {structured_error,{M,42},asn1ct_check,
+ {syntax_missing_mandatory_fields,['Type',code]}},
+ {structured_error,{M,48},asn1ct_check,
+ {syntax_duplicated_fields,[code]}},
+ {structured_error,{M,53},asn1ct_check,
+ {syntax_duplicated_fields,['Type',code]}}
+ ]
+ } = run(P, Config),
+ ok.
+
+table_constraints(Config) ->
+ M = 'TableConstraints',
+ P = {M,
+ <<"TableConstraints DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ " Seq-1 ::= SEQUENCE {\n"
+ " contentType CONTENTS.&id,\n"
+ " content CONTENTS.&Type({Contents}{@contentType})\n"
+ " }\n"
+
+ " Seq-2 ::= SEQUENCE {\n"
+ " contentType INTEGER,\n"
+ " content CONTENTS.&Type({Contents}{@contentType})\n"
+ " }\n"
+
+ " Int ::= INTEGER ({1})\n"
+
+ " Seq-3 ::= SEQUENCE {\n"
+ " contentType CONTENTS.&id({1})\n"
+ " }\n"
+
+ "Contents CONTENTS ::= {\n"
+ " {OCTET STRING IDENTIFIED BY {2 1 1}}\n"
+ "}\n"
+
+ "CONTENTS ::= TYPE-IDENTIFIER\n"
+ "END\n">>},
+ {error,
+ [{structured_error,
+ {M,2},asn1ct_check,
+ {missing_table_constraint,contentType}},
+ {structured_error,
+ {M,6},asn1ct_check,
+ {missing_ocft,contentType}},
+ {structured_error,
+ {M,10},asn1ct_check,
+ illegal_table_constraint},
+ {structured_error,
+ {M,11},asn1ct_check,
+ invalid_table_constraint}
+ ]} = run(P, Config),
+ ok.
+
+tags(Config) ->
+ M = 'Tags',
+ P = {M,
+ <<"Tags DEFINITIONS AUTOMATIC TAGS ::= BEGIN\n"
+ "SeqOpt1 ::= SEQUENCE\n"
+ "{\n"
+ "bool1 BOOLEAN OPTIONAL,\n"
+ "int1 INTEGER,\n"
+ "seq1 SeqIn OPTIONAL\n"
+ "}\n"
+
+ "SeqOpt1Imp ::= SEQUENCE \n"
+ "{\n"
+ "bool1 [1] BOOLEAN OPTIONAL,\n"
+ "int1 INTEGER,\n"
+ "seq1 [2] SeqIn OPTIONAL,\n"
+ "seq2 [2] SeqIn OPTIONAL,\n"
+ "...,\n"
+ "int2 [3] SeqIn,\n"
+ "int3 [3] SeqIn\n"
+ "}\n"
+
+ "SeqIn ::= SEQUENCE \n"
+ "{\n"
+ "boolIn BOOLEAN,\n"
+ "intIn INTEGER\n"
+ "}\n"
+ "\n"
+
+
+ "Set1 ::= SET {\n"
+ " os [0] OCTET STRING,\n"
+ " bool [0] BOOLEAN\n"
+ "}\n"
+
+ "Seq1 ::= SEQUENCE {\n"
+ "a [0] IMPLICIT Choice OPTIONAL\n"
+ "}\n"
+ "Seq2 ::= SEQUENCE {\n"
+ "a [0] IMPLICIT ANY OPTIONAL\n"
+ "}\n"
+ "Choice ::=\n"
+ "CHOICE {\n"
+ "a [0] BOOLEAN,\n"
+ "b [1] INTEGER\n"
+ "}\n"
+
+ "END\n">>},
+ {error,
+ [{structured_error,
+ {M,8},asn1ct_check,
+ {duplicate_tags,[seq1,seq2]}},
+ {structured_error,
+ {M,24},asn1ct_check,
+ {duplicate_tags,[bool,os]}},
+ {structured_error,
+ {M,28},asn1ct_check,
+ {implicit_tag_before,choice}},
+ {structured_error,
+ {M,31},asn1ct_check,
+ {implicit_tag_before,open_type}}
+ ]} = run(P, Config),
ok.
+
values(Config) ->
M = 'Values',
P = {M,
@@ -241,6 +813,53 @@ values(Config) ->
" os1 OCTET STRING ::= \"abc\"\n"
" os2 OCTET STRING ::= 42\n"
" os3 OCTET STRING ::= { 1, 3 }\n"
+ " os4 OCTET STRING ::= '1234'H\n"
+ " Seq ::= SEQUENCE {\n"
+ " an OCTET STRING\n"
+ " }\n"
+ " seq Seq ::= { an int }\n"
+ " os5 OCTET STRING ::= holder-1.&str\n"
+ " os6 OCTET STRING ::= int\n"
+
+ " int1 INTEGER ::= \"string\"\n"
+ " int2 INTEGER ::= os4\n"
+ " int3 INTEGER ::= not-defined\n"
+ " int4 INTEGER ::= holder-1.&str\n"
+ " int5 INTEGER ::= holder-2.&obj\n"
+ " int6 INTEGER ::= holder-2.&undefined-field\n"
+ " int7 INTEGER ::= holder-2.&UndefinedField.&id\n"
+
+ " bs1 BIT STRING ::= 42\n"
+ " bs2 BIT STRING ::= {a,b}\n"
+ " bs3 BIT STRING {a(0),z(25)} ::= {a,b}\n"
+ " bs4 BIT STRING {a(0),z(25)} ::= int\n"
+ " bs5 BIT STRING ::= holder-2.&str\n"
+ " bs6 BIT STRING ::= holder-2.&obj\n"
+
+ " b1 BOOLEAN ::= 42\n"
+ " b2 BOOLEAN ::= {a,b}\n"
+
+ " HOLDER ::= CLASS {\n"
+ " &str IA5String,\n"
+ " &obj HOLDER OPTIONAL\n"
+ " }\n"
+
+ " holder-1 HOLDER ::= { &str \"xyz\" }\n"
+ " holder-2 HOLDER ::= { &str \"xyz\", &obj holder-1 }\n"
+
+ " ext-1 EXTERNAL ::= {identification bad:{1 2 3}, data-value '123'H}\n"
+ " ext-2 EXTERNAL ::= {identification syntax:{1 2 3}, data '123'H}\n"
+
+ " CH ::= CHOICE { a INTEGER, b BOOLEAN }\n"
+ " ch1 CH ::= 2344\n"
+ " ch2 CH ::= zz:34\n"
+
+ " st1 an < Seq ::= 42\n"
+ " st2 zz < CH ::= 42\n"
+ " st3 a < HOLDER ::= 42\n"
+ " st4 a < INTEGER ::= 42\n"
+
+ " int INTEGER ::= 42\n"
"END\n">>},
{error,
[
@@ -249,7 +868,59 @@ values(Config) ->
{structured_error,{M,3},asn1ct_check,
illegal_octet_string_value},
{structured_error,{M,4},asn1ct_check,
- illegal_octet_string_value}
+ illegal_octet_string_value},
+ {structured_error,{M,9},asn1ct_check,
+ illegal_octet_string_value},
+ {structured_error,{M,10},asn1ct_check,
+ illegal_octet_string_value},
+ {structured_error,{M,11},asn1ct_check,
+ illegal_octet_string_value},
+ {structured_error,{M,12},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,13},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,14},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,15},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,16},asn1ct_check,
+ illegal_integer_value},
+ {structured_error,{M,17},asn1ct_check,
+ {undefined_field,'undefined-field'}},
+ {structured_error,{M,18},asn1ct_check,
+ {undefined_field,'UndefinedField'}},
+ {structured_error,{M,19},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,20},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,21},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,22},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,23},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,24},asn1ct_check,
+ {illegal_value, "BIT STRING"}},
+ {structured_error,{M,25},asn1ct_check,
+ {illegal_value, "BOOLEAN"}},
+ {structured_error,{M,26},asn1ct_check,
+ {illegal_value, "BOOLEAN"}},
+ {structured_error,{M,33},asn1ct_check,
+ illegal_external_value},
+ {structured_error,{M,34},asn1ct_check,
+ illegal_external_value},
+ {structured_error,{M,36},asn1ct_check,
+ {illegal_id, 2344}},
+ {structured_error,{M,37},asn1ct_check,
+ {illegal_id, zz}},
+ {structured_error,{M,38},asn1ct_check,
+ {illegal_choice_type, 'Seq'}},
+ {structured_error,{M,39},asn1ct_check,
+ {illegal_id, zz}},
+ {structured_error,{M,40},asn1ct_check,
+ {illegal_choice_type, 'HOLDER'}},
+ {structured_error,{M,41},asn1ct_check,
+ {illegal_choice_type, 'INTEGER'}}
]
} = run(P, Config),
ok.
@@ -258,5 +929,7 @@ values(Config) ->
run({Mod,Spec}, Config) ->
Base = atom_to_list(Mod) ++ ".asn1",
File = filename:join(?config(priv_dir, Config), Base),
+ Include0 = filename:dirname(?config(data_dir, Config)),
+ Include = filename:join(filename:dirname(Include0), "asn1_SUITE_data"),
ok = file:write_file(File, Spec),
- asn1ct:compile(File).
+ asn1ct:compile(File, [{i, Include}]).
diff --git a/lib/asn1/test/syntax_SUITE.erl b/lib/asn1/test/syntax_SUITE.erl
new file mode 100644
index 0000000000..1a2c938fe5
--- /dev/null
+++ b/lib/asn1/test/syntax_SUITE.erl
@@ -0,0 +1,340 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(syntax_SUITE).
+-export([suite/0,all/0,groups/0,
+ assignment/1,
+ class/1,
+ constraints/1,
+ exports/1,
+ header/1,
+ imports/1,
+ objects/1,
+ sequence/1,
+ syntax/1,
+ tokenizer/1,
+ types/1,
+ values/1]).
+
+-include_lib("test_server/include/test_server.hrl").
+
+suite() -> [{ct_hooks, [ts_install_cth]}].
+
+all() ->
+ [{group,p}].
+
+groups() ->
+ [{p,parallel(),
+ [assignment,
+ class,
+ constraints,
+ exports,
+ header,
+ imports,
+ objects,
+ sequence,
+ syntax,
+ tokenizer,
+ types,
+ values]}].
+
+parallel() ->
+ case erlang:system_info(schedulers) > 1 of
+ true -> [parallel];
+ false -> []
+ end.
+
+assignment(Config) ->
+ Head = "Assignment DEFINITIONS AUTOMATIC TAGS ::=\nBEGIN\n",
+ End = "\nEND\n",
+ L0 = [{"42",3,{syntax_error,42}},
+ {"i",4,{syntax_error,'END'}},
+ {"i ::=",3,{syntax_error,'::='}},
+ {"i type",4,{syntax_error,'END'}},
+ {"i type ::=",3,{syntax_error,'::='}},
+ {"i TYPE",4,{syntax_error,'END'}},
+ {"i TYPE ::= ",4,{syntax_error,'END'}},
+ {"i INTEGER ::= 42 garbage",4,{syntax_error,'END'}},
+ {"i{T} Type",4,{syntax_error,'END'}},
+ {"TYPE",4,{syntax_error,'END'}},
+ {"TYPE ::=",4,{syntax_error,'END'}},
+ {"TYPE{ ::=",3,{syntax_error,'::='}},
+ {"TYPE{P, ::=",3,{syntax_error,'::='}},
+ {"TYPE{P,} ::=",3,{syntax_error,'}'}},
+ {"TYPE{Gov:} ::=",3,{syntax_error,':'}},
+ {"TYPE{A} CL ",4,{syntax_error,'END'}},
+ {"ObjSet CL",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Assignment", Config),
+ ok.
+
+class(Config) ->
+ Head = "Class DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " CL ::= CLASS {",
+ End = "\nEND\n",
+ L0 = [{"id",3,{syntax_error,'id'}},
+ {"&id INTEGER",4,{syntax_error,'END'}},
+ {"&id INTEGER,",4,{syntax_error,'END'}},
+ {"&id,",3,{syntax_error,','}},
+ {"&id OPTIONAL",3,{syntax_error,'OPTIONAL'}},
+ {"&id INTEGER OPTIONAL",4,{syntax_error,'END'}},
+ {"&var &Field",4,{syntax_error,'END'}},
+ {"&Type,",4,{syntax_error,'END'}},
+ {"&Type OPTIONAL",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER OPTIONAL",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER DEFAULT",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER DEFAULT {",4,{syntax_error,'END'}},
+ {"&ValueSet INTEGER DEFAULT {a",4,{syntax_error,'END'}},
+ {"&Var &Field",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Class", Config),
+ ok.
+
+constraints(Config) ->
+ Head = "Constraints DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " Type ::= ",
+ End = "\nEND\n",
+ L0 = [{"INTEGER (",4,{syntax_error,'END'}},
+ {"INTEGER (10x",3,{syntax_error,x}},
+ {"INTEGER (10|(10y",3,{syntax_error,y}},
+ {"INTEGER (CONSTRAINED BY {}",4,{syntax_error,'END'}},
+ {"INTEGER (CONSTRAINED BY {INTEGER garbage",3,
+ {syntax_error,garbage}},
+ {"INTEGER ({ObjSet",4,{syntax_error,'END'}},
+ {"INTEGER ({ObjSet}{",3,{syntax_error,'{'}},
+ {"INTEGER ({ObjSet}{@",3,{syntax_error,'{'}},
+ {"INTEGER ({ObjSet}{@x",3,{syntax_error,'{'}},
+ {"INTEGER ({ObjSet}{@x}",4,{syntax_error,'END'}},
+ {"INTEGER (10 !BOOLEAN",4,{syntax_error,'END'}},
+ {"INTEGER (10 !BOOLEAN:",4,{syntax_error,'END'}},
+ {"INTEGER (10 !BOOLEAN:FALSE",4,{syntax_error,'END'}},
+ {"SEQUENCE {} (WITH COMPONENTS { Type })",
+ 3,{syntax_error,'Type'}},
+ {"SEQUENCE {} (WITH COMPONENTS { x (10)",
+ 4,{syntax_error,'END'}},
+ {"SEQUENCE {} (WITH COMPONENTS { ..., x (10)",
+ 4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Constraints", Config),
+ ok.
+
+exports(Config) ->
+ Head = "Exports DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " EXPORTS ",
+ End = "\nEND\n",
+ L0 = [{"Type",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Exports", Config),
+ ok.
+
+header(Config) ->
+ L = [{"lowercase",1,{syntax_error,lowercase}},
+ {"H ",2,{syntax_error,'END-OF-FILE'}},
+ {"H-",1,{syntax_error,'-'}},
+ {"42",1,{syntax_error,42}},
+ {"H definitions",1,{syntax_error,definitions}},
+ {"H DEFINITIONS STUPID TAGS",1,{syntax_error,'STUPID'}},
+ {"H DEFINITIONS WHATEVER",1,{syntax_error,'WHATEVER'}},
+ {"H DEFINITIONS ::= BEGIN",2,{syntax_error,'END-OF-FILE'}},
+ {"BOOLEAN",1,{syntax_error,'BOOLEAN'}}
+ ],
+ run(L, "H", Config),
+ ok.
+
+imports(Config) ->
+ Head = "Imports DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " IMPORTS ",
+ End = "\nEND\n",
+ L0 = [{"Type FROM X",4,{syntax_error,'END'}},
+ {"Symbols TO Y",3,{syntax_error,'TO'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Imports", Config),
+ ok.
+
+objects(Config) ->
+ Head = "Objects DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " object CLASS-NAME ::= ",
+ End = "\nEND\n",
+ L0 = [{"{",4,{syntax_error,'END'}},
+ {"{&min 1, max 10}",3,{syntax_error,max}},
+ {"{&min 1, Max 10}",3,{syntax_error,'Max'}},
+ {"{min 1, &max 10}",3,{syntax_error,'&max'}},
+ {"{min 1, &Max 10}",3,{syntax_error,'&Max'}},
+	  {"{RESERVED WORD BIT}",3,{syntax_error,'BIT'}},
+ {"{&min 1",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Objects", Config),
+ ok.
+
+sequence(Config) ->
+ Head = "Sequence DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " Type ::= SEQUENCE {",
+ End = "\nEND\n",
+ L0 = [{"",4,{syntax_error,'END'}},
+ {" UpperCase",3,{syntax_error,'UpperCase'}},
+ {" a b",4,{syntax_error,'END'}},
+ {" i INTEGER",4,{syntax_error,'END'}},
+ {" ...",4,{syntax_error,'END'}},
+ {" ..., [[",4,{syntax_error,'END'}},
+ {" ..., [[ a INTEGER ]",3,{syntax_error,']'}},
+ {" ..., [[ a INTEGER,",3,{syntax_error,','}},
+ {" ..., [[ a INTEGER, ... ]]",3,{syntax_error,','}},
+ {" ... !42 xxx",3,{syntax_error,'xxx'}},
+ {" ... !42, a INTEGER,",3,{syntax_error,','}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Sequence", Config),
+ ok.
+
+syntax(Config) ->
+ Head = "Syntax DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " CL ::= CLASS { &id INTEGER UNIQUE } WITH SYNTAX ",
+ End = "\nEND\n",
+ L0 = [{"{}",3,{syntax_error,'}'}},
+ {"WORD",3,{syntax_error,'WORD'}},
+ {"{ Word }",3,{syntax_error,'Word'}},
+ {"{ [ Word ] }",3,{syntax_error,'Word'}},
+ {"{ [ WORD }",3,{syntax_error,'}'}},
+ {"{ WORD;",3,{syntax_error,';'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Syntax", Config),
+ ok.
+
+tokenizer(Config) ->
+ Head = "Tokenize DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n",
+ End = "\nEND\n",
+ L0 = [{"'",3,eol_in_token},
+ {"'42'B",3,{invalid_binary_number,"42"}},
+ {"'ZZZ'H",3,{invalid_hex_number,"ZZZ"}},
+ {"\"abc",3,missing_quote_at_eof},
+ {"/*",3,eof_in_comment}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Tokenizer", Config, asn1ct_tok),
+ ok.
+
+types(Config) ->
+ Head = "Types DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " Type ::= ",
+ End = "\nEND\n",
+ L0 = [{"BIT STRING garbage",4,{syntax_error,'END'}},
+ {"BIT STRING {",4,{syntax_error,'END'}},
+ {"BIT STRING { a(42",3,{syntax_error,42}},
+ {"BIT STRING { a(0)",4,{syntax_error,'END'}},
+ {"CHOICE {",4,{syntax_error,'END'}},
+ {"CHOICE { ..., a}",3,{syntax_error,'...'}},
+ {"CHOICE { UpperCase",3,{syntax_error,'UpperCase'}},
+ {"CHOICE { i INTEGER",4,{syntax_error,'END'}},
+ {"CHOICE { ..., i INTEGER }",3,{syntax_error,'...'}},
+ {"CHOICE { b BOOLEAN, ..., i INTEGER",
+ 4,{syntax_error,'END'}},
+ {"CHOICE { b BOOLEAN, ..., [[ e BOOLEAN, ...]]}",
+ 3,{syntax_error,','}},
+ {"CHOICE { b BOOLEAN, ..., i INTEGER, ..., x BIT STRING}",
+ 3,{syntax_error,','}},
+ {"ENUMERATED {",4,{syntax_error,'END'}},
+ {"ENUMERATED { 42 }",3,{syntax_error,42}},
+ {"ENUMERATED { a, b",4,{syntax_error,'END'}},
+ {"ENUMERATED { a, }",3,{syntax_error,','}},
+ {"ENUMERATED { a, ...,\nb, ..., c }",4,{syntax_error,','}},
+ {"INTEGER {",4,{syntax_error,'END'}},
+ {"INTEGER { a(42)",4,{syntax_error,'END'}},
+ {"SEQUENCE",3,{syntax_error,'SEQUENCE'}},
+ %% More tests for SEQUENCE in sequence/1.
+ {"SEQUENCE SIZE (1..10)",4,{syntax_error,'END'}},
+ {"SEQUENCE (SIZE (1..10))",4,{syntax_error,'END'}},
+ {"SET { i INTEGER",4,{syntax_error,'END'}},
+ {"SET { ...",4,{syntax_error,'END'}},
+ {"SET SIZE (1..10)",4,{syntax_error,'END'}},
+ {"SET (SIZE (1..10))",4,{syntax_error,'END'}},
+ {"SET { ... !42 xxx",3,{syntax_error,'xxx'}},
+ {"SET { ... !42, a INTEGER,",3,{syntax_error,','}},
+ {"[",4,{syntax_error,'END'}},
+ {"[42",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Types", Config),
+ ok.
+
+values(Config) ->
+ Head = "Values DEFINITIONS AUTOMATIC TAGS ::=\n"
+ "BEGIN\n"
+ " value Type ::= ",
+ End = "\nEND\n",
+ L0 = [{"",4,{syntax_error,'END'}}
+ ],
+ L = [{Head++S++End,Line,E} || {S,Line,E} <- L0],
+ run(L, "Values", Config),
+ ok.
+
+run(List, File, Config) ->
+ run(List, File, Config, asn1ct_parser2).
+
+run(List, File0, Config, Module) ->
+ Base = File0 ++ ".asn1",
+ File = filename:join(?config(priv_dir, Config), Base),
+ case run_1(List, Base, File, Module, 0) of
+ 0 -> ok;
+ Errors -> ?t:fail(Errors)
+ end.
+
+run_1([{Source,Line,Error}=Exp|T], Base, File, Module, N) ->
+ ok = file:write_file(File, Source),
+ io:format("~s", [Source]),
+ case asn1ct:compile(File) of
+ {error,[{structured_error,{Base,L},Module,E}]} ->
+ case {L,E} of
+ {Line,Error} ->
+ run_1(T, Base, File, Module, N);
+ {Line,OtherError} ->
+ io:format("*** Wrong error: ~p, expected ~p ***\n",
+ [OtherError,Error]),
+ run_1(T, Base, File, Module, N+1);
+ {OtherLine,Error} ->
+ io:format("*** Wrong line: ~p, expected ~p ***\n",
+ [OtherLine,Line]),
+ run_1(T, Base, File, Module, N+1);
+ {_,_} ->
+		    io:format("*** Wrong line: ~p, expected ~p ***\n",
+ [L,Line]),
+ io:format("*** Wrong error: ~p, expected ~p ***\n",
+ [E,Error]),
+ run_1(T, Base, File, Module, N+1)
+ end;
+ Other ->
+	    io:format("~p\nGOT: ~p\n", [Exp,Other]),
+	    run_1(T, Base, File, Module, N+1)
+ end;
+run_1([], _, _, _, N) ->
+ N.
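
For reference, run_1/5 above writes each generated source to a file and expects asn1ct:compile/1 to return a single structured error on the given line. A minimal, self-contained sketch of what that check amounts to for the first entry in header/1 (the file name "H.asn1" follows from run/3; the error term is the one listed above):

    check_single_error() ->
        %% "lowercase" is the first fragment in header/1; it becomes the
        %% whole source of H.asn1 and must be rejected on line 1 by the
        %% parser (asn1ct_parser2), matching the pattern used in run_1/5.
        ok = file:write_file("H.asn1", "lowercase"),
        {error,[{structured_error,{"H.asn1",1},asn1ct_parser2,
                 {syntax_error,lowercase}}]} = asn1ct:compile("H.asn1"),
        ok.
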
diff --git a/lib/asn1/test/testChoExtension.erl b/lib/asn1/test/testChoExtension.erl
index 09e19ceebb..f36d6c1cbf 100644
--- a/lib/asn1/test/testChoExtension.erl
+++ b/lib/asn1/test/testChoExtension.erl
@@ -39,11 +39,6 @@ extension(_Rules) ->
roundtrip('ChoExt3', {int,33}),
roundtrip('ChoExt4', {str,<<"abc">>}),
- roundtrip('ChoEmptyRoot', {bool,false}),
- roundtrip('ChoEmptyRoot', {bool,true}),
- roundtrip('ChoEmptyRoot', {int,0}),
- roundtrip('ChoEmptyRoot', {int,7}),
-
ok.
diff --git a/lib/asn1/test/testConstraints.erl b/lib/asn1/test/testConstraints.erl
index 3ccf883bd6..5d65cb8d73 100644
--- a/lib/asn1/test/testConstraints.erl
+++ b/lib/asn1/test/testConstraints.erl
@@ -231,6 +231,28 @@ int_constraints(Rules) ->
seq_roundtrip(Rules, 'SeqOverlapping', 'SeqNonOverlapping', 19000),
seq_roundtrip(Rules, 'SeqOverlapping', 'SeqNonOverlapping', 26900),
+ %%==========================================================
+ %% Constraints from object fields.
+ %%==========================================================
+ range_error(Rules, 'IntObjectConstr', 1),
+ roundtrip('IntObjectConstr', 2),
+ roundtrip('IntObjectConstr', 3),
+ roundtrip('IntObjectConstr', 4),
+ range_error(Rules, 'IntObjectConstr', 5),
+
+
+ %%==========================================================
+ %% INTEGER constraints defined using named INTEGERs.
+ %%==========================================================
+ 42 = 'Constraints':'constrainedNamedInt-1'(),
+ 100 = 'Constraints':'constrainedNamedInt-2'(),
+ range_error(Rules, 'ConstrainedNamedInt', 41),
+ roundtrip('ConstrainedNamedInt', v1),
+ range_error(Rules, 'ConstrainedNamedInt', 43),
+
+ range_error(Rules, 'SeqWithNamedInt', {'SeqWithNamedInt',-100}),
+ roundtrip('SeqWithNamedInt', {'SeqWithNamedInt',v2}),
+
ok.
%% PER: Ensure that if the lower bound is Lb, Lb+16#80 is encoded
diff --git a/lib/asn1/test/testDoubleEllipses.erl b/lib/asn1/test/testDoubleEllipses.erl
index 4e8972cdfc..bd6831bf1e 100644
--- a/lib/asn1/test/testDoubleEllipses.erl
+++ b/lib/asn1/test/testDoubleEllipses.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,17 +24,20 @@
-include_lib("test_server/include/test_server.hrl").
-record('Seq',{a, c}).
+-record('SeqV1',{a, b}).
-record('SeqV2',{a, b ,c}).
-record('SeqAlt',{a,d,b,e,c,f,g}).
-record('SeqAltV2',{a,d,b,e,h,i,c,f,g}).
-record('Set',{a, c}).
+-record('SetV1',{a, b}).
-record('SetV2',{a, b ,c}).
-record('SetAlt',{a,d,b,e,c,f,g}).
-record('SetAltV2',{a,d,b,e,h,i,c,f,g}).
main(_Rules) ->
roundtrip('Seq', #'Seq'{a=10,c=true}),
+ roundtrip('SeqV1', #'SeqV1'{a=10,b=false}),
roundtrip('SeqV2', #'SeqV2'{a=10,b=false,c=true}),
roundtrip('SeqAlt',
#'SeqAlt'{a=10,d=12,b = <<2#1010:4>>,
@@ -45,6 +48,7 @@ main(_Rules) ->
e=true,h="PS",i=13,c=false,f=14,g=16}),
roundtrip('Set', #'Set'{a=10,c=true}),
+ roundtrip('SetV1', #'SetV1'{a=10,b=false}),
roundtrip('SetV2', #'SetV2'{a=10,b=false,c=true}),
roundtrip('SetAlt',
#'SetAlt'{a=10,d=12,
@@ -54,6 +58,14 @@ main(_Rules) ->
#'SetAltV2'{a=10,d=12,
b = <<2#1010:4>>,
e=true,h="PS",i=13,c=false,f=14,g=16}),
+
+ roundtrip('SeqDoubleEmpty1',
+ {'SeqDoubleEmpty1'}),
+ roundtrip('SeqDoubleEmpty2',
+ {'SeqDoubleEmpty2',true,42}),
+ roundtrip('SeqDoubleEmpty2',
+ {'SeqDoubleEmpty2',true,asn1_NOVALUE}),
+
ok.
roundtrip(T, V) ->
diff --git a/lib/asn1/test/testEnumExt.erl b/lib/asn1/test/testEnumExt.erl
index 878518be11..29995d6340 100644
--- a/lib/asn1/test/testEnumExt.erl
+++ b/lib/asn1/test/testEnumExt.erl
@@ -78,6 +78,9 @@ common(Erule) ->
v_roundtrip(Erule, 'SeqBig', {'SeqBig',true,e40,9357}),
v_roundtrip(Erule, 'SeqBig', {'SeqBig',true,e80,9357}),
+
+ v_roundtrip(Erule, 'EnumSkip', d),
+
ok.
roundtrip(Type, Value) ->
@@ -85,11 +88,20 @@ roundtrip(Type, Value) ->
v_roundtrip(Erule, Type, Value) ->
Encoded = roundtrip(Type, Value),
- Encoded = asn1_test_lib:hex_to_bin(v(Erule, Value)).
-
-v(ber, {'SeqBig',true,e40,9357}) -> "300A8001 FF810141 8202248D";
-v(ber, {'SeqBig',true,e80,9357}) -> "300B8001 FF810200 81820224 8D";
-v(per, {'SeqBig',true,e40,9357}) -> "E0014002 248D";
-v(per, {'SeqBig',true,e80,9357}) -> "E0018002 248D";
-v(uper, {'SeqBig',true,e40,9357}) -> "E0280044 91A0";
-v(uper, {'SeqBig',true,e80,9357}) -> "E0300044 91A0".
+ Encoded = asn1_test_lib:hex_to_bin(v(Erule, Type, Value)).
+
+v(Erule, 'SeqBig', Value) ->
+ v_seq_big(Erule, Value);
+v(Erule, 'EnumSkip', Value) ->
+ v_enum_skip(Erule, Value).
+
+v_seq_big(ber, {'SeqBig',true,e40,9357}) -> "300A8001 FF810141 8202248D";
+v_seq_big(ber, {'SeqBig',true,e80,9357}) -> "300B8001 FF810200 81820224 8D";
+v_seq_big(per, {'SeqBig',true,e40,9357}) -> "E0014002 248D";
+v_seq_big(per, {'SeqBig',true,e80,9357}) -> "E0018002 248D";
+v_seq_big(uper, {'SeqBig',true,e40,9357}) -> "E0280044 91A0";
+v_seq_big(uper, {'SeqBig',true,e80,9357}) -> "E0300044 91A0".
+
+v_enum_skip(per, d) -> "82";
+v_enum_skip(uper, d) -> "82";
+v_enum_skip(ber, d) -> "0A0103".
diff --git a/lib/asn1/test/testExtensibilityImplied.erl b/lib/asn1/test/testExtensibilityImplied.erl
new file mode 100644
index 0000000000..8049bb6e53
--- /dev/null
+++ b/lib/asn1/test/testExtensibilityImplied.erl
@@ -0,0 +1,29 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testExtensibilityImplied).
+-export([main/0]).
+
+main() ->
+ M = 'ExtensibilityImplied',
+ {'Seq2',true} = M:decode('Seq2', M:encode('Seq1', {'Seq1',true,42})),
+ {'Set2',true} = M:decode('Set2', M:encode('Set1', {'Set1',true,42})),
+ {asn1_enum,_} = M:decode('Enum2', M:encode('Enum1', ext)),
+ ok.
diff --git a/lib/asn1/test/testImporting.erl b/lib/asn1/test/testImporting.erl
new file mode 100644
index 0000000000..de8beae38b
--- /dev/null
+++ b/lib/asn1/test/testImporting.erl
@@ -0,0 +1,34 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testImporting).
+-export([main/0]).
+
+main() ->
+ M = 'Importing',
+ roundtrip('Seq', {'Seq',5}),
+ roundtrip('OtherSeq', {'Seq',42,true}),
+ {'Seq',42,true} = M:seq(),
+ roundtrip('ObjSeq', {'ObjSeq',1,<<"XYZ">>}),
+ roundtrip('ObjSeq', {'ObjSeq',2,19}),
+ ok.
+
+roundtrip(Type, Value) ->
+ asn1_test_lib:roundtrip('Importing', Type, Value).
diff --git a/lib/asn1/test/testInfObj.erl b/lib/asn1/test/testInfObj.erl
index 37c134b1b9..3044d5cd2a 100644
--- a/lib/asn1/test/testInfObj.erl
+++ b/lib/asn1/test/testInfObj.erl
@@ -74,6 +74,12 @@ main(_Erule) ->
{'ConstructedPdu',7,[]}),
roundtrip('InfObj', 'ConstructedPdu',
{'ConstructedPdu',7,[64,1,19,17,35]}),
+ {'ConstructedPdu',8,[{_,-15,35},{_,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedPdu',
+ {'ConstructedPdu',8,[{'_',-15,35},{'_',533,-70}]}),
+ {'ConstructedPdu',9,[{RecTag9,-15,35},{RecTag9,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedPdu',
+ {'ConstructedPdu',9,[{'_',-15,35},{'_',533,-70}]}),
roundtrip('InfObj', 'ConstructedSet',
{'ConstructedSet',1,{'CONSTRUCTED-DEFAULT_Type',-2001,true}}),
@@ -96,6 +102,12 @@ main(_Erule) ->
{'ConstructedSet',7,[]}),
roundtrip('InfObj', 'ConstructedSet',
{'ConstructedSet',7,[64,1,19,17,35]}),
+ {'ConstructedSet',8,[{_,-15,35},{_,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedSet',
+ {'ConstructedSet',8,[{'_',-15,35},{'_',533,-70}]}),
+ {'ConstructedSet',9,[{_,-15,35},{_,533,-70}]} =
+ enc_dec('InfObj', 'ConstructedSet',
+ {'ConstructedSet',9,[{'_',-15,35},{'_',533,-70}]}),
roundtrip('InfObj', 'Seq2',
{'Seq2',42,[true,false,false,true],
@@ -126,12 +138,37 @@ main(_Erule) ->
test_objset('OstSeq45', [4,5]),
test_objset('OstSeq12345', [1,2,3,4,5]),
+ test_objset('OstSeq12Except', [1,2]),
+ test_objset('OstSeq123Except', [1,2]),
+
test_objset('ExOstSeq12', [1,2]),
test_objset('ExOstSeq123', [1,2,3]),
- %%test_objset('ExOstSeq1234', [1,2,3,4]),
+ test_objset('ExOstSeq1234', [1,2,3,4]),
test_objset('ExOstSeq45', [4,5]),
test_objset('ExOstSeq12345', [1,2,3,4,5]),
+ test_objset('ExOstSeq12Except', [1,2]),
+ test_objset('ExOstSeq123Except', [1,2]),
+
+ roundtrip('InfObj', 'ExtClassSeq', {'ExtClassSeq', 4}),
+
+ {1,2,42} = 'InfObj':'value-1'(),
+ {1,2,42,25} = 'InfObj':'value-2'(),
+ {100,101} = 'InfObj':'value-3'(),
+ {1,2,100,101} = 'InfObj':'value-4'(),
+
+ roundtrip('InfObj', 'Rdn', {'Rdn',{2,5,4,41},"abc"}),
+
+ roundtrip('InfObj', 'TiAliasSeq',
+ {'TiAliasSeq',{'TiAliasSeq_prf',{2,1,2},'NULL'}}),
+
+ roundtrip('InfObj', 'ContentInfo',
+ {'ContentInfo',{2,7,8,9},"string"}),
+ {2,7,8,9} = 'InfObj':'id-content-type'(),
+
+ <<2#1011:4>> = 'InfObj':'tricky-bit-string'(),
+ <<16#CAFE:16>> = 'InfObj':'tricky-octet-string'(),
+
ok.
test_objset(Type, Keys) ->
diff --git a/lib/asn1/test/testInfObjExtract.erl b/lib/asn1/test/testInfObjExtract.erl
new file mode 100644
index 0000000000..0ef967c1f6
--- /dev/null
+++ b/lib/asn1/test/testInfObjExtract.erl
@@ -0,0 +1,72 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testInfObjExtract).
+
+-export([main/0]).
+
+main() ->
+ roundtrip_data_object_13('DataSeq-1'),
+
+ roundtrip_data_object_1('DataSeq-2'),
+ roundtrip_data_object_1('DataSeq-3'),
+ roundtrip_data_object_1('DataSeq-4'),
+
+ roundtrip_data_object_13('DataSeq-5'),
+ roundtrip_data_object_13('DataSeq-6'),
+
+ roundtrip_data_object_1('DataSeqSingleSet-1'),
+ roundtrip_data_object_1('DataSeqSingleSet-2'),
+
+ roundtrip('ObjClassSeq-1', {'ObjClassSeq-1',1,true}),
+ roundtrip('ObjClassSeq-1', {'ObjClassSeq-1',2,true}),
+
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',0,false}),
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',3,true}),
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',4,false}),
+ roundtrip_error('ObjClassSeq-1', {'ObjClassSeq-1',5,true}),
+
+ ok.
+
+roundtrip_data_object_13(SeqType) ->
+ roundtrip(SeqType, {SeqType,1,true}),
+ roundtrip(SeqType, {SeqType,2,<<"abc">>}),
+ roundtrip(SeqType, {SeqType,3,<<42:5>>}),
+ roundtrip_error(SeqType, {SeqType,4,42}).
+
+roundtrip_data_object_1(SeqType) ->
+ roundtrip(SeqType, {SeqType,1,false}),
+ roundtrip(SeqType, {SeqType,1,true}),
+ roundtrip_error(SeqType, {SeqType,1,42}),
+ roundtrip_error(SeqType, {SeqType,2,<<"abc">>}),
+ roundtrip_error(SeqType, {SeqType,3,<<42:5>>}),
+ roundtrip_error(SeqType, {SeqType,999,42}).
+
+roundtrip(T, V) ->
+ asn1_test_lib:roundtrip('InfObjExtract', T, V).
+
+roundtrip_error(T, V) ->
+ try asn1_test_lib:roundtrip('InfObjExtract', T, V) of
+ ok ->
+ test_server:fail()
+ catch
+ _:_ ->
+ ok
+ end.
diff --git a/lib/asn1/test/testParamBasic.erl b/lib/asn1/test/testParamBasic.erl
index 39f7947e8d..5f6116bba4 100644
--- a/lib/asn1/test/testParamBasic.erl
+++ b/lib/asn1/test/testParamBasic.erl
@@ -46,6 +46,14 @@ main(Rules) ->
roundtrip('AnAlgorithm', {'AnAlgorithm',1,42}),
roundtrip('AnAlgorithm', {'AnAlgorithm',2,true}),
roundtrip('AnAlgorithm', {'AnAlgorithm',2,false}),
+ {'AnAlgorithm',1,42} = 'ParamBasic':'alg-seq-1'(),
+ {'AnAlgorithm',2,true} = 'ParamBasic':'alg-seq-2'(),
+
+ roundtrip('Seq', {'Seq',
+ {'Seq_c1',{2,1,1},42},
+ {'Seq_c2',{2,1,1,1},asn1_NOVALUE}}),
+
+ {_,{2,9,9,9,7},'NULL'} = 'ParamBasic':'algid-hmacWithSHA1'(),
ok.
roundtrip(Type, Value) ->
diff --git a/lib/asn1/test/testPrim.erl b/lib/asn1/test/testPrim.erl
index e07379e634..d7893a2d58 100644
--- a/lib/asn1/test/testPrim.erl
+++ b/lib/asn1/test/testPrim.erl
@@ -98,6 +98,11 @@ enum(Rules) ->
ber ->
ok
end,
+
+ roundtrip('NegEnumVal', neg),
+ roundtrip('NegEnumVal', zero),
+ roundtrip('EnumVal128', val),
+
ok.
diff --git a/lib/asn1/test/testPrimStrings.erl b/lib/asn1/test/testPrimStrings.erl
index 155d6f6ff5..43835728e8 100644
--- a/lib/asn1/test/testPrimStrings.erl
+++ b/lib/asn1/test/testPrimStrings.erl
@@ -232,7 +232,8 @@ bit_string(Rules, Opts) ->
end.
random_bits(N) ->
- Seed = integer_to_list(erlang:phash2(erlang:now())),
+ Seed0 = {erlang:monotonic_time(),erlang:unique_integer()},
+ Seed = integer_to_list(erlang:phash2(Seed0)),
random_bits(<<>>, N, Seed).
random_bits(Bin, N, Seed) ->
diff --git a/lib/asn1/test/testRfcs.erl b/lib/asn1/test/testRfcs.erl
new file mode 100644
index 0000000000..6281d09873
--- /dev/null
+++ b/lib/asn1/test/testRfcs.erl
@@ -0,0 +1,75 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testRfcs).
+
+-export([compile/3,test/0]).
+
+-include_lib("test_server/include/test_server.hrl").
+
+compile(Config, Erules, Options0) ->
+ Options = [no_ok_wrapper|Options0],
+ DataDir = ?config(data_dir, Config),
+ Specs0 = filelib:wildcard("*.asn1", filename:join(DataDir, rfcs)),
+ Specs = [filename:join(rfcs, Spec) || Spec <- Specs0],
+ 122 = length(Specs),
+ CaseDir = ?config(case_dir, Config),
+ asn1_test_lib:compile_all(Specs, Config, [Erules,{i,CaseDir}|Options]).
+
+test() ->
+ {1,3,6,1,5,5,7,48,1,2} =
+ IdPkixOcspNonce =
+ 'OCSP-2009':'id-pkix-ocsp-nonce'(),
+ roundtrip('OCSP-2009', 'OCSPRequest',
+ {'OCSPRequest',
+ {'TBSRequest',
+ 0,
+ {rfc822Name,"name string"},
+ [{'Request',
+ {'CertID',{'_',{2,9,3,4,5},asn1_NOVALUE},
+ <<"POTATOHASH">>,<<"HASHBROWN">>,42},
+ [{'_',IdPkixOcspNonce,true,<<34,159,16,57,199>>}]}],
+ asn1_NOVALUE},
+ asn1_NOVALUE}),
+ otp_7759(),
+ ok.
+
+roundtrip(Module, Type, Value0) ->
+ Enc = Module:encode(Type, Value0),
+ Value1 = Module:decode(Type, Enc),
+ asn1_test_lib:match_value(Value0, Value1),
+ ok.
+
+otp_7759() ->
+    %% The release note for asn1-1.6.6 says:
+ %% Decode of an open_type when the value was empty tagged
+ %% type encoded with indefinite length failed.
+ Mod = 'OLD-PKCS7',
+ Encoded = encoded_msg(),
+ ContentInfo = Mod:decode('ContentInfo', Encoded),
+ io:format("~p\n", [ContentInfo]),
+ {'ContentInfo',_Id,PKCS7_content} = ContentInfo,
+ X = Mod:decode('SignedData', PKCS7_content),
+ io:format("~p\n", [X]),
+ io:nl(),
+ ok.
+
+encoded_msg() ->
+ <<48,128,6,9,42,134,72,134,247,13,1,7,2,160,128,48,128,2,1,1,49,11,48,9,6,5,43,14,3,2,26,5,0,48,128,6,9,42,134,72,134,247,13,1,7,1,160,128,36,128,0,0,0,0,0,0, 49,130,1,192,48,130,1,188,2,1,1,48,50,48,38,49,17,48,15,6,3,85,4,3,12,8,65,100,109,105,110,67,65,49,49,17,48,15,6,3,85,4,10,12,8,69,82,73,67,83,83,79,78,2,8,15,151,245,186,21,23,240,96,48,9,6,5,43,14,3,2,26,5,0,160,129,229,48,17,6,10,96,134,72,1,134,248,69,1,9,2,49,3,19,1,51,48,17,6,10,96,134,72,1,134,248,69,1,9,3,49,3,19,1,51,48,24,6,9,42,134,72,134,247,13,1,9,3,49,11,6,9,42,134,72,134,247,13,1,7,1,48,28,6,9,42,134,72,134,247,13,1,9,5,49,15,23,13,48,56,49,50,49,48,48,57,53,52,50,51,90,48,28,6,10,96,134,72,1,134,248,69,1,9,7,49,14,19,12,49,53,50,56,49,52,50,52,48,57,53,53,48,32,6,10,96,134,72,1,134,248,69,1,9,5,49,18,4,16,165,115,177,71,78,88,239,113,78,56,98,98,18,202,217,235,48,32,6,10,96,134,72,1,134,248,69,1,9,6,49,18,4,16,227,174,230,251,43,153,252,65,11,93,231,83,34,18,55,46,48,35,6,9,42,134,72,134,247,13,1,9,4,49,22,4,20,218,57,163,238,94,107,75,13,50,85,191,239,149,96,24,144,175,216,7,9,48,13,6,9,42,134,72,134,247,13,1,1,1,5,0,4,129,128,106,233,116,125,140,51,133,173,63,41,54,138,214,211,89,215,169,125,98,77,16,222,216,240,211,79,125,111,87,186,73,63,253,204,107,102,177,63,174,197,224,212,231,172,149,246,33,68,223,67,102,93,64,152,152,5,216,102,247,134,36,197,150,236,57,77,56,138,95,71,204,31,23,149,241,213,78,172,165,249,100,187,12,45,19,57,67,120,54,63,15,239,41,217,127,61,254,60,201,104,68,3,135,214,206,93,253,255,192,94,56,107,68,210,57,61,41,249,47,156,130,244,52,12,163,216,236,69,0,0,0,0,0,0>>.
diff --git a/lib/asn1/test/testSelectionTypes.erl b/lib/asn1/test/testSelectionTypes.erl
index 6d060321da..7d273fe656 100644
--- a/lib/asn1/test/testSelectionTypes.erl
+++ b/lib/asn1/test/testSelectionTypes.erl
@@ -23,10 +23,34 @@
-include_lib("test_server/include/test_server.hrl").
test() ->
- Val = ["PrintableString","PrintableString","PrintableString"],
["Es"] = Val2 = ['SelectionType':einsteinium()],
- roundtrip('MendeleyevTable', Val),
+ roundtrip('MendeleyevTable', ["fox","tree","cat","stone"]),
roundtrip('MendeleyevTable', Val2),
+ roundtrip('MendeleyevSet', [42,57,93,101]),
+
+ M = 'SelectionType',
+ true = M:boolv(),
+ 4 = M:intv(),
+ <<2#1001:4>> = M:bsv(),
+ <<16#3130:16>> = M:osv(),
+ 'NULL' = M:nullv(),
+ {2,1,1} = M:oiv(),
+ "ObjectDesc" = M:odv(),
+ "utf8" = M:utfv(),
+ {5,32767,256} = M:rov(),
+ "089" = M:numsv(),
+ "telet" = M:teletv(),
+ "t61" = M:t61v(),
+ "video" = M:videov(),
+ "ia5" = M:ia5v(),
+ "9805281429Z" = M:utctimev(),
+ "19980528142905.1" = M:gTime(),
+ "graphic" = M:gsv(),
+ "visible" = M:vsv(),
+ "general" = M:gStringv(),
+ "Universal" = M:univv(),
+ "bmp" = M:bmov(),
+
ok.
roundtrip(T, V) ->
diff --git a/lib/asn1/test/testUniqueObjectSets.erl b/lib/asn1/test/testUniqueObjectSets.erl
new file mode 100644
index 0000000000..1ef61a885a
--- /dev/null
+++ b/lib/asn1/test/testUniqueObjectSets.erl
@@ -0,0 +1,175 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(testUniqueObjectSets).
+-export([main/3]).
+
+%% Run-time function called by the generated code.
+seq_roundtrip(I, D0) ->
+ M = 'UniqueObjectSets',
+ try
+ {ok,Enc} = M:encode('Seq', {'Seq',I,D0}),
+ {ok,{'Seq',I,D}} = M:decode('Seq', Enc),
+ D
+ catch C:E ->
+ Stk = erlang:get_stacktrace(),
+ io:format("FAILED: ~p ~p\n", [I,D0]),
+ erlang:raise(C, E, Stk)
+ end.
+
+types() ->
+ [{"CHOICE { a INTEGER, b BIT STRING }", {b,<<42:3>>}},
+ {"INTEGER",42},
+ {"SEQUENCE {a OCTET STRING}",{'_',<<"abc">>}},
+ {"SEQUENCE {b BOOLEAN, ...}",{'_',true}},
+ {"SEQUENCE {b BOOLEAN, ..., s IA5String, ..., e ENUMERATED { x, y, z}}",
+ {'_',false,"string",y}},
+ {"SET {a BIT STRING}",{'_',<<1:17>>}},
+ {"SEQUENCE OF INTEGER",[-19,0,555,777]},
+ {"SET OF BOOLEAN",[true,false,true]},
+ {"SEQUENCE OF SEQUENCE {x INTEGER (0..7)}",[{'_',7},{'_',0}]},
+ {"SET OF SEQUENCE {x INTEGER (0..7)}",[{'_',7},{'_',0}]}
+ ].
+
+main(CaseDir, Rule, Opts) ->
+ D0 = types(),
+ {D1,_} = lists:mapfoldl(fun({T,S}, I) ->
+ {{I,T,S},I+1}
+ end, 1, D0),
+ Types = [gen_types(I, Type) || {I,Type,_} <- D1],
+ Set = [gen_set_items(I, T) || {I,T,_} <- D1],
+ Objs = [gen_obj(I) || {I,_,_} <- D1],
+ DupObjs = [gen_dup_obj(I, T) || {I,T,_} <- D1],
+ DupObjRefs0 = [gen_dup_obj_refs(I) || {I,_,_} <- D1],
+ DupObjRefs = string:join(DupObjRefs0, " |\n"),
+ Asn1Spec = 'UniqueObjectSets',
+ A = ["UniqueObjectSets DEFINITIONS AUTOMATIC TAGS ::=\n",
+ "BEGIN\n\n",
+ "TEST-UNIQUE ::= CLASS {\n"
+ " &id INTEGER UNIQUE,\n"
+ " &Type OPTIONAL\n"
+ "}\n"
+ "WITH SYNTAX {IDENTIFIED BY &id [TYPE &Type]}\n",
+ $\n,
+ "DUP-CONTAINER ::= CLASS {\n"
+ " &id INTEGER UNIQUE,\n"
+ " &data TEST-UNIQUE\n"
+ "} WITH SYNTAX {\n"
+ " ID &id, &data\n"
+ "}\n",
+ $\n,
+ Types,$\n,
+ "UniqSet TEST-UNIQUE ::= {\n",
+ Set,
+ " DupSet-1 |\n",
+ " DupSet-2, ...\n",
+ "}\n\n",
+ Objs,$\n,
+ DupObjs,$\n,
+ "DupSet-1 TEST-UNIQUE ::= {\n",
+ DupObjRefs,$\n,
+ "}\n\n",
+ "DupSet-2 TEST-UNIQUE ::= {\n",
+ DupObjRefs,",...\n",
+ "}\n\n",
+ "Seq ::= SEQUENCE {\n"
+ " id TEST-UNIQUE.&id ({UniqSet}),\n"
+ " type TEST-UNIQUE.&Type ({UniqSet}{@id})\n"
+ "}\n"
+ "END\n"],
+ Asn1File = filename:join(CaseDir, atom_to_list(Asn1Spec)++".asn1"),
+ ok = file:write_file(Asn1File, A),
+
+ TestModule = 'unique_object_sets',
+ Test0 = [gen_test(I, Data) || {I,_,Data} <- D1],
+ Test = ["-module(",atom_to_list(TestModule),").\n"
+ "-export([main/1]).\n"
+ "\n"
+ "main(SeqRoundtrip) ->\n",
+ " ",atom_to_list(Rule)," = '",atom_to_list(Asn1Spec),
+ "':encoding_rule(),\n",
+ Test0,
+ " ok.\n"
+ ],
+ ErlFile = filename:join(CaseDir, atom_to_list(TestModule)++".erl"),
+ ok = file:write_file(ErlFile, Test),
+
+ io:format("~s\n~s\n", [Asn1File,ErlFile]),
+ case Rule of
+ per ->
+ io:put_chars([A,$\n,Test,$\n]);
+ _ ->
+ ok
+ end,
+
+ ok = asn1ct:compile(Asn1File, [Rule,{outdir,CaseDir}|Opts]),
+ {ok,TestModule} = c:c(ErlFile, [{outdir,CaseDir}]),
+ TestModule:main(fun seq_roundtrip/2),
+ ok.
+
+gen_types(I, Type) ->
+ io_lib:format("AType~p ::= ~s\n", [I,Type]).
+
+gen_set_items(I, T) ->
+ io_lib:format(" {IDENTIFIED BY ~p TYPE AType~p} |\n"
+ " {IDENTIFIED BY ~p TYPE AType~p} |\n"
+ " {IDENTIFIED BY ~p TYPE ~s} |\n"
+ " obj-~p |\n\n",
+ [I,I,I,I,I,T,I]).
+
+gen_obj(I) ->
+ io_lib:format("obj-~p TEST-UNIQUE ::= {IDENTIFIED BY ~p TYPE AType~p}\n",
+ [I,I,I]).
+
+gen_dup_obj(I, T) ->
+ io_lib:format("dup-obj-~p DUP-CONTAINER ::= "
+ "{ID ~p, {IDENTIFIED BY ~p TYPE ~s}}\n",
+ [I,I,I+1000,T]).
+
+gen_dup_obj_refs(I) ->
+ io_lib:format("dup-obj-~p.&data", [I]).
+
+gen_test(I, Data) ->
+ io_lib:format(" ~s = SeqRoundtrip(~p, ~p),\n",
+ [match_term(Data),I,Data]).
+
+match_term('_') ->
+ "_";
+match_term([H|T]=L) ->
+ case is_intlist(L) of
+ true ->
+ io_lib:format("~p", [L]);
+ false ->
+ ["[",match_term(H),"|",match_term(T),"]"]
+ end;
+match_term(Tuple) when is_tuple(Tuple) ->
+ ["{",match_term_tuple(Tuple, 1),"}"];
+match_term(Other) ->
+ io_lib:format("~p", [Other]).
+
+match_term_tuple(T, I) when I =< tuple_size(T) ->
+ [match_term(element(I, T)),
+ if I < tuple_size(T) -> ",";
+ true -> "" end|match_term_tuple(T, I+1)];
+match_term_tuple(_, _) ->
+ [].
+
+is_intlist(L) ->
+ lists:all(fun is_integer/1, L).
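
For orientation, the generator functions above build the UniqueObjectSets spec one entry at a time; a small sketch of the strings they produce for entry 2 ("INTEGER" in types/0), taken directly from the io_lib:format calls in gen_types/2 and gen_obj/1 (both functions are local to the module, so the sketch assumes it runs inside it):

    example_generated_fragments() ->
        %% gen_types/2 emits the type assignment for entry 2 ...
        "AType2 ::= INTEGER\n" = lists:flatten(gen_types(2, "INTEGER")),
        %% ... and gen_obj/1 emits the corresponding TEST-UNIQUE object.
        "obj-2 TEST-UNIQUE ::= {IDENTIFIED BY 2 TYPE AType2}\n" =
            lists:flatten(gen_obj(2)),
        ok.
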
diff --git a/lib/asn1/test/testValueTest.erl b/lib/asn1/test/testValueTest.erl
new file mode 100644
index 0000000000..8a8e973621
--- /dev/null
+++ b/lib/asn1/test/testValueTest.erl
@@ -0,0 +1,114 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+-module(testValueTest).
+
+-export([main/0]).
+
+main() ->
+ M = 'ValueTest',
+
+ %% Basic types
+ 12 = M:'vANY'(),
+ true = M:'vBOOLEAN'(),
+ 12 = M:'vINTEGER'(),
+ 0 = M:'vINTEGERNNL'(),
+ button1 = M:'vENUMERATED'(),
+ [zero,two] = M:'vBS'(),
+ 'NULL' = M:'vNULL'(),
+ <<16#31,16#32,16#33>> = M:'vOS'(),
+
+ %% OID
+ {2,1,1} = M:'vOD'(),
+ {1,2} = M:'integer-first'(),
+ {2,4,5} = M:'rel-oid-1'(),
+ {0,2,4,5} = M:'include-roid'(),
+ {1,2,1} = M:'include-oid'(),
+ {1,2,1,2,4,5,42} = M:'include-all'(),
+
+ %% Character strings
+ "01234567" = M:'numericstring'(),
+ "PrintableString" = M:'printablestring'(),
+ "VisibleString" = M:'visiblestring'(),
+ [0,13] = M:'cr'(),
+ ["First line",[0,13],"Second line"] = M:'ia5string1'(),
+ [[5,5],[4,4],[6,6]] = M:'ia5string2'(),
+ "TeletexString" = M:'teletexstring'(),
+ "VideotexString" = M:'videotexstring'(),
+ "97100211-0500" = M:'utctime'(),
+ "19971002103130.5" = M:'generalizedtime'(),
+ "ObjectDescriptor" = M:'objectdescriptor'(),
+ "GraphicString" = M:'graphicstring'(),
+ "GeneralString" = M:'generalstring'(),
+ "BMPString" = M:'bmpstring1'(),
+ [0,0,0,65] = M:'latinCapitalLetterA'(),
+ [0,0,3,145] = M:'greekCapitalLetterSigma'(),
+ ["This is a capital A: ",
+ [0,0,0,65],
+ ", and a capital sigma: ",
+ [0,0,3,145],
+ "; try and spot the difference!"] = M:'my-universalstring'(),
+
+ %% Integers
+ 42 = M:someInteger(),
+ 42 = M:otherInteger(),
+ {'IntegerSeq',42} = M:integerSeq1(),
+
+ %% Value from object
+ 2 = M:'int-from-object-1'(),
+ 4 = M:'int-from-object-2'(),
+ roundtrip_error('II', 1),
+ roundtrip('II', 2),
+ roundtrip('II', 3),
+ roundtrip('II', 4),
+ roundtrip_error('II', 5),
+
+ %% Recursive value definitions.
+ {'OctetStringSeq',<<16#40,16#41,16#42>>} = M:octetStringSeq1(),
+ <<16#40,16#41,16#42>> = M:otherOctetString(),
+ <<16#40,16#41,16#42>> = M:someOctetString(),
+ {'OctetStringSeq',<<16#40,16#41,16#42>>} = M:octetStringSeq2(),
+ {'OctetStringSeq',<<16#40,16#41,16#FF>>} = M:octetStringSeq3(),
+ <<16#40,16#41,16#FF>> = M:'os-1'(),
+ <<16#40,16#41,16#FF>> = M:'os-2'(),
+
+ %% Recursive BIT STRING definitions.
+ {'BsSeq',<<2#101101:6>>,[c]} = M:bsSeq1(),
+ {'BsSeq',<<2#101101:6>>,[c]} = M:bsSeq2(),
+ {'BsSeq',<<2#101:3>>,[a,c]} = M:bsSeq3(),
+ <<2#101101:6>> = M:someBitString(),
+ <<2#101101:6>> = M:otherBitString(),
+ <<2#101:3>> = M:bsFromObject(),
+ <<2#101:3>> = M:bsFromObjectInd(),
+ [c] = M:someNamedBs(),
+ [c] = M:someOtherNamedBs(),
+
+ ok.
+
+
+roundtrip(T, V) ->
+ asn1_test_lib:roundtrip('ValueTest', T, V).
+
+roundtrip_error(T, V) ->
+ try asn1_test_lib:roundtrip('ValueTest', T, V) of
+ ok ->
+ test_server:fail()
+ catch _:_ ->
+ ok
+ end.
diff --git a/lib/asn1/test/testX420.erl b/lib/asn1/test/testX420.erl
deleted file mode 100644
index 4ddc55dc16..0000000000
--- a/lib/asn1/test/testX420.erl
+++ /dev/null
@@ -1,93 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-%%
-%%-------------------------------------------------------------------
-
--module(testX420).
-
--export([compile/3, ticket7759/2]).
-
--include_lib("test_server/include/test_server.hrl").
-
-
-compile(Erule, Options, Config) ->
- Specs0 = specs(),
- 99 = length(Specs0),
- CaseDir = ?config(case_dir, Config),
- Specs = [filename:join(x420, Spec) || Spec <- Specs0],
- asn1_test_lib:compile_all(Specs, Config, [Erule,{i,CaseDir}|Options]).
-
-specs() ->
- ["ACSE-1", "AuthenticationFramework", "BasicAccessControl",
- "CertificateExtensions", "Character-Coding-Attributes",
- "Character-Presentation-Attributes", "Character-Profile-Attributes",
- "Colour-Attributes", "DOR-definition", "DSAOperationalAttributeTypes",
- "Default-Value-Lists", "DirectoryAbstractService",
- "DirectoryAccessProtocol", "DirectoryInformationShadowProtocol",
- "DirectoryOperationalBindingManagementProtocol",
- "DirectoryOperationalBindingTypes", "DirectoryProtectionMappings",
- "DirectoryShadowAbstractService", "DirectorySystemProtocol",
- "DistributedOperations", "Document-Profile-Descriptor",
- "EnhancedSecurity", "External-References", "GULSProtectionMappings",
- "GenericProtectingTransferSyntax", "Geo-Gr-Coding-Attributes",
- "Geo-Gr-Presentation-Attributes", "Geo-Gr-Profile-Attributes",
- "GulsSecurityExchanges", "GulsSecurityTransformations",
- "HierarchicalOperationalBindings", "IPMSAbstractService",
- "IPMSAutoActionTypes", "IPMSExtendedBodyPartTypes",
- "IPMSExtendedBodyPartTypes2", "IPMSExtendedVoiceBodyPartType",
- "IPMSFileTransferBodyPartType", "IPMSForwardedContentBodyPartType",
- "IPMSForwardedReportBodyPartType", "IPMSFunctionalObjects",
- "IPMSHeadingExtensions", "IPMSInformationObjects",
- "IPMSMessageStoreAttributes", "IPMSObjectIdentifiers",
- "IPMSObjectIdentifiers2", "IPMSSecurityExtensions", "IPMSUpperBounds",
- "ISO-STANDARD-9541-FONT-ATTRIBUTE-SET", "ISO8571-FTAM", "ISO9541-SN",
- "Identifiers-and-Expressions", "InformationFramework",
- "Interchange-Data-Elements", "Layout-Descriptors", "Link-Descriptors",
- "Location-Expressions", "Logical-Descriptors", "MHSObjectIdentifiers",
- "MHSProtocolObjectIdentifiers", "MSAbstractService",
- "MSAccessProtocol", "MSGeneralAttributeTypes",
- "MSGeneralAutoActionTypes", "MSMatchingRules", "MSObjectIdentifiers",
- "MSUpperBounds", "MTAAbstractService", "MTSAbstractService",
- "MTSAbstractService88", "MTSAccessProtocol", "MTSObjectIdentifiers",
- "MTSUpperBounds", "Notation", "ObjectIdentifiers",
- "OperationalBindingManagement", "PKCS7", "PKCS7BodyPartType",
- "Protected-Part-Descriptors", "ProtocolObjectIdentifiers",
- "Raster-Gr-Coding-Attributes", "Raster-Gr-Presentation-Attributes",
- "Raster-Gr-Profile-Attributes", "Reliable-Transfer-APDU",
- "Remote-Operations-Abstract-Syntaxes",
- "Remote-Operations-Generic-ROS-PDUs",
- "Remote-Operations-Information-Objects-extensions",
- "Remote-Operations-Information-Objects",
- "Remote-Operations-Realizations",
- "Remote-Operations-Useful-Definitions", "SelectedAttributeTypes",
- "SeseAPDUs", "SpkmGssTokens", "Style-Descriptors", "Subprofiles",
- "Temporal-Relationships", "Text-Units", "UpperBounds",
- "UsefulDefinitions", "Videotex-Coding-Attributes"].
-
-ticket7759(_Erule,_Config) ->
- Encoded = encoded_msg(),
- io:format("Testing ticket7759 ...~n",[]),
- {ok, ContentInfo} = 'PKCS7':decode('ContentInfo',Encoded),
- {'ContentInfo',_Id,PKCS7_content} = ContentInfo,
- {ok,_} = 'PKCS7':decode('SignedData',PKCS7_content),
- ok.
-
-
-encoded_msg() ->
- <<48,128,6,9,42,134,72,134,247,13,1,7,2,160,128,48,128,2,1,1,49,11,48,9,6,5,43,14,3,2,26,5,0,48,128,6,9,42,134,72,134,247,13,1,7,1,160,128,36,128,0,0,0,0,0,0, 49,130,1,192,48,130,1,188,2,1,1,48,50,48,38,49,17,48,15,6,3,85,4,3,12,8,65,100,109,105,110,67,65,49,49,17,48,15,6,3,85,4,10,12,8,69,82,73,67,83,83,79,78,2,8,15,151,245,186,21,23,240,96,48,9,6,5,43,14,3,2,26,5,0,160,129,229,48,17,6,10,96,134,72,1,134,248,69,1,9,2,49,3,19,1,51,48,17,6,10,96,134,72,1,134,248,69,1,9,3,49,3,19,1,51,48,24,6,9,42,134,72,134,247,13,1,9,3,49,11,6,9,42,134,72,134,247,13,1,7,1,48,28,6,9,42,134,72,134,247,13,1,9,5,49,15,23,13,48,56,49,50,49,48,48,57,53,52,50,51,90,48,28,6,10,96,134,72,1,134,248,69,1,9,7,49,14,19,12,49,53,50,56,49,52,50,52,48,57,53,53,48,32,6,10,96,134,72,1,134,248,69,1,9,5,49,18,4,16,165,115,177,71,78,88,239,113,78,56,98,98,18,202,217,235,48,32,6,10,96,134,72,1,134,248,69,1,9,6,49,18,4,16,227,174,230,251,43,153,252,65,11,93,231,83,34,18,55,46,48,35,6,9,42,134,72,134,247,13,1,9,4,49,22,4,20,218,57,163,238,94,107,75,13,50,85,191,239,149,96,24,144,175,216,7,9,48,13,6,9,42,134,72,134,247,13,1,1,1,5,0,4,129,128,106,233,116,125,140,51,133,173,63,41,54,138,214,211,89,215,169,125,98,77,16,222,216,240,211,79,125,111,87,186,73,63,253,204,107,102,177,63,174,197,224,212,231,172,149,246,33,68,223,67,102,93,64,152,152,5,216,102,247,134,36,197,150,236,57,77,56,138,95,71,204,31,23,149,241,213,78,172,165,249,100,187,12,45,19,57,67,120,54,63,15,239,41,217,127,61,254,60,201,104,68,3,135,214,206,93,253,255,192,94,56,107,68,210,57,61,41,249,47,156,130,244,52,12,163,216,236,69,0,0,0,0,0,0>>.
diff --git a/lib/asn1/test/test_compile_options.erl b/lib/asn1/test/test_compile_options.erl
index 7f358e863c..4b6357a395 100644
--- a/lib/asn1/test/test_compile_options.erl
+++ b/lib/asn1/test/test_compile_options.erl
@@ -24,7 +24,7 @@
-export([wrong_path/1,comp/2,path/1,ticket_6143/1,noobj/1,
- record_name_prefix/1,verbose/1,warnings_as_errors/1]).
+ record_name_prefix/1,verbose/1]).
%% OTP-5689
wrong_path(Config) ->
@@ -132,43 +132,6 @@ verbose(Config) when is_list(Config) ->
?line [] = test_server:capture_get(),
ok.
-warnings_as_errors(Config) when is_list(Config) ->
- PrivDir = ?config(priv_dir,Config),
- Asn1File = filename:join([PrivDir,"WERROR.asn1"]),
- OutFile = filename:join([PrivDir,"WERROR.erl"]),
- Opts = [{outdir,PrivDir},noobj,verbose],
-
- %% Generate WERR.asn to emit warning
- %% Warning: Wrong format of type/value
- %% false/{'Externalvaluereference',_,'WERR',noInvokeId}
- Warn = <<"WERROR DEFINITIONS IMPLICIT TAGS ::=\n"
- "\n"
- "BEGIN\n"
- "\n"
- "InvokeId ::= CHOICE\n"
- "{\n"
- " present INTEGER,\n"
- " absent NULL\n"
- "}\n"
- "\n"
- "noInvokeId InvokeId ::= absent:NULL\n"
- "\n"
- "NoInvokeId InvokeId ::= {noInvokeId}\n"
- "\n"
- "END -- end of useful definitions.\n">>,
- ?line ok = file:write_file(Asn1File, Warn),
-
- %% Test warnings_as_errors compile
- ?line false = filelib:is_regular(OutFile),
- ?line {error, _} = asn1ct:compile(Asn1File, [warnings_as_errors|Opts]),
- ?line false = filelib:is_regular(OutFile),
-
- %% Test normal compile
- ?line ok = asn1ct:compile(Asn1File, Opts),
- ?line true = filelib:is_regular(OutFile),
- ?line ok = file:delete(OutFile),
- ok.
-
outfiles_check(OutDir) ->
outfiles_check(OutDir,outfiles1()).
diff --git a/lib/asn1/vsn.mk b/lib/asn1/vsn.mk
index daaf26a17f..d4c46863a3 100644
--- a/lib/asn1/vsn.mk
+++ b/lib/asn1/vsn.mk
@@ -1,2 +1 @@
-#next version number to use is 2.0
-ASN1_VSN = 3.0.3
+ASN1_VSN = 4.0
diff --git a/lib/common_test/doc/src/event_handler_chapter.xml b/lib/common_test/doc/src/event_handler_chapter.xml
index 45f01c12ec..f39f391818 100644
--- a/lib/common_test/doc/src/event_handler_chapter.xml
+++ b/lib/common_test/doc/src/event_handler_chapter.xml
@@ -59,6 +59,15 @@
Event handlers plugged into this manager will receive the events from
all the test nodes as well as information from the CT Master server
itself.</p>
+
+      <p>User-specific event handlers may be plugged into a Common Test event
+      manager, either by telling Common Test to install them before the test
+      run (see below), or by adding the handlers dynamically during the test
+      run by means of
+      <c>gen_event:add_handler/3</c> or <c>gen_event:add_sup_handler/3</c>.
+      In the latter case, a reference to the Common Test event manager is
+      required. To get it, call <c>ct:get_event_mgr_ref/0</c> or (on the CT
+      Master node) <c>ct_master:get_event_mgr_ref/0</c>.</p>
</section>
<section>
<marker id="usage"></marker>
diff --git a/lib/common_test/doc/src/install_chapter.xml b/lib/common_test/doc/src/install_chapter.xml
index 7f8c606324..31125b945c 100644
--- a/lib/common_test/doc/src/install_chapter.xml
+++ b/lib/common_test/doc/src/install_chapter.xml
@@ -34,7 +34,7 @@
<title>General information</title>
<p>The two main interfaces for running tests with Common Test
- are an executable program named ct_run and an
+ are an executable program named <c>ct_run</c> and an
erlang module named <c>ct</c>. The ct_run program
is compiled for the underlying operating system (e.g. Unix/Linux
or Windows) during the build of the Erlang/OTP system, and is
@@ -43,67 +43,10 @@
The <c>ct</c> interface functions can be called from the Erlang shell,
or from any Erlang function, on any supported platform.</p>
- <p>A legacy Bourne shell script - named run_test - exists,
- which may be manually generated and installed. This script may be used
- instead of the ct_run program mentioned above, e.g. if the user
- wishes to modify or customize the Common Test start flags in a simpler
- way than making changes to the ct_run C program.</p>
-
<p>The Common Test application is installed with the Erlang/OTP
system and no additional installation step is required to start using
- Common Test by means of the ct_run executable program, and/or the interface
- functions in the <c>ct</c> module. If you wish to use the legacy Bourne
- shell script version run_test, however, this script needs to be
- generated first, according to the instructions below.</p>
-
- <note><p>Before reading on, please note that since Common Test version
- 1.5, the run_test shell script is no longer required for starting
- tests with Common Test from the OS command line. The ct_run
- program (descibed above) is the new recommended command line interface
- for Common Test. The shell script exists mainly for legacy reasons and
- may not be updated in future releases of Common Test. It may even be removed.
- </p></note>
-
- <p>Optional step to generate a shell script for starting Common Test:</p>
- <p>To generate the run_test shell script, navigate to the
- <c><![CDATA[common_test-<vsn>]]></c> directory, located among the other
- OTP applications (under the OTP lib directory). Here execute the
- <c>install.sh</c> script with argument <c>local</c>:</p>
-
- <p><c>
- $ ./install.sh local
- </c></p>
-
- <p>This generates the executable run_test script in the
- <c><![CDATA[common_test-<vsn>/priv/bin]]></c> directory. The script
- will include absolute paths to the Common Test and Test Server
- application directories, so it's possible to copy or move the script to
- a different location on the file system, if desired, without having to
- update it. It's of course possible to leave the script under the
- <c>priv/bin</c> directory and update the PATH variable accordingly (or
- create a link or alias to it).</p>
-
- <p>If you, for any reason, have copied Common Test and Test Server
- to a different location than the default OTP lib directory, you can
- generate a run_test script with a different top level directory,
- simply by specifying the directory, instead of <c>local</c>, when running
- <c>install.sh</c>. Example:</p>
-
- <p><c>
- $ install.sh /usr/local/test_tools
- </c></p>
-
- <p>Note that the <c><![CDATA[common_test-<vsn>]]></c> and
- <c><![CDATA[test_server-<vsn>]]></c> directories must be located under the
- same top directory. Note also that the install script does not copy files
- or update environment variables. It only generates the run_test
- script.</p>
-
- <p>Whenever you install a new version of Erlang/OTP, the run_test
- script needs to be regenerated, or updated manually with new directory names
- (new version numbers), for it to "see" the latest Common Test and Test Server
- versions.</p>
-
+ Common Test by means of the <c>ct_run</c> executable program, and/or
+ the interface functions in the <c>ct</c> module.</p>
</section>
</chapter>
diff --git a/lib/common_test/doc/src/notes.xml b/lib/common_test/doc/src/notes.xml
index 94738d2eff..472e3b7833 100644
--- a/lib/common_test/doc/src/notes.xml
+++ b/lib/common_test/doc/src/notes.xml
@@ -32,6 +32,226 @@
<file>notes.xml</file>
</header>
+<section><title>Common_Test 1.10.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+	    A fault in the Common Test logger process, which caused
+	    the application to crash when running on a long-name
+	    node, has been corrected.</p>
+ <p>
+ Own Id: OTP-12643</p>
+ </item>
+ <item>
+ <p>
+ A 'wait_for_prompt' option in ct_telnet:expect/3 has been
+ introduced which forces the function to not return until
+ a prompt string has been received, even if other expect
+ patterns have already been found.</p>
+ <p>
+ Own Id: OTP-12688 Aux Id: seq12818 </p>
+ </item>
+ <item>
+ <p>
+ If the last expression in a test case causes a timetrap
+ timeout, the stack trace is ignored and not printed to
+ the test case log file. This happens because the
+ {Suite,TestCase,Line} info is not available in the stack
+ trace in this scenario, due to tail call elimination.
+ Common Test has been modified to handle this situation by
+ inserting a {Suite,TestCase,last_expr} tuple in the
+ correct place and printing the stack trace as expected.</p>
+ <p>
+ Own Id: OTP-12697 Aux Id: seq12848 </p>
+ </item>
+ <item>
+ <p>
+	    Fixed a buffer problem in ct_netconfc which could cause
+	    some messages to be buffered forever.</p>
+ <p>
+ Own Id: OTP-12698 Aux Id: seq12844 </p>
+ </item>
+ <item>
+ <p>
+ The VTS mode in Common Test has been modified to use a
+ private version of the Webtool application (ct_webtool).</p>
+ <p>
+ Own Id: OTP-12704 Aux Id: OTP-10922 </p>
+ </item>
+ <item>
+ <p>
+ Add possibility to add user capabilities in
+ <c>ct_netconfc:hello/3</c>.</p>
+ <p>
+ Own Id: OTP-12707 Aux Id: seq12846 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Common_Test 1.10</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ The tests overview file, index.html, did not always get
+ updated correctly after a new test run. This was because
+ of a bug in the Common Test log cache mechanism which has
+ now been corrected.</p>
+ <p>
+ Own Id: OTP-11400</p>
+ </item>
+ <item>
+ <p>
+ When a successful test case returns, Common Test should,
+ according to the documentation, send a tc_done event to
+ the event handlers with Result = ok in the data field.
+ However, Common Test sets Result to the return value of
+	    the test case instead. Common Test has now been modified
+	    to comply with the documentation.</p>
+ <p>
+ *** POTENTIAL INCOMPATIBILITY ***</p>
+ <p>
+ Own Id: OTP-12279 Aux Id: seq12737, OTP-12531 </p>
+ </item>
+ <item>
+ <p>
+ A ct_telnet:expect/3 call could never be aborted before
+ an idle_timeout, even if total_timeout had been set to a
+ lower value (i.e. a shorter time). This problem has been
+ fixed.</p>
+ <p>
+ Own Id: OTP-12335</p>
+ </item>
+ <item>
+ <p>
+ The undocumented return value {skipped,Reason} from
+ config functions and test cases was handled
+ inconsistently. Test cases were e.g. reported as
+ "skipped" to CT Hook functions, but "successful" to event
+ handlers. Now, the above return value is consistently
+ handled the same way as {skip,Reason} and this has also
+ been documented.</p>
+ <p>
+ *** POTENTIAL INCOMPATIBILITY ***</p>
+ <p>
+ Own Id: OTP-12359 Aux Id: seq12760 </p>
+ </item>
+ <item>
+ <p>
+ The Erlang source code to HTML generator would sometimes
+ fail because epp:parse_erl_form/1 could not find and
+ expand required macros in included header files. The
+ problem has been solved by making sure common_test always
+ passes the full include path to epp. Also, a bug that
+ could cause erl_syntax:revert/1 to fail because of a
+ badly formed syntax tree has been corrected.</p>
+ <p>
+ Own Id: OTP-12419</p>
+ </item>
+ <item>
+ <p>
+ A missing group option in the ct_run help text has been
+ added.</p>
+ <p>
+ Own Id: OTP-12433 Aux Id: seq12788 </p>
+ </item>
+ <item>
+ <p>
+ Printouts by means of ct:log/2/3 or ct:pal/2/3 from the
+ hook functions on_tc_fail/2 and on_tc_skip/2 would (quite
+ unexpectedly) end up in the "unexpected i/o" log file
+ instead of in the test case log file. This behaviour has
+ been changed so that now, all printouts (including stdio
+ printouts) from these hook functions will be routed to
+ the test case log file.</p>
+ <p>
+ Own Id: OTP-12468</p>
+ </item>
+ <item>
+ <p>
+	    void - accept an RPC reply of the form
+ void - accept an RPC reply on the form
+ {ok,[simple_xml()]}, and in this event return only the
+ atom ok.</p>
+ <p>
+ Own Id: OTP-12491 Aux Id: seq12797 </p>
+ </item>
+ <item>
+ <p>
+ OTP-11971 erroneously changed the handling of relative
+ paths for incl_dirs specified in the cover spec file.
+	    This has now been corrected so that these paths are
+	    expected to be relative to the directory where the
+	    cover spec file itself is stored.</p>
+ <p>
+ Own Id: OTP-12498 Aux Id: OTP-11971 </p>
+ </item>
+ <item>
+ <p>
+ Some test cases have been updated to use ct:sleep/1
+	    instead of timer:sleep/1. The reason is that the sleep
+ times need to be scaled to compensate for slow execution
+ (e.g. when cover is running).</p>
+ <p>
+ Own Id: OTP-12574</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Common Test now exports a function,
+ ct:get_event_mgr_ref/0, that returns the name of the
+ Common Test event manager. This makes it possible to plug
+ in event handlers to the event manager while tests are
+ running (using the gen_event API).</p>
+ <p>
+ Own Id: OTP-12506 Aux Id: seq12802 </p>
+ </item>
+ <item>
+ <p>
+ When a test case (or configuration function) fails
+ because of an exit signal from a linked process, Common
+ Test previously passed only the reason for process
+ termination to the CT post hook functions and the event
+ handlers (in the tc_done event). This has been changed so
+ that now the tuple {'EXIT',ReasonForProcessTermination}
+ is passed instead. This makes it much easier in the CT
+ post hook functions to distinguish a failure of this sort
+ from other types of errors and from the return value of a
+ successful test case.</p>
+ <p>
+ *** POTENTIAL INCOMPATIBILITY ***</p>
+ <p>
+ Own Id: OTP-12531 Aux Id: OTP-12279 </p>
+ </item>
+ <item>
+ <p>
+ A new feature has been introduced in ct_telnet:get_data/1
+ that makes it possible to automatically poll the telnet
+ connection in case an incomplete string (one that has not
+ yet been terminated by a newline) remains in the receive
+ buffer. The polling is controlled by two new telnet
+ config values, which are documented in the ct_telnet
+ reference manual. The polling mechanism is disabled by
+ default (making the get_data/1 function backwards
+ compatible).</p>
+ <p>
+ Own Id: OTP-12627</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Common_Test 1.9</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/common_test/doc/src/run_test_chapter.xml b/lib/common_test/doc/src/run_test_chapter.xml
index 864f82cb63..df60e5f7f2 100644
--- a/lib/common_test/doc/src/run_test_chapter.xml
+++ b/lib/common_test/doc/src/run_test_chapter.xml
@@ -1005,6 +1005,31 @@
for starting the tests, the relaxed scanner
mode is enabled by means of the tuple: <c>{allow_user_terms,true}</c></p>
</section>
+ <section>
+ <title>Reading test specification terms</title>
+ <p>It's possible to look up terms in the current test specification
+ (i.e. the spec that's been used to configure and run the current test).
+ The function <c>get_testspec_terms()</c> returns a list of all test spec
+ terms (both config- and test terms) and <c>get_testspec_terms(Tags)</c>
+ returns the term (or a list of terms) matching the tag (or tags) in
+ <c>Tags</c>.</p>
+ <p>For example, in the test specification:</p>
+ <pre>
+ ...
+ {label, my_server_smoke_test}.
+ {config, "../../my_server_setup.cfg"}.
+ {config, "../../my_server_interface.cfg"}.
+ ...</pre>
+      <p>In a test suite or a CT hook function, for example:</p>
+ <pre>
+ ...
+ [{label,[{_Node,TestType}]}, {config,CfgFiles}] =
+ ct:get_testspec_terms([label,config]),
+
+ [verify_my_server_cfg(TestType, CfgFile) || {Node,CfgFile} &lt;- CfgFiles,
+ Node == node()];
+ ...</pre>
+ </section>
</section>
<section>
diff --git a/lib/common_test/install.sh.in b/lib/common_test/install.sh.in
deleted file mode 100644
index 5108c7a259..0000000000
--- a/lib/common_test/install.sh.in
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/bin/sh
-
-CT_ROOT=$1
-CT_VSN=@CT_VSN@
-TS_VSN=@TS_VSN@
-
-if [ -z "$CT_ROOT" ]
-then
- echo "install.sh: need CT_ROOT (absolute) directory or 'local' as argument"
- exit 1
-fi
-
-if [ $CT_ROOT = "local" ]
-then
- CT_DIR=`pwd`
- cd priv
- sed -e "s,@CTPATH@,$CT_DIR/ebin," \
- -e "s,@TSPATH@,$CT_DIR/../test_server/ebin," \
- run_test.in > bin/run_test
- chmod 775 bin/run_test
- echo "install successful, start script created in " $CT_ROOT/common_test-$CT_VSN/priv/bin
-else
-
- if [ ! -d "$CT_ROOT" ]
- then
- echo "install.sh: CT_ROOT argument must be a valid directory"
- exit 1
- fi
-
- if [ `echo $CT_ROOT | awk '{ print substr($1,1,1) }'` != "/" ]
- then
- echo "install.sh: need an absolute path to CT_ROOT"
- exit 1
- fi
-
- if [ ! -d $CT_ROOT/common_test-$CT_VSN ]
- then
- echo "install.sh: The directory $CT_ROOT/common_test-$CT_VSN does not exist"
- exit 1
- fi
-
- if [ -d $CT_ROOT/common_test-$CT_VSN/priv ]
- then
- cd $CT_ROOT/common_test-$CT_VSN/priv
- sed -e "s;@CTPATH@;$CT_ROOT/common_test-$CT_VSN/ebin;" \
- -e "s;@TSPATH@;$CT_ROOT/test_server-$TS_VSN/ebin;" \
- run_test.in > bin/run_test
- chmod 775 bin/run_test
- echo "install successful, start script created in " $CT_ROOT/common_test-$CT_VSN/priv/bin
- fi
-fi
-
-
diff --git a/lib/common_test/priv/Makefile.in b/lib/common_test/priv/Makefile.in
index 5a9fabbe45..7765b06f95 100644
--- a/lib/common_test/priv/Makefile.in
+++ b/lib/common_test/priv/Makefile.in
@@ -66,12 +66,7 @@ JS = jquery-latest.js jquery.tablesorter.min.js
# Rules
#
-include ../../test_server/vsn.mk
debug opt:
- $(V_at)sed -e 's;@CT_VSN@;$(VSN);' \
- -e 's;@TS_VSN@;$(TEST_SERVER_VSN);' \
- ../install.sh.in > install.sh
- $(V_at)chmod 775 install.sh
docs:
diff --git a/lib/common_test/priv/run_test.in b/lib/common_test/priv/run_test.in
deleted file mode 100644
index 1508751e4f..0000000000
--- a/lib/common_test/priv/run_test.in
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/bin/sh
-
-args=""
-
-while [ $1 ]; do
- if [ $1 = "-config" ]; then
- args="$args -ct_config";
- elif [ $1 = "-decrypt_key" ]; then
- args="$args -ct_decrypt_key";
- elif [ $1 = "-decrypt_file" ]; then
- args="$args -ct_decrypt_file";
- elif [ $1 = "-vts" ]; then
- vts=1;
- args="$args $1";
- elif [ $1 = "-browser" ]; then
- browser=$2;
- args="$args $1";
- elif [ $1 = "-shell" ]; then
- shell=1;
- args="$args $1";
- elif [ $1 = "-ctname" ]; then
- ctname=$2;
- args="$args";
- elif [ $1 = "-ctmaster" ]; then
- master=1;
- args="$args";
- else
- args="$args $1"
- fi
- shift
-done
-
-if [ $vts ]; then
- erl -sname ct \
- -pa @CTPATH@ \
- -pa @TSPATH@ \
- -s webtool script_start vts $browser \
- -s ct_run script_start \
- $args;
-elif [ $shell ]; then
- erl -sname ct \
- -pa @CTPATH@ \
- -pa @TSPATH@ \
- -s ct_run script_start \
- $args;
-elif [ $ctname ]; then
- erl -sname $ctname \
- -pa @CTPATH@ \
- -pa @TSPATH@ \
- $args;
-elif [ $master ]; then
- erl -sname ct_master \
- -pa @CTPATH@ \
- -pa @TSPATH@ \
- $args;
-else
- erl -sname ct \
- -pa @CTPATH@ \
- -pa @TSPATH@ \
- -s ct_run script_start \
- -s erlang halt \
- $args
-fi
diff --git a/lib/common_test/src/Makefile b/lib/common_test/src/Makefile
index 2723b066f0..e3d5102db8 100644
--- a/lib/common_test/src/Makefile
+++ b/lib/common_test/src/Makefile
@@ -62,6 +62,8 @@ MODULES= \
ct_telnet_client \
ct_make \
vts \
+ ct_webtool \
+ ct_webtool_sup \
unix_telnet \
ct_config \
ct_config_plain \
diff --git a/lib/common_test/src/common_test.app.src b/lib/common_test/src/common_test.app.src
index 580d5dbd7b..0be1466fc9 100644
--- a/lib/common_test/src/common_test.app.src
+++ b/lib/common_test/src/common_test.app.src
@@ -63,9 +63,10 @@
ct_master_logs]},
{applications, [kernel,stdlib]},
{env, []},
- {runtime_dependencies,["xmerl-1.3.7","webtool-0.8.10","tools-2.6.14",
- "test_server-3.7.1","stdlib-2.0","ssh-3.0.1",
- "snmp-4.25.1","sasl-2.4","runtime_tools-1.8.14",
- "kernel-3.0","inets-5.10","erts-6.0",
- "debugger-4.0","crypto-3.3","compiler-5.0"]}]}.
+ {runtime_dependencies,["xmerl-1.3.8","tools-2.8",
+ "test_server-3.9","stdlib-2.5","ssh-4.0",
+ "snmp-5.1.2","sasl-2.4.2","runtime_tools-1.8.16",
+ "kernel-4.0","inets-6.0","erts-7.0",
+ "debugger-4.1","crypto-3.6","compiler-6.0",
+ "observer-2.1"]}]}.
diff --git a/lib/common_test/src/ct.erl b/lib/common_test/src/ct.erl
index 85afdc7834..5ed1346f1e 100644
--- a/lib/common_test/src/ct.erl
+++ b/lib/common_test/src/ct.erl
@@ -52,6 +52,7 @@
-module(ct).
-include("ct.hrl").
+-include("ct_util.hrl").
%% Command line user interface for running tests
-export([install/1, run/1, run/2, run/3,
@@ -77,6 +78,8 @@
%% Other interface functions
-export([get_status/0, abort_current_testcase/1,
+ get_event_mgr_ref/0,
+ get_testspec_terms/0, get_testspec_terms/1,
encrypt_config_file/2, encrypt_config_file/3,
decrypt_config_file/2, decrypt_config_file/3]).
@@ -461,6 +464,50 @@ reload_config(Required)->
ct_config:reload_config(Required).
%%%-----------------------------------------------------------------
+%%% @spec get_testspec_terms() -> TestSpecTerms | undefined
+%%% TestSpecTerms = [{Tag,Value}]
+%%% Value = [term()]
+%%%
+%%% @doc Get a list of all test specification terms used to
+%%% configure and run this test.
+%%%
+get_testspec_terms() ->
+ case ct_util:get_testdata(testspec) of
+ undefined ->
+ undefined;
+ CurrSpecRec ->
+ ct_testspec:testspec_rec2list(CurrSpecRec)
+ end.
+
+%%%-----------------------------------------------------------------
+%%% @spec get_testspec_terms(Tags) -> TestSpecTerms | undefined
+%%% Tags = [Tag] | Tag
+%%% Tag = atom()
+%%% TestSpecTerms = [{Tag,Value}] | {Tag,Value}
+%%% Value = [{Node,term()}] | [term()]
+%%% Node = atom()
+%%%
+%%% @doc Read one or more terms from the test specification used
+%%% to configure and run this test. Tag is any valid test specification
+%%% tag, such as <c>label</c>, <c>config</c> or <c>logdir</c>.
+%%% User specific terms are also available to read if the
+%%% <c>allow_user_terms</c> option has been set. Note that all value tuples
+%%% returned, except user terms, will have the node name as the first element.
+%%% Note also that in order to read test terms, use <c>Tag = tests</c>
+%%% (rather than <c>suites</c>, <c>groups</c> or <c>cases</c>). Value is
+%%% then the list of *all* tests of the form
+%%% <c>[{Node,Dir,[{TestSpec,GroupsAndCases},...]},...]</c>, where
+%%% <c>GroupsAndCases = [{Group,[Case]}] | [Case]</c>.
+get_testspec_terms(Tags) ->
+ case ct_util:get_testdata(testspec) of
+ undefined ->
+ undefined;
+ CurrSpecRec ->
+ ct_testspec:testspec_rec2list(Tags, CurrSpecRec)
+ end.
+
+
+%%%-----------------------------------------------------------------
%%% @spec log(Format) -> ok
%%% @equiv log(default,50,Format,[])
log(Format) ->
@@ -1005,6 +1052,18 @@ abort_current_testcase(Reason) ->
test_server_ctrl:abort_current_testcase(Reason).
%%%-----------------------------------------------------------------
+%%% @spec get_event_mgr_ref() -> EvMgrRef
+%%% EvMgrRef = atom()
+%%%
+%%% @doc <p>Call this function to get a reference to the
+%%% CT event manager. The reference can be used, for example, to add
+%%% a user-specific event handler while tests are running.
+%%% Example:
+%%% <c>gen_event:add_handler(ct:get_event_mgr_ref(), my_ev_h, [])</c></p>
+get_event_mgr_ref() ->
+ ?CT_EVMGR_REF.
+
+%%%-----------------------------------------------------------------
%%% @spec encrypt_config_file(SrcFileName, EncryptFileName) ->
%%% ok | {error,Reason}
%%% SrcFileName = string()
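For illustration, a minimal sketch of how the new ct:get_testspec_terms/1 and
ct:get_event_mgr_ref/0 functions could be used from a test suite; the suite
callback and the event handler module my_ev_h below are hypothetical:

%% Sketch only: read the label and config terms from the current test
%% specification and attach a user event handler (my_ev_h is a
%% hypothetical gen_event callback module).
init_per_suite(Config) ->
    case ct:get_testspec_terms([label,config]) of
        undefined ->
            ok;                                   % not started from a spec
        [{label,Labels},{config,CfgFiles}] ->
            ct:log("Labels: ~p~nConfig files: ~p", [Labels,CfgFiles])
    end,
    ok = gen_event:add_handler(ct:get_event_mgr_ref(), my_ev_h, []),
    Config.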
diff --git a/lib/common_test/src/ct_config.erl b/lib/common_test/src/ct_config.erl
index 5c80a299f8..4b92ca6f8f 100644
--- a/lib/common_test/src/ct_config.erl
+++ b/lib/common_test/src/ct_config.erl
@@ -693,8 +693,7 @@ make_crypto_key(String) ->
{[K1,K2,K3],IVec}.
random_bytes(N) ->
- {A,B,C} = now(),
- random:seed(A, B, C),
+ random:seed(os:timestamp()),
random_bytes_1(N, []).
random_bytes_1(0, Acc) -> Acc;
diff --git a/lib/common_test/src/ct_conn_log_h.erl b/lib/common_test/src/ct_conn_log_h.erl
index cff02a46d9..2d15035cd8 100644
--- a/lib/common_test/src/ct_conn_log_h.erl
+++ b/lib/common_test/src/ct_conn_log_h.erl
@@ -34,6 +34,8 @@
-define(WIDTH,80).
+-define(now, os:timestamp()).
+
%%%-----------------------------------------------------------------
%%% Callbacks
init({GL,ConnLogs}) ->
@@ -72,14 +74,14 @@ handle_event({_Type, GL, _Msg}, State) when node(GL) /= node() ->
handle_event({_Type,GL,{Pid,{ct_connection,Mod,Action,ConnName},Report}},
State) ->
Info = conn_info(Pid,#conn_log{name=ConnName,action=Action,module=Mod}),
- write_report(now(),Info,Report,GL,State),
+ write_report(?now,Info,Report,GL,State),
{ok, State};
handle_event({_Type,GL,{Pid,Info=#conn_log{},Report}}, State) ->
- write_report(now(),conn_info(Pid,Info),Report,GL,State),
+ write_report(?now,conn_info(Pid,Info),Report,GL,State),
{ok, State};
handle_event({error_report,GL,{Pid,_,[{ct_connection,ConnName}|R]}}, State) ->
%% Error reports from connection
- write_error(now(),conn_info(Pid,#conn_log{name=ConnName}),R,GL,State),
+ write_error(?now,conn_info(Pid,#conn_log{name=ConnName}),R,GL,State),
{ok, State};
handle_event(_What, State) ->
{ok, State}.
diff --git a/lib/common_test/src/ct_cover.erl b/lib/common_test/src/ct_cover.erl
index c7f446dee9..b630a51835 100644
--- a/lib/common_test/src/ct_cover.erl
+++ b/lib/common_test/src/ct_cover.erl
@@ -174,7 +174,7 @@ get_spec_test(File) ->
[] -> [#cover{app=none, level=details}];
_ -> Res
end,
- case get_cover_opts(Apps, Terms, []) of
+ case get_cover_opts(Apps, Terms, Dir, []) of
E = {error,_} ->
E;
[CoverSpec] ->
@@ -205,124 +205,125 @@ collect_apps([], Apps) ->
%% get_cover_opts(Terms) -> AppCoverInfo
%% AppCoverInfo: [#cover{app=App,...}]
-get_cover_opts([App | Apps], Terms, CoverInfo) ->
- case get_app_info(App, Terms) of
+get_cover_opts([App | Apps], Terms, Dir, CoverInfo) ->
+ case get_app_info(App, Terms, Dir) of
E = {error,_} -> E;
AppInfo ->
AppInfo1 = files2mods(AppInfo),
- get_cover_opts(Apps, Terms, [AppInfo1|CoverInfo])
+ get_cover_opts(Apps, Terms, Dir, [AppInfo1|CoverInfo])
end;
-get_cover_opts([], _, CoverInfo) ->
+get_cover_opts([], _, _, CoverInfo) ->
lists:reverse(CoverInfo).
-%% get_app_info(App, Terms) -> App1
+%% get_app_info(App, Terms, Dir) -> App1
-get_app_info(App=#cover{app=none}, [{incl_dirs,Dirs}|Terms]) ->
- get_app_info(App, [{incl_dirs,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{incl_dirs,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", false, []) of
+get_app_info(App=#cover{app=none}, [{incl_dirs,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{incl_dirs,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{incl_dirs,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".beam", false, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.incl_mods,
- get_app_info(App#cover{incl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{incl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{incl_dirs_r,Dirs}|Terms]) ->
- get_app_info(App, [{incl_dirs_r,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{incl_dirs_r,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", true, []) of
+get_app_info(App=#cover{app=none}, [{incl_dirs_r,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{incl_dirs_r,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{incl_dirs_r,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".beam", true, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.incl_mods,
- get_app_info(App#cover{incl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{incl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{incl_mods,Mods1}|Terms]) ->
- get_app_info(App, [{incl_mods,none,Mods1}|Terms]);
-get_app_info(App=#cover{app=Name}, [{incl_mods,Name,Mods1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{incl_mods,Mods1}|Terms], Dir) ->
+ get_app_info(App, [{incl_mods,none,Mods1}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{incl_mods,Name,Mods1}|Terms], Dir) ->
Mods = App#cover.incl_mods,
- get_app_info(App#cover{incl_mods=Mods++Mods1},Terms);
+ get_app_info(App#cover{incl_mods=Mods++Mods1},Terms,Dir);
-get_app_info(App=#cover{app=none}, [{excl_dirs,Dirs}|Terms]) ->
- get_app_info(App, [{excl_dirs,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{excl_dirs,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", false, []) of
+get_app_info(App=#cover{app=none}, [{excl_dirs,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{excl_dirs,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{excl_dirs,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".beam", false, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.excl_mods,
- get_app_info(App#cover{excl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{excl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{excl_dirs_r,Dirs}|Terms]) ->
- get_app_info(App, [{excl_dirs_r,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{excl_dirs_r,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".beam", true, []) of
+get_app_info(App=#cover{app=none}, [{excl_dirs_r,Dirs}|Terms],Dir) ->
+ get_app_info(App, [{excl_dirs_r,none,Dirs}|Terms],Dir);
+get_app_info(App=#cover{app=Name}, [{excl_dirs_r,Name,Dirs}|Terms],Dir) ->
+ case get_files(Dirs, Dir, ".beam", true, []) of
E = {error,_} -> E;
Mods1 ->
Mods = App#cover.excl_mods,
- get_app_info(App#cover{excl_mods=Mods++Mods1},Terms)
+ get_app_info(App#cover{excl_mods=Mods++Mods1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{excl_mods,Mods1}|Terms]) ->
- get_app_info(App, [{excl_mods,none,Mods1}|Terms]);
-get_app_info(App=#cover{app=Name}, [{excl_mods,Name,Mods1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{excl_mods,Mods1}|Terms], Dir) ->
+ get_app_info(App, [{excl_mods,none,Mods1}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{excl_mods,Name,Mods1}|Terms], Dir) ->
Mods = App#cover.excl_mods,
- get_app_info(App#cover{excl_mods=Mods++Mods1},Terms);
+ get_app_info(App#cover{excl_mods=Mods++Mods1},Terms,Dir);
-get_app_info(App=#cover{app=none}, [{cross,Cross}|Terms]) ->
- get_app_info(App, [{cross,none,Cross}|Terms]);
-get_app_info(App=#cover{app=Name}, [{cross,Name,Cross1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{cross,Cross}|Terms], Dir) ->
+ get_app_info(App, [{cross,none,Cross}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{cross,Name,Cross1}|Terms], Dir) ->
Cross = App#cover.cross,
- get_app_info(App#cover{cross=Cross++Cross1},Terms);
+ get_app_info(App#cover{cross=Cross++Cross1},Terms,Dir);
-get_app_info(App=#cover{app=none}, [{src_dirs,Dirs}|Terms]) ->
- get_app_info(App, [{src_dirs,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{src_dirs,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".erl", false, []) of
+get_app_info(App=#cover{app=none}, [{src_dirs,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{src_dirs,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{src_dirs,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".erl", false, []) of
E = {error,_} -> E;
Src1 ->
Src = App#cover.src,
- get_app_info(App#cover{src=Src++Src1},Terms)
+ get_app_info(App#cover{src=Src++Src1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{src_dirs_r,Dirs}|Terms]) ->
- get_app_info(App, [{src_dirs_r,none,Dirs}|Terms]);
-get_app_info(App=#cover{app=Name}, [{src_dirs_r,Name,Dirs}|Terms]) ->
- case get_files(Dirs, ".erl", true, []) of
+get_app_info(App=#cover{app=none}, [{src_dirs_r,Dirs}|Terms], Dir) ->
+ get_app_info(App, [{src_dirs_r,none,Dirs}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{src_dirs_r,Name,Dirs}|Terms], Dir) ->
+ case get_files(Dirs, Dir, ".erl", true, []) of
E = {error,_} -> E;
Src1 ->
Src = App#cover.src,
- get_app_info(App#cover{src=Src++Src1},Terms)
+ get_app_info(App#cover{src=Src++Src1},Terms,Dir)
end;
-get_app_info(App=#cover{app=none}, [{src_files,Src1}|Terms]) ->
- get_app_info(App, [{src_files,none,Src1}|Terms]);
-get_app_info(App=#cover{app=Name}, [{src_files,Name,Src1}|Terms]) ->
+get_app_info(App=#cover{app=none}, [{src_files,Src1}|Terms], Dir) ->
+ get_app_info(App, [{src_files,none,Src1}|Terms], Dir);
+get_app_info(App=#cover{app=Name}, [{src_files,Name,Src1}|Terms], Dir) ->
Src = App#cover.src,
- get_app_info(App#cover{src=Src++Src1},Terms);
+ get_app_info(App#cover{src=Src++Src1},Terms,Dir);
-get_app_info(App, [_|Terms]) ->
- get_app_info(App, Terms);
+get_app_info(App, [_|Terms], Dir) ->
+ get_app_info(App, Terms, Dir);
-get_app_info(App, []) ->
+get_app_info(App, [], _) ->
App.
%% get_files(...)
-get_files([Dir|Dirs], Ext, Recurse, Files) ->
- case file:list_dir(Dir) of
+get_files([Dir|Dirs], RootDir, Ext, Recurse, Files) ->
+ DirAbs = filename:absname(Dir, RootDir),
+ case file:list_dir(DirAbs) of
{ok,Entries} ->
- {SubDirs,Matches} = analyse_files(Entries, Dir, Ext, [], []),
+ {SubDirs,Matches} = analyse_files(Entries, DirAbs, Ext, [], []),
if Recurse == false ->
- get_files(Dirs, Ext, Recurse, Files++Matches);
+ get_files(Dirs, RootDir, Ext, Recurse, Files++Matches);
true ->
- Files1 = get_files(SubDirs, Ext, Recurse, Files++Matches),
- get_files(Dirs, Ext, Recurse, Files1)
+ Files1 = get_files(SubDirs, RootDir, Ext, Recurse, Files++Matches),
+ get_files(Dirs, RootDir, Ext, Recurse, Files1)
end;
{error,Reason} ->
- {error,{Reason,Dir}}
+ {error,{Reason,DirAbs}}
end;
-get_files([], _Ext, _R, Files) ->
+get_files([], _RootDir, _Ext, _R, Files) ->
Files.
%% analyse_files(...)
diff --git a/lib/common_test/src/ct_framework.erl b/lib/common_test/src/ct_framework.erl
index e8ea7992b4..91368d3137 100644
--- a/lib/common_test/src/ct_framework.erl
+++ b/lib/common_test/src/ct_framework.erl
@@ -113,6 +113,7 @@ init_tc1(?MODULE,_,error_in_suite,[Config0]) when is_list(Config0) ->
ct_event:notify(#event{name=tc_start,
node=node(),
data={?MODULE,error_in_suite}}),
+ ct_suite_init(?MODULE, error_in_suite, [], Config0),
case ?val(error, Config0) of
undefined ->
{fail,"unknown_error_in_suite"};
@@ -635,7 +636,20 @@ try_set_default(Name,Key,Info,Where) ->
end_tc(Mod, Fun, Args) ->
%% Have to keep end_tc/3 for backwards compatibility issues
end_tc(Mod, Fun, Args, '$end_tc_dummy').
-end_tc(?MODULE,error_in_suite,_, _) -> % bad start!
+end_tc(?MODULE,error_in_suite,{Result,[Args]},Return) ->
+ %% this clause gets called if CT has encountered a suite that
+ %% can't be executed
+ FinalNotify =
+ case ct_hooks:end_tc(?MODULE, error_in_suite, Args, Result, Return) of
+ '$ct_no_change' ->
+ Result;
+ HookResult ->
+ HookResult
+ end,
+ Event = #event{name=tc_done,
+ node=node(),
+ data={?MODULE,error_in_suite,tag(FinalNotify)}},
+ ct_event:sync_notify(Event),
ok;
end_tc(Mod,Func,{TCPid,Result,[Args]}, Return) when is_pid(TCPid) ->
end_tc(Mod,Func,TCPid,Result,Args,Return);
@@ -686,18 +700,21 @@ end_tc(Mod,Func,TCPid,Result,Args,Return) ->
undefined ->
%% send sync notification so that event handlers may print
%% in the log file before it gets closed
- ct_event:sync_notify(#event{name=tc_done,
- node=node(),
- data={Mod,FuncSpec,
- tag_cth(FinalNotify)}}),
+ Event = #event{name=tc_done,
+ node=node(),
+ data={Mod,FuncSpec,
+ tag(FinalNotify)}},
+ ct_event:sync_notify(Event),
Result1;
Fun ->
%% send sync notification so that event handlers may print
%% in the log file before it gets closed
- ct_event:sync_notify(#event{name=tc_done,
- node=node(),
- data={Mod,FuncSpec,
- tag(FinalNotify)}}),
+ Event = #event{name=tc_done,
+ node=node(),
+ data={Mod,FuncSpec,
+ tag({'$test_server_framework_test',
+ FinalNotify})}},
+ ct_event:sync_notify(Event),
Fun(end_tc, Return)
end,
@@ -770,44 +787,37 @@ end_tc(Mod,Func,TCPid,Result,Args,Return) ->
%% {error,Reason} | {skip,Reason} | {timetrap_timeout,TVal} |
%% {testcase_aborted,Reason} | testcase_aborted_or_killed |
-%% {'EXIT',Reason} | Other (ignored return value, e.g. 'ok')
-tag({STag,Reason}) when STag == skip; STag == skipped ->
- case Reason of
- {failed,{_,init_per_testcase,_}} -> {auto_skipped,Reason};
- _ -> {skipped,Reason}
- end;
-tag({auto_skip,Reason}) ->
+%% {'EXIT',Reason} | {fail,Reason} | {failed,Reason} |
+%% {user_timetrap_error,Reason} |
+%% Other (ignored return value, e.g. 'ok')
+tag({'$test_server_framework_test',Result}) ->
+ case tag(Result) of
+ ok -> Result;
+ Failure -> Failure
+ end;
+tag({skipped,Reason={failed,{_,init_per_testcase,_}}}) ->
{auto_skipped,Reason};
-tag(E = {ETag,_}) when ETag == error; ETag == 'EXIT';
- ETag == timetrap_timeout;
- ETag == testcase_aborted ->
- {failed,E};
-tag(E = testcase_aborted_or_killed) ->
- {failed,E};
-tag(Other) ->
- Other.
-
-tag_cth({skipped,Reason={failed,{_,init_per_testcase,_}}}) ->
- {auto_skipped,Reason};
-tag_cth({STag,Reason}) when STag == skip; STag == skipped ->
+tag({STag,Reason}) when STag == skip; STag == skipped ->
case Reason of
{failed,{_,init_per_testcase,_}} -> {auto_skipped,Reason};
_ -> {skipped,Reason}
end;
-tag_cth({auto_skip,Reason}) ->
+tag({auto_skip,Reason}) ->
{auto_skipped,Reason};
-tag_cth({fail,Reason}) ->
+tag({fail,Reason}) ->
{failed,{error,Reason}};
-tag_cth(E = {ETag,_}) when ETag == error; ETag == 'EXIT';
+tag(Failed = {failed,_Reason}) ->
+ Failed;
+tag(E = {ETag,_}) when ETag == error; ETag == 'EXIT';
ETag == timetrap_timeout;
ETag == testcase_aborted ->
{failed,E};
-tag_cth(E = testcase_aborted_or_killed) ->
+tag(E = testcase_aborted_or_killed) ->
{failed,E};
-tag_cth(List) when is_list(List) ->
- ok;
-tag_cth(Other) ->
- Other.
+tag(UserTimetrap = {user_timetrap_error,_Reason}) ->
+ UserTimetrap;
+tag(_Other) ->
+ ok.
%%%-----------------------------------------------------------------
%%% @spec error_notification(Mod,Func,Args,Error) -> ok
@@ -841,6 +851,8 @@ error_notification(Mod,Func,_Args,{Error,Loc}) ->
io_lib:format("{test_case_failed,~p}", [Reason]);
Result -> Result
end;
+ {'EXIT',_Reason} = EXIT ->
+ io_lib:format("~P", [EXIT,5]);
{Spec,_Reason} when is_atom(Spec) ->
io_lib:format("~w", [Spec]);
Other ->
@@ -875,8 +887,8 @@ error_notification(Mod,Func,_Args,{Error,Loc}) ->
end,
PrintErr = fun(ErrFormat, ErrArgs) ->
- Div = "~n- - - - - - - - - - - - - - - - "
- "- - - - - - - - - -~n",
+ Div = "~n- - - - - - - - - - - - - - - - - - - "
+ "- - - - - - - - - - - - - - - - - - - - -~n",
io:format(user, lists:concat([Div,ErrFormat,Div,"~n"]),
ErrArgs),
Link =
@@ -1064,9 +1076,32 @@ get_all_cases1(_, []) ->
get_all(Mod, ConfTests) ->
case catch apply(Mod, all, []) of
- {'EXIT',_} ->
- Reason =
- list_to_atom(atom_to_list(Mod)++":all/0 is missing"),
+ {'EXIT',{undef,[{Mod,all,[],_} | _]}} ->
+ Reason =
+ case code:which(Mod) of
+ non_existing ->
+ list_to_atom(atom_to_list(Mod)++
+ " can not be compiled or loaded");
+ _ ->
+ list_to_atom(atom_to_list(Mod)++":all/0 is missing")
+ end,
+ %% this makes test_server call error_in_suite as first
+ %% (and only) test case so we can report Reason properly
+ [{?MODULE,error_in_suite,[[{error,Reason}]]}];
+ {'EXIT',ExitReason} ->
+ case ct_util:get_testdata({error_in_suite,Mod}) of
+ undefined ->
+ ErrStr = io_lib:format("~n*** ERROR *** "
+ "~w:all/0 failed: ~p~n",
+ [Mod,ExitReason]),
+ io:format(user, ErrStr, []),
+ %% save the error info so it doesn't get printed twice
+ ct_util:set_testdata_async({{error_in_suite,Mod},
+ ExitReason});
+ _ExitReason ->
+ ct_util:delete_testdata({error_in_suite,Mod})
+ end,
+ Reason = list_to_atom(atom_to_list(Mod)++":all/0 failed"),
%% this makes test_server call error_in_suite as first
%% (and only) test case so we can report Reason properly
[{?MODULE,error_in_suite,[[{error,Reason}]]}];
@@ -1268,6 +1303,11 @@ report(What,Data) ->
Data1 = if GrName == undefined -> {Suite,Func,Result};
true -> Data
end,
+ %% Register the group leader for the process calling the report
+ %% function, making it possible for a hook function to print
+ %% in the test case log file
+ ReportingPid = self(),
+ ct_logs:register_groupleader(ReportingPid, group_leader()),
case Result of
{failed, _} ->
ct_hooks:on_tc_fail(What, Data1);
@@ -1282,7 +1322,10 @@ report(What,Data) ->
_Else ->
ok
end,
+ ct_logs:unregister_groupleader(ReportingPid),
case {Func,Result} of
+ {error_in_suite,_} when Suite == ?MODULE ->
+ ok;
{init_per_suite,_} ->
ok;
{end_per_suite,_} ->
diff --git a/lib/common_test/src/ct_gen_conn.erl b/lib/common_test/src/ct_gen_conn.erl
index 56082086f6..8da10ee0f3 100644
--- a/lib/common_test/src/ct_gen_conn.erl
+++ b/lib/common_test/src/ct_gen_conn.erl
@@ -24,10 +24,9 @@
-module(ct_gen_conn).
--compile(export_all).
-
--export([start/4, stop/1, get_conn_pid/1]).
+-export([start/4, stop/1, get_conn_pid/1, check_opts/1]).
-export([call/2, call/3, return/2, do_within_time/2]).
+-export([log/3, start_log/1, cont_log/2, end_log/0]).
%%----------------------------------------------------------------------
%% Exported types
diff --git a/lib/common_test/src/ct_logs.erl b/lib/common_test/src/ct_logs.erl
index 7037cdca73..7c8c720e13 100644
--- a/lib/common_test/src/ct_logs.erl
+++ b/lib/common_test/src/ct_logs.erl
@@ -29,6 +29,7 @@
-module(ct_logs).
-export([init/2, close/2, init_tc/1, end_tc/1]).
+-export([register_groupleader/2, unregister_groupleader/1]).
-export([get_log_dir/0, get_log_dir/1]).
-export([log/3, start_log/1, cont_log/2, end_log/0]).
-export([set_stylesheet/2, clear_stylesheet/1]).
@@ -72,6 +73,8 @@
-define(abs(Name), filename:absname(Name)).
+-define(now, os:timestamp()).
+
-record(log_cache, {version,
all_runs = [],
tests = []}).
@@ -267,7 +270,7 @@ init_tc(RefreshLog) ->
ok.
%%%-----------------------------------------------------------------
-%%% @spec end_tc(TCPid) -> ok | {error,Reason}
+%%% @spec end_tc(TCPid) -> ok
%%%
%%% @doc Test case clean up (tool-internal use only).
%%%
@@ -278,6 +281,26 @@ end_tc(TCPid) ->
call({end_tc,TCPid}).
%%%-----------------------------------------------------------------
+%%% @spec register_groupleader(Pid,GroupLeader) -> ok
+%%%
+%%% @doc To enable logging to a group leader (tool-internal use only).
+%%%
+%%% <p>This function is called by ct_framework:report/2</p>
+register_groupleader(Pid,GroupLeader) ->
+ call({register_groupleader,Pid,GroupLeader}),
+ ok.
+
+%%%-----------------------------------------------------------------
+%%% @spec unregister_groupleader(Pid) -> ok
+%%%
+%%% @doc To disable logging to a group leader (tool-internal use only).
+%%%
+%%% <p>This function is called by ct_framework:report/2</p>
+unregister_groupleader(Pid) ->
+ call({unregister_groupleader,Pid}),
+ ok.
+
+%%%-----------------------------------------------------------------
%%% @spec log(Heading,Format,Args) -> ok
%%%
%%% @doc Log internal activity (tool-internal use only).
@@ -290,7 +313,7 @@ end_tc(TCPid) ->
%%% data to log (as in <code>io:format(Format,Args)</code>).</p>
log(Heading,Format,Args) ->
cast({log,sync,self(),group_leader(),ct_internal,?MAX_IMPORTANCE,
- [{int_header(),[log_timestamp(now()),Heading]},
+ [{int_header(),[log_timestamp(?now),Heading]},
{Format,Args},
{int_footer(),[]}]}),
ok.
@@ -312,7 +335,7 @@ log(Heading,Format,Args) ->
%%% @see end_log/0
start_log(Heading) ->
cast({log,sync,self(),group_leader(),ct_internal,?MAX_IMPORTANCE,
- [{int_header(),[log_timestamp(now()),Heading]}]}),
+ [{int_header(),[log_timestamp(?now),Heading]}]}),
ok.
%%%-----------------------------------------------------------------
@@ -470,11 +493,11 @@ tc_print(Category,Importance,Format,Args) ->
get_heading(default) ->
io_lib:format("\n-----------------------------"
"-----------------------\n~s\n",
- [log_timestamp(now())]);
+ [log_timestamp(?now)]);
get_heading(Category) ->
io_lib:format("\n-----------------------------"
"-----------------------\n~s ~w\n",
- [log_timestamp(now()),Category]).
+ [log_timestamp(?now),Category]).
%%%-----------------------------------------------------------------
@@ -532,13 +555,13 @@ div_header(Class) ->
div_header(Class,"User").
div_header(Class,Printer) ->
"\n<div class=\"" ++ atom_to_list(Class) ++ "\"><b>*** " ++ Printer ++
- " " ++ log_timestamp(now()) ++ " ***</b>".
+ " " ++ log_timestamp(?now) ++ " ***</b>".
div_footer() ->
"</div>".
maybe_log_timestamp() ->
- {MS,S,US} = now(),
+ {MS,S,US} = ?now,
case get(log_timestamp) of
{MS,S,_} ->
ok;
@@ -665,7 +688,7 @@ logger(Parent, Mode, Verbosity) ->
make_last_run_index(Time),
CtLogFd = open_ctlog(?misc_io_log),
io:format(CtLogFd,int_header()++int_footer(),
- [log_timestamp(now()),"Common Test Logger started"]),
+ [log_timestamp(?now),"Common Test Logger started"]),
Parent ! {started,self(),{Time,filename:absname("")}},
set_evmgr_gl(CtLogFd),
@@ -764,6 +787,14 @@ logger_loop(State) ->
return(From,ok),
logger_loop(State#logger_state{tc_groupleaders =
rm_tc_gl(TCPid,State)});
+ {{register_groupleader,Pid,GL},From} ->
+ GLs = add_tc_gl(Pid,GL,State),
+ return(From,ok),
+ logger_loop(State#logger_state{tc_groupleaders = GLs});
+ {{unregister_groupleader,Pid},From} ->
+ return(From,ok),
+ logger_loop(State#logger_state{tc_groupleaders =
+ rm_tc_gl(Pid,State)});
{{get_log_dir,true},From} ->
return(From,{ok,State#logger_state.log_dir}),
logger_loop(State);
@@ -806,7 +837,7 @@ logger_loop(State) ->
stop ->
io:format(State#logger_state.ct_log_fd,
int_header()++int_footer(),
- [log_timestamp(now()),"Common Test Logger finished"]),
+ [log_timestamp(?now),"Common Test Logger finished"]),
close_ctlog(State#logger_state.ct_log_fd),
ok
end.
@@ -1876,6 +1907,18 @@ sort_all_runs(Dirs) ->
{Date1,HH1,MM1,SS1} > {Date2,HH2,MM2,SS2}
end, Dirs).
+sort_ct_runs(Dirs) ->
+ %% Directory naming: <Prefix>.NodeName.Date_Time[/...]
+ %% Sort on Date_Time string: "YYYY-MM-DD_HH.MM.SS"
+ lists:sort(
+ fun(Dir1,Dir2) ->
+ [SS1,MM1,DateHH1 | _] =
+ lists:reverse(string:tokens(filename:dirname(Dir1),[$.])),
+ [SS2,MM2,DateHH2 | _] =
+ lists:reverse(string:tokens(filename:dirname(Dir2),[$.])),
+ {DateHH1,MM1,SS1} =< {DateHH2,MM2,SS2}
+ end, Dirs).
+
dir_diff_all_runs(Dirs, LogCache) ->
case LogCache#log_cache.all_runs of
[] ->
@@ -2011,6 +2054,13 @@ runentry(Dir, Totals={Node,Label,Logs,
?testname_width-3)),
lists:flatten(io_lib:format("~ts...",[Trunc]))
end,
+ TotMissingStr =
+ if NotBuilt > 0 ->
+ ["<font color=\"red\">",
+ integer_to_list(NotBuilt),"</font>"];
+ true ->
+ integer_to_list(NotBuilt)
+ end,
Total = TotSucc+TotFail+AllSkip,
A = xhtml(["<td align=center><font size=\"-1\">",Node,
"</font></td>\n",
@@ -2030,7 +2080,7 @@ runentry(Dir, Totals={Node,Label,Logs,
"<td align=right>",TotFailStr,"</td>\n",
"<td align=right>",integer_to_list(AllSkip),
" (",UserSkipStr,"/",AutoSkipStr,")</td>\n",
- "<td align=right>",integer_to_list(NotBuilt),"</td>\n"],
+ "<td align=right>",TotMissingStr,"</td>\n"],
TotalsStr = A++B++C,
XHTML = [xhtml("<tr>\n", ["<tr class=\"",odd_or_even(),"\">\n"]),
@@ -2188,7 +2238,8 @@ make_all_suites_index(When) when is_atom(When) ->
end
end,
- LogDirs = filelib:wildcard(logdir_prefix()++".*/*"++?logdir_ext),
+ Wildcard = logdir_prefix()++".*/*"++?logdir_ext,
+ LogDirs = sort_ct_runs(filelib:wildcard(Wildcard)),
LogCacheInfo = get_cache_data(UseCache),
diff --git a/lib/common_test/src/ct_master.erl b/lib/common_test/src/ct_master.erl
index b42ff73846..2cdb259899 100644
--- a/lib/common_test/src/ct_master.erl
+++ b/lib/common_test/src/ct_master.erl
@@ -25,6 +25,7 @@
-export([run/1,run/3,run/4]).
-export([run_on_node/2,run_on_node/3]).
-export([run_test/1,run_test/2]).
+-export([get_event_mgr_ref/0]).
-export([basic_html/1]).
-export([abort/0,abort/1,progress/0]).
@@ -292,6 +293,18 @@ progress() ->
call(progress).
%%%-----------------------------------------------------------------
+%%% @spec get_event_mgr_ref() -> MasterEvMgrRef
+%%% MasterEvMgrRef = atom()
+%%%
+%%% @doc <p>Call this function to get a reference to the
+%%% CT master event manager. The reference can be used, for example,
+%%% to add a user-specific event handler while tests are running.
+%%% Example:
+%%% <c>gen_event:add_handler(ct_master:get_event_mgr_ref(), my_ev_h, [])</c></p>
+get_event_mgr_ref() ->
+ ?CT_MEVMGR_REF.
+
+%%%-----------------------------------------------------------------
%%% @spec basic_html(Bool) -> ok
%%% Bool = true | false
%%%
diff --git a/lib/common_test/src/ct_master_logs.erl b/lib/common_test/src/ct_master_logs.erl
index 5393097f57..384c1f6863 100644
--- a/lib/common_test/src/ct_master_logs.erl
+++ b/lib/common_test/src/ct_master_logs.erl
@@ -37,6 +37,8 @@
-define(details_file_name,"details.info").
-define(table_color,"lightblue").
+-define(now, os:timestamp()).
+
%%%--------------------------------------------------------------------
%%% API
%%%--------------------------------------------------------------------
@@ -54,7 +56,7 @@ start(LogDir,Nodes) ->
end.
log(Heading,Format,Args) ->
- cast({log,self(),[{int_header(),[log_timestamp(now()),Heading]},
+ cast({log,self(),[{int_header(),[log_timestamp(?now),Heading]},
{Format,Args},
{int_footer(),[]}]}),
ok.
@@ -132,7 +134,7 @@ init(Parent,LogDir,Nodes) ->
atom_to_list(N) ++ " "
end,Nodes)),
- io:format(CtLogFd,int_header(),[log_timestamp(now()),"Test Nodes\n"]),
+ io:format(CtLogFd,int_header(),[log_timestamp(?now),"Test Nodes\n"]),
io:format(CtLogFd,"~ts\n",[NodeStr]),
io:put_chars(CtLogFd,[int_footer(),"\n"]),
@@ -189,7 +191,7 @@ loop(State) ->
make_all_runs_index(State#state.logdir),
io:format(State#state.log_fd,
int_header()++int_footer(),
- [log_timestamp(now()),"Finished!"]),
+ [log_timestamp(?now),"Finished!"]),
close_ct_master_log(State#state.log_fd),
close_nodedir_index(State#state.nodedir_ix_fd),
ok
diff --git a/lib/common_test/src/ct_netconfc.erl b/lib/common_test/src/ct_netconfc.erl
index 14ee55703f..cca08bd063 100644
--- a/lib/common_test/src/ct_netconfc.erl
+++ b/lib/common_test/src/ct_netconfc.erl
@@ -172,6 +172,7 @@
only_open/2,
hello/1,
hello/2,
+ hello/3,
close_session/1,
close_session/2,
kill_session/2,
@@ -457,23 +458,35 @@ only_open(KeyOrName, ExtraOpts) ->
%%----------------------------------------------------------------------
%% @spec hello(Client) -> Result
-%% @equiv hello(Client, infinity)
+%% @equiv hello(Client, [], infinity)
hello(Client) ->
- hello(Client,?DEFAULT_TIMEOUT).
+ hello(Client,[],?DEFAULT_TIMEOUT).
%%----------------------------------------------------------------------
-spec hello(Client,Timeout) -> Result when
Client :: handle(),
Timeout :: timeout(),
Result :: ok | {error,error_reason()}.
+%% @spec hello(Client, Timeout) -> Result
+%% @equiv hello(Client, [], Timeout)
+hello(Client,Timeout) ->
+ hello(Client,[],Timeout).
+
+%%----------------------------------------------------------------------
+-spec hello(Client,Options,Timeout) -> Result when
+ Client :: handle(),
+ Options :: [{capability, [string()]}],
+ Timeout :: timeout(),
+ Result :: ok | {error,error_reason()}.
%% @doc Exchange `hello' messages with the server.
%%
-%% Sends a `hello' message to the server and waits for the return.
-%%
+%% Adds the optional capabilities, sends a `hello' message to the
+%% server, and waits for the reply.
%% @end
%%----------------------------------------------------------------------
-hello(Client,Timeout) ->
- call(Client, {hello, Timeout}).
+hello(Client,Options,Timeout) ->
+ call(Client, {hello, Options, Timeout}).
+
%%----------------------------------------------------------------------
%% @spec get_session_id(Client) -> Result
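A hedged sketch of the extended hello/3 call with a user-defined capability;
the connection name my_netconf_server and the capability URN are assumptions:

%% Open a NETCONF session and advertise one extra capability in the
%% hello message (names are hypothetical examples).
{ok,Client} = ct_netconfc:only_open(my_netconf_server),
ok = ct_netconfc:hello(Client,
                       [{capability,["urn:example:capability:1.0"]}],
                       infinity).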
@@ -794,8 +807,9 @@ action(Client,Action) ->
Client :: client(),
Action :: simple_xml(),
Timeout :: timeout(),
- Result :: {ok,[simple_xml()]} | {error,error_reason()}.
-%% @doc Execute an action.
+ Result :: ok | {ok,[simple_xml()]} | {error,error_reason()}.
+%% @doc Execute an action. If the return type is void, <c>ok</c> will
+%% be returned instead of <c>{ok,[simple_xml()]}</c>.
%%
%% @end
%%----------------------------------------------------------------------
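A small sketch of the changed action/2 return value: when the RPC reply
carries no data, the atom ok is now returned instead of
{ok,[simple_xml()]} (the action XML below is hypothetical):

%% An action whose reply is just <ok/> now returns the atom ok.
ok = ct_netconfc:action(Client, {myaction,[{xmlns,"urn:example:actions"}],[]}).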
@@ -1074,9 +1088,9 @@ terminate(_, #state{connection=Connection}) ->
ok.
%% @private
-handle_msg({hello,Timeout}, From,
+handle_msg({hello, Options, Timeout}, From,
#state{connection=Connection,hello_status=HelloStatus} = State) ->
- case do_send(Connection, client_hello()) of
+ case do_send(Connection, client_hello(Options)) of
ok ->
case HelloStatus of
undefined ->
@@ -1153,7 +1167,9 @@ handle_msg({Ref,timeout},#state{pending=Pending} = State) ->
close_session -> stop;
_ -> noreply
end,
- {R,State#state{pending=Pending1}}.
+ %% Half-hearted attempt to get back into a correct state; this matches
+ %% the implementation before this patch
+ {R,State#state{pending=Pending1, buff= <<>>}}.
%% @private
%% Called by ct_util_server to close registered connections before terminate.
@@ -1257,10 +1273,14 @@ set_request_timer(T) ->
%%%-----------------------------------------------------------------
-client_hello() ->
+client_hello(Options) when is_list(Options) ->
+ UserCaps = [{capability, UserCap} ||
+ {capability, UserCap} <- Options,
+ is_list(hd(UserCap))],
{hello, ?NETCONF_NAMESPACE_ATTR,
[{capabilities,
- [{capability,[?NETCONF_BASE_CAP++?NETCONF_BASE_CAP_VSN]}]}]}.
+ [{capability,[?NETCONF_BASE_CAP++?NETCONF_BASE_CAP_VSN]}|
+ UserCaps]}]}.
%%%-----------------------------------------------------------------
@@ -1343,72 +1363,54 @@ to_xml_doc(Simple) ->
%%%-----------------------------------------------------------------
%%% Parse and handle received XML data
-handle_data(NewData,#state{connection=Connection,buff=Buff} = State) ->
+handle_data(NewData,#state{connection=Connection,buff=Buff0} = State0) ->
log(Connection,recv,NewData),
- Data = <<Buff/binary,NewData/binary>>,
- case xmerl_sax_parser:stream(<<>>,
- [{continuation_fun,fun sax_cont/1},
- {continuation_state,{Data,Connection,false}},
- {event_fun,fun sax_event/3},
- {event_state,[]}]) of
- {ok, Simple, Rest} ->
- decode(Simple,State#state{buff=Rest});
- {fatal_error,_Loc,Reason,_EndTags,_EventState} ->
- ?error(Connection#connection.name,[{parse_error,Reason},
- {buffer,Buff},
- {new_data,NewData}]),
- case Reason of
- {could_not_fetch_data,Msg} ->
- handle_msg(Msg,State#state{buff = <<>>});
- _Other ->
- Pending1 =
- case State#state.pending of
- [] ->
- [];
- Pending ->
- %% Assuming the first request gets the
- %% first answer
- P=#pending{tref=TRef,caller=Caller} =
- lists:last(Pending),
- _ = timer:cancel(TRef),
- Reason1 = {failed_to_parse_received_data,Reason},
- ct_gen_conn:return(Caller,{error,Reason1}),
- lists:delete(P,Pending)
- end,
- {noreply,State#state{pending=Pending1,buff = <<>>}}
- end
- end.
-
-%%%-----------------------------------------------------------------
-%%% Parsing of XML data
-%% Contiuation function for the sax parser
-sax_cont(done) ->
- {<<>>,done};
-sax_cont({Data,Connection,false}) ->
+ Data = append_wo_initial_nl(Buff0,NewData),
case binary:split(Data,[?END_TAG],[]) of
- [All] ->
- %% No end tag found. Remove what could be a part
- %% of an end tag from the data and save for next
- %% iteration
- SafeSize = size(All)-5,
- <<New:SafeSize/binary,Save:5/binary>> = All,
- {New,{Save,Connection,true}};
- [_Msg,_Rest]=Msgs ->
- %% We have at least one full message. Any excess data will
- %% be returned from xmerl_sax_parser:stream/2 in the Rest
- %% parameter.
- {list_to_binary(Msgs),done}
- end;
-sax_cont({Data,Connection,true}) ->
- case ssh_receive_data() of
- {ok,Bin} ->
- log(Connection,recv,Bin),
- sax_cont({<<Data/binary,Bin/binary>>,Connection,false});
- {error,Reason} ->
- throw({could_not_fetch_data,Reason})
+ [_NoEndTagFound] ->
+ {noreply, State0#state{buff=Data}};
+ [FirstMsg,Buff1] ->
+ SaxArgs = [{event_fun,fun sax_event/3}, {event_state,[]}],
+ case xmerl_sax_parser:stream(FirstMsg, SaxArgs) of
+ {ok, Simple, _Thrash} ->
+ case decode(Simple, State0#state{buff=Buff1}) of
+ {noreply, #state{buff=Buff} = State} when Buff =/= <<>> ->
+ %% Recurse if we have more data in buffer
+ handle_data(<<>>, State);
+ Other ->
+ Other
+ end;
+ {fatal_error,_Loc,Reason,_EndTags,_EventState} ->
+ ?error(Connection#connection.name,
+ [{parse_error,Reason},
+ {buffer, Buff0},
+ {new_data,NewData}]),
+ handle_error(Reason, State0#state{buff= <<>>})
+ end
end.
-
+%% XML does not accept a leading NL, and some NETCONF servers add a NL after
+%% each ?END_TAG, so ignore them
+append_wo_initial_nl(<<>>,NewData) -> NewData;
+append_wo_initial_nl(<<"\n", Data/binary>>, NewData) ->
+ append_wo_initial_nl(Data, NewData);
+append_wo_initial_nl(Data, NewData) ->
+ <<Data/binary, NewData/binary>>.
+
+handle_error(Reason, State) ->
+ Pending1 = case State#state.pending of
+ [] -> [];
+ Pending ->
+ %% Assuming the first request gets the
+ %% first answer
+ P=#pending{tref=TRef,caller=Caller} =
+ lists:last(Pending),
+ _ = timer:cancel(TRef),
+ Reason1 = {failed_to_parse_received_data,Reason},
+ ct_gen_conn:return(Caller,{error,Reason1}),
+ lists:delete(P,Pending)
+ end,
+ {noreply, State#state{pending=Pending1}}.
%% Event function for the sax parser. It builds a simple XML structure.
%% Care is taken to keep namespace attributes and prefixes as in the original XML.
@@ -1606,6 +1608,9 @@ decode_ok(Other) ->
decode_data([{Tag,Attrs,Content}]) ->
case get_local_name_atom(Tag) of
+ ok ->
+ %% when action has return type void
+ ok;
data ->
%% Since content of data has nothing from the netconf
%% namespace, we remove the parent's xmlns attribute here
@@ -1869,16 +1874,6 @@ get_tag([]) ->
%%%-----------------------------------------------------------------
%%% SSH stuff
-ssh_receive_data() ->
- receive
- {ssh_cm, CM, {data, Ch, _Type, Data}} ->
- ssh_connection:adjust_window(CM,Ch,size(Data)),
- {ok, Data};
- {ssh_cm, _CM, {Closed, _Ch}} = X when Closed == closed; Closed == eof ->
- {error,X};
- {_Ref,timeout} = X ->
- {error,X}
- end.
ssh_open(#options{host=Host,timeout=Timeout,port=Port,ssh=SshOpts,name=Name}) ->
case ssh:connect(Host, Port,
diff --git a/lib/common_test/src/ct_release_test.erl b/lib/common_test/src/ct_release_test.erl
index eb9e9c832f..3f0b5bda67 100644
--- a/lib/common_test/src/ct_release_test.erl
+++ b/lib/common_test/src/ct_release_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2014. All Rights Reserved.
+%% Copyright Ericsson AB 2014-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -51,10 +51,11 @@
%% executed.
%%
%% <dl>
-%% <dt>Module:upgrade_init(State) -> NewState</dt>
+%% <dt>Module:upgrade_init(CtData,State) -> NewState</dt>
%% <dd>Types:
%%
-%% <b><c>State = NewState = cb_state()</c></b>
+%% <b><code>CtData = {@link ct_data()}</code></b><br/>
+%% <b><code>State = NewState = cb_state()</code></b>
%%
%% Initialize system before upgrade test starts.
%%
@@ -63,17 +64,22 @@
%% the boot script, so this callback is intended for additional
%% initialization, if necessary.
%%
+%% <code>CtData</code> is an opaque data structure which shall be used
+%% in any call to <code>ct_release_test</code> inside the callback.
+%%
%% Example:
%%
%% ```
-%% upgrade_init(State) ->
+%% upgrade_init(CtData,State) ->
+%% {ok,{FromVsn,ToVsn}} = ct_release_test:get_app_vsns(CtData,myapp),
%% open_connection(State).'''
%% </dd>
%%
-%% <dt>Module:upgrade_upgraded(State) -> NewState</dt>
+%% <dt>Module:upgrade_upgraded(CtData,State) -> NewState</dt>
%% <dd>Types:
%%
-%% <b><c>State = NewState = cb_state()</c></b>
+%% <b><code>CtData = {@link ct_data()}</code></b><br/>
+%% <b><code>State = NewState = cb_state()</code></b>
%%
%% Check that upgrade was successful.
%%
@@ -82,17 +88,21 @@
%% been made permanent. It allows application specific checks to
%% ensure that the upgrade was successful.
%%
+%% <code>CtData</code> is an opaque data structure which shall be used
+%% in any call to <code>ct_release_test</code> inside the callback.
+%%
%% Example:
%%
%% ```
-%% upgrade_upgraded(State) ->
+%% upgrade_upgraded(CtData,State) ->
%% check_connection_still_open(State).'''
%% </dd>
%%
-%% <dt>Module:upgrade_downgraded(State) -> NewState</dt>
+%% <dt>Module:upgrade_downgraded(CtData,State) -> NewState</dt>
%% <dd>Types:
%%
-%% <b><c>State = NewState = cb_state()</c></b>
+%% <b><code>CtData = {@link ct_data()}</code></b><br/>
+%% <b><code>State = NewState = cb_state()</code></b>
%%
%% Check that downgrade was successful.
%%
@@ -101,10 +111,13 @@
%% made permanent. It allows application specific checks to ensure
%% that the downgrade was successful.
%%
+%% <code>CtData</code> is an opaque data structure which shall be used
+%% in any call to <code>ct_release_test</code> inside the callback.
+%%
%% Example:
%%
%% ```
-%% upgrade_init(State) ->
+%% upgrade_downgraded(CtData,State) ->
%% check_connection_closed(State).'''
%% </dd>
%% </dl>
@@ -112,7 +125,7 @@
%%-----------------------------------------------------------------
-module(ct_release_test).
--export([init/1, upgrade/4, cleanup/1]).
+-export([init/1, upgrade/4, cleanup/1, get_app_vsns/2, get_appup/2]).
-include_lib("kernel/include/file.hrl").
@@ -121,12 +134,17 @@
-define(exclude_apps, [hipe, typer, dialyzer]). % never include these apps
%%-----------------------------------------------------------------
+-record(ct_data, {from,to}).
+
+%%-----------------------------------------------------------------
-type config() :: [{atom(),term()}].
-type cb_state() :: term().
+-opaque ct_data() :: #ct_data{}.
+-export_type([ct_data/0]).
--callback upgrade_init(cb_state()) -> cb_state().
--callback upgrade_upgraded(cb_state()) -> cb_state().
--callback upgrade_downgraded(cb_state()) -> cb_state().
+-callback upgrade_init(ct_data(),cb_state()) -> cb_state().
+-callback upgrade_upgraded(ct_data(),cb_state()) -> cb_state().
+-callback upgrade_downgraded(ct_data(),cb_state()) -> cb_state().
%%-----------------------------------------------------------------
-spec init(Config) -> Result when
@@ -207,12 +225,12 @@ init(Config) ->
%% <li>Perform the upgrade test and allow customized
%% control by using callbacks:
%% <ol>
-%% <li>Callback: `upgrade_init/1'</li>
+%% <li>Callback: `upgrade_init/2'</li>
%% <li>Unpack the new release</li>
%% <li>Install the new release</li>
-%% <li>Callback: `upgrade_upgraded/1'</li>
+%% <li>Callback: `upgrade_upgraded/2'</li>
%% <li>Install the original release</li>
-%% <li>Callback: `upgrade_downgraded/1'</li>
+%% <li>Callback: `upgrade_downgraded/2'</li>
%% </ol>
%% </li>
%% </ol>
@@ -314,6 +332,71 @@ cleanup(Config) ->
Config.
%%-----------------------------------------------------------------
+-spec get_app_vsns(CtData,App) -> {ok,{From,To}} | {error,Reason} when
+ CtData :: ct_data(),
+ App :: atom(),
+ From :: string(),
+ To :: string(),
+ Reason :: {app_not_found,App}.
+%% @doc Get versions involved in this upgrade for the given application.
+%%
+%% This function can be called from inside any of the callback
+%% functions. It returns the old (From) and new (To) versions involved
+%% in the upgrade/downgrade test for the given application.
+%%
+%% <code>CtData</code> must be the first argument received in the
+%% calling callback function - an opaque data structure set by
+%% <code>ct_release_test</code>.
+get_app_vsns(#ct_data{from=FromApps,to=ToApps},App) ->
+ case {lists:keyfind(App,1,FromApps),lists:keyfind(App,1,ToApps)} of
+ {{App,FromVsn,_},{App,ToVsn,_}} ->
+ {ok,{FromVsn,ToVsn}};
+ _ ->
+ {error,{app_not_found,App}}
+ end.
+
+%%-----------------------------------------------------------------
+-spec get_appup(CtData,App) -> {ok,Appup} | {error,Reason} when
+ CtData :: ct_data(),
+ App :: atom(),
+ Appup :: {From,To,Up,Down},
+ From :: string(),
+ To :: string(),
+ Up :: [Instr],
+ Down :: [Instr],
+ Instr :: term(),
+ Reason :: {app_not_found,App} | {vsn_not_found,{App,From}}.
+%% @doc Get appup instructions for the given application.
+%%
+%% This function can be called from inside any of the callback
+%% functions. It reads the appup file for the given application and
+%% returns the instructions for upgrade and downgrade for the versions
+%% in the test.
+%%
+%% <code>CtData</code> must be the first argument received in the
+%% calling callback function - an opaque data structure set by
+%% <code>ct_release_test</code>.
+%%
+%% See the reference manual for appup files for the type definitions of the
+%% instructions.
+get_appup(#ct_data{from=FromApps,to=ToApps},App) ->
+ case lists:keyfind(App,1,ToApps) of
+ {App,ToVsn,ToDir} ->
+ Appup = filename:join([ToDir, "ebin", atom_to_list(App)++".appup"]),
+ {ok, [{ToVsn, Ups, Downs}]} = file:consult(Appup),
+ {App,FromVsn,_} = lists:keyfind(App,1,FromApps),
+ case {systools_relup:appup_search_for_version(FromVsn,Ups),
+ systools_relup:appup_search_for_version(FromVsn,Downs)} of
+ {{ok,Up},{ok,Down}} ->
+ {ok,{FromVsn,ToVsn,Up,Down}};
+ _ ->
+ {error,{vsn_not_found,{App,FromVsn}}}
+ end;
+ false ->
+ {error,{app_not_found,App}}
+ end.
+
+%%-----------------------------------------------------------------
init_upgrade_test() ->
%% Check that a real release is running, not e.g. cerl
ok = application:ensure_started(sasl),
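A hedged sketch of an upgrade callback using the new CtData argument together
with get_app_vsns/2 and get_appup/2; the application name myapp is hypothetical:

%% Sketch: called after the new release has been installed and made
%% permanent (myapp is a hypothetical application under test).
upgrade_upgraded(CtData, State) ->
    {ok,{FromVsn,ToVsn}} = ct_release_test:get_app_vsns(CtData, myapp),
    {ok,{FromVsn,ToVsn,UpInstrs,_DownInstrs}} =
        ct_release_test:get_appup(CtData, myapp),
    ct:log("myapp upgraded ~s -> ~s with ~p", [FromVsn,ToVsn,UpInstrs]),
    State.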
@@ -558,8 +641,14 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
Start = filename:join([InstallDir,bin,start]),
{ok,Node} = start_node(Start,FromVsn,FromAppsVsns),
+ %% Add path to this module, to allow calls to get_appup/2
+ Dir = filename:dirname(code:which(?MODULE)),
+ _ = rpc:call(Node,code,add_pathz,[Dir]),
+
ct:log("Node started: ~p",[Node]),
- State1 = do_callback(Node,Cb,upgrade_init,InitState),
+ CtData = #ct_data{from = [{A,V,code:lib_dir(A)} || {A,V} <- FromAppsVsns],
+ to=[{A,V,code:lib_dir(A)} || {A,V} <- ToAppsVsns]},
+ State1 = do_callback(Node,Cb,upgrade_init,[CtData,InitState]),
[{"OTP upgrade test",FromVsn,_,permanent}] =
rpc:call(Node,release_handler,which_releases,[]),
@@ -592,7 +681,7 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
{"OTP upgrade test",FromVsn,_,old}] =
rpc:call(Node,release_handler,which_releases,[]),
- State2 = do_callback(Node,Cb,upgrade_upgraded,State1),
+ State2 = do_callback(Node,Cb,upgrade_upgraded,[CtData,State1]),
ct:log("Re-installing old release"),
case rpc:call(Node,release_handler,install_release,[FromVsn]) of
@@ -615,7 +704,7 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
{"OTP upgrade test",FromVsn,_,permanent}] =
rpc:call(Node,release_handler,which_releases,[]),
- _State3 = do_callback(Node,Cb,upgrade_downgraded,State2),
+ _State3 = do_callback(Node,Cb,upgrade_downgraded,[CtData,State2]),
ct:log("Terminating node ~p",[Node]),
erlang:monitor_node(Node,true),
@@ -625,15 +714,15 @@ do_upgrade({Cb,InitState},FromVsn,FromAppsVsns,ToRel,ToAppsVsns,InstallDir) ->
ok.
-do_callback(Node,Mod,Func,State) ->
+do_callback(Node,Mod,Func,Args) ->
Dir = filename:dirname(code:which(Mod)),
_ = rpc:call(Node,code,add_path,[Dir]),
ct:log("Calling ~p:~p/1",[Mod,Func]),
- R = rpc:call(Node,Mod,Func,[State]),
- ct:log("~p:~p/1 returned: ~p",[Mod,Func,R]),
+ R = rpc:call(Node,Mod,Func,Args),
+ ct:log("~p:~p/~w returned: ~p",[Mod,Func,length(Args),R]),
case R of
{badrpc,Error} ->
- test_server:fail({test_upgrade_callback,Mod,Func,State,Error});
+ test_server:fail({test_upgrade_callback,Mod,Func,Args,Error});
NewState ->
NewState
end.
diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl
index 00d0aab507..0eafe72020 100644
--- a/lib/common_test/src/ct_run.erl
+++ b/lib/common_test/src/ct_run.erl
@@ -77,7 +77,8 @@
multiply_timetraps = 1,
scale_timetraps = false,
create_priv_dir,
- testspecs = [],
+ testspec_files = [],
+ current_testspec,
tests,
starter}).
@@ -225,18 +226,24 @@ finish(Tracing, ExitStatus, Args) ->
if ExitStatus == interactive_mode ->
interactive_mode;
true ->
- %% it's possible to tell CT to finish execution with a call
- %% to a different function than the normal halt/1 BIF
- %% (meant to be used mainly for reading the CT exit status)
- case get_start_opt(halt_with,
- fun([HaltMod,HaltFunc]) ->
- {list_to_atom(HaltMod),
- list_to_atom(HaltFunc)} end,
- Args) of
- undefined ->
- halt(ExitStatus);
- {M,F} ->
- apply(M, F, [ExitStatus])
+ case get_start_opt(vts, true, Args) of
+ true ->
+ %% VTS mode, don't halt the node
+ ok;
+ _ ->
+ %% it's possible to tell CT to finish execution with a call
+ %% to a different function than the normal halt/1 BIF
+ %% (meant to be used mainly for reading the CT exit status)
+ case get_start_opt(halt_with,
+ fun([HaltMod,HaltFunc]) ->
+ {list_to_atom(HaltMod),
+ list_to_atom(HaltFunc)} end,
+ Args) of
+ undefined ->
+ halt(ExitStatus);
+ {M,F} ->
+ apply(M, F, [ExitStatus])
+ end
end
end.
@@ -244,7 +251,7 @@ script_start1(Parent, Args) ->
%% read general start flags
Label = get_start_opt(label, fun([Lbl]) -> Lbl end, Args),
Profile = get_start_opt(profile, fun([Prof]) -> Prof end, Args),
- Vts = get_start_opt(vts, true, Args),
+ Vts = get_start_opt(vts, true, undefined, Args),
Shell = get_start_opt(shell, true, Args),
Cover = get_start_opt(cover, fun([CoverFile]) -> ?abs(CoverFile) end, Args),
CoverStop = get_start_opt(cover_stop,
@@ -293,10 +300,10 @@ script_start1(Parent, Args) ->
application:set_env(common_test, auto_compile, true),
InclDirs =
case proplists:get_value(include, Args) of
- Incl when is_list(hd(Incl)) ->
- Incl;
+ Incls when is_list(hd(Incls)) ->
+ [filename:absname(IDir) || IDir <- Incls];
Incl when is_list(Incl) ->
- [Incl];
+ [filename:absname(Incl)];
undefined ->
[]
end,
@@ -330,8 +337,8 @@ script_start1(Parent, Args) ->
Stylesheet = get_start_opt(stylesheet,
fun([SS]) -> ?abs(SS) end, Args),
%% basic_html - used by ct_logs
- BasicHtml = case proplists:get_value(basic_html, Args) of
- undefined ->
+ BasicHtml = case {Vts,proplists:get_value(basic_html, Args)} of
+ {undefined,undefined} ->
application:set_env(common_test, basic_html, false),
undefined;
_ ->
@@ -364,9 +371,10 @@ script_start1(Parent, Args) ->
scale_timetraps = ScaleTT,
create_priv_dir = CreatePrivDir,
starter = script},
-
+
%% check if log files should be refreshed or go on to run tests...
Result = run_or_refresh(Opts, Args),
+
%% send final results to starting process waiting in script_start/0
Parent ! {self(), Result}.
@@ -485,8 +493,11 @@ execute_one_spec(TS, Opts, Args) ->
case check_and_install_configfiles(AllConfig, TheLogDir, Opts) of
ok -> % read tests from spec
{Run,Skip} = ct_testspec:prepare_tests(TS, node()),
- do_run(Run, Skip, Opts#opts{config=AllConfig,
- logdir=TheLogDir}, Args);
+ Result = do_run(Run, Skip, Opts#opts{config=AllConfig,
+ logdir=TheLogDir,
+ current_testspec=TS}, Args),
+ ct_util:delete_testdata(testspec),
+ Result;
Error ->
Error
end.
@@ -577,7 +588,7 @@ combine_test_opts(TS, Specs, Opts) ->
Opts#opts{label = Label,
profile = Profile,
- testspecs = Specs,
+ testspec_files = Specs,
cover = Cover,
cover_stop = CoverStop,
logdir = which(logdir, LogDir),
@@ -702,7 +713,7 @@ script_start4(#opts{label = Label, profile = Profile,
logopts = LogOpts,
verbosity = Verbosity,
enable_builtin_hooks = EnableBuiltinHooks,
- logdir = LogDir, testspecs = Specs}, _Args) ->
+ logdir = LogDir, testspec_files = Specs}, _Args) ->
%% label - used by ct_logs
application:set_env(common_test, test_label, Label),
@@ -757,24 +768,10 @@ script_start4(Opts = #opts{tests = Tests}, Args) ->
%%% @doc Print usage information for <code>ct_run</code>.
script_usage() ->
io:format("\n\nUsage:\n\n"),
- io:format("Run tests in web based GUI:\n\n"
- "\tct_run -vts [-browser Browser]"
- "\n\t[-config ConfigFile1 ConfigFile2 .. ConfigFileN]"
- "\n\t[-decrypt_key Key] | [-decrypt_file KeyFile]"
- "\n\t[-dir TestDir1 TestDir2 .. TestDirN] |"
- "\n\t[-suite Suite [-case Case]]"
- "\n\t[-logopts LogOpt1 LogOpt2 .. LogOptN]"
- "\n\t[-verbosity GenVLvl | [CategoryVLvl1 .. CategoryVLvlN]]"
- "\n\t[-include InclDir1 InclDir2 .. InclDirN]"
- "\n\t[-no_auto_compile]"
- "\n\t[-abort_if_missing_suites]"
- "\n\t[-multiply_timetraps N]"
- "\n\t[-scale_timetraps]"
- "\n\t[-create_priv_dir auto_per_run | auto_per_tc | manual_per_tc]"
- "\n\t[-basic_html]\n\n"),
io:format("Run tests from command line:\n\n"
"\tct_run [-dir TestDir1 TestDir2 .. TestDirN] |"
- "\n\t[-suite Suite1 Suite2 .. SuiteN [-case Case1 Case2 .. CaseN]]"
+ "\n\t[[-dir TestDir] -suite Suite1 Suite2 .. SuiteN"
+ "\n\t [[-group Groups1 Groups2 .. GroupsN] [-case Case1 Case2 .. CaseN]]]"
"\n\t[-step [config | keep_inactive]]"
"\n\t[-config ConfigFile1 ConfigFile2 .. ConfigFileN]"
"\n\t[-userconfig CallbackModule ConfigFile1 .. ConfigFileN]"
@@ -830,7 +827,22 @@ script_usage() ->
io:format("Run CT in interactive mode:\n\n"
"\tct_run -shell"
"\n\t[-config ConfigFile1 ConfigFile2 .. ConfigFileN]"
- "\n\t[-decrypt_key Key] | [-decrypt_file KeyFile]\n\n").
+ "\n\t[-decrypt_key Key] | [-decrypt_file KeyFile]\n\n"),
+ io:format("Run tests in web based GUI:\n\n"
+ "\tct_run -vts [-browser Browser]"
+ "\n\t[-config ConfigFile1 ConfigFile2 .. ConfigFileN]"
+ "\n\t[-decrypt_key Key] | [-decrypt_file KeyFile]"
+ "\n\t[-dir TestDir1 TestDir2 .. TestDirN] |"
+ "\n\t[-suite Suite [-case Case]]"
+ "\n\t[-logopts LogOpt1 LogOpt2 .. LogOptN]"
+ "\n\t[-verbosity GenVLvl | [CategoryVLvl1 .. CategoryVLvlN]]"
+ "\n\t[-include InclDir1 InclDir2 .. InclDirN]"
+ "\n\t[-no_auto_compile]"
+ "\n\t[-abort_if_missing_suites]"
+ "\n\t[-multiply_timetraps N]"
+ "\n\t[-scale_timetraps]"
+ "\n\t[-create_priv_dir auto_per_run | auto_per_tc | manual_per_tc]"
+ "\n\t[-basic_html]\n\n").
%%%-----------------------------------------------------------------
%%% @hidden
@@ -1023,10 +1035,10 @@ run_test2(StartOpts) ->
case proplists:get_value(include, StartOpts) of
undefined ->
[];
- Incl when is_list(hd(Incl)) ->
- Incl;
+ Incls when is_list(hd(Incls)) ->
+ [filename:absname(IDir) || IDir <- Incls];
Incl when is_list(Incl) ->
- [Incl]
+ [filename:absname(Incl)]
end,
case os:getenv("CT_INCLUDE_PATH") of
false ->
@@ -1102,7 +1114,7 @@ run_test2(StartOpts) ->
undefined ->
case lists:keysearch(prepared_tests, 1, StartOpts) of
{value,{_,{Run,Skip},Specs}} -> % use prepared tests
- run_prepared(Run, Skip, Opts#opts{testspecs = Specs},
+ run_prepared(Run, Skip, Opts#opts{testspec_files = Specs},
StartOpts);
false ->
run_dir(Opts, StartOpts)
@@ -1110,11 +1122,11 @@ run_test2(StartOpts) ->
Specs ->
Relaxed = get_start_opt(allow_user_terms, value, false, StartOpts),
%% using testspec(s) as input for test
- run_spec_file(Relaxed, Opts#opts{testspecs = Specs}, StartOpts)
+ run_spec_file(Relaxed, Opts#opts{testspec_files = Specs}, StartOpts)
end.
run_spec_file(Relaxed,
- Opts = #opts{testspecs = Specs},
+ Opts = #opts{testspec_files = Specs},
StartOpts) ->
Specs1 = case Specs of
[X|_] when is_integer(X) -> [Specs];
@@ -1153,7 +1165,10 @@ run_all_specs([{Specs,TS} | TSs], Opts, StartOpts, TotResult) ->
log_ts_names(Specs),
Combined = #opts{config = TSConfig} = combine_test_opts(TS, Specs, Opts),
AllConfig = merge_vals([Opts#opts.config, TSConfig]),
- try run_one_spec(TS, Combined#opts{config = AllConfig}, StartOpts) of
+ try run_one_spec(TS,
+ Combined#opts{config = AllConfig,
+ current_testspec=TS},
+ StartOpts) of
Result ->
run_all_specs(TSs, Opts, StartOpts, [Result | TotResult])
catch
@@ -1393,11 +1408,12 @@ run_testspec2(TestSpec) ->
EnvInclude++Opts#opts.include
end,
application:set_env(common_test, include, AllInclude),
+
LogDir1 = which(logdir,Opts#opts.logdir),
case check_and_install_configfiles(
Opts#opts.config, LogDir1, Opts) of
ok ->
- Opts1 = Opts#opts{testspecs = [],
+ Opts1 = Opts#opts{testspec_files = [],
logdir = LogDir1,
include = AllInclude},
{Run,Skip} = ct_testspec:prepare_tests(TS, node()),
@@ -1618,11 +1634,15 @@ groups_and_cases(Gs, Cs) ->
tests(TestDir, Suites, []) when is_list(TestDir), is_integer(hd(TestDir)) ->
[{?testdir(TestDir,Suites),ensure_atom(Suites),all}];
tests(TestDir, Suite, Cases) when is_list(TestDir), is_integer(hd(TestDir)) ->
+ [{?testdir(TestDir,Suite),ensure_atom(Suite),Cases}];
+tests([TestDir], Suite, Cases) when is_list(TestDir), is_integer(hd(TestDir)) ->
[{?testdir(TestDir,Suite),ensure_atom(Suite),Cases}].
tests([{Dir,Suite}],Cases) ->
[{?testdir(Dir,Suite),ensure_atom(Suite),Cases}];
tests(TestDir, Suite) when is_list(TestDir), is_integer(hd(TestDir)) ->
- tests(TestDir, ensure_atom(Suite), all).
+ tests(TestDir, ensure_atom(Suite), all);
+tests([TestDir], Suite) when is_list(TestDir), is_integer(hd(TestDir)) ->
+ tests(TestDir, ensure_atom(Suite), all).
tests(DirSuites) when is_list(DirSuites), is_tuple(hd(DirSuites)) ->
[{?testdir(Dir,Suite),ensure_atom(Suite),all} || {Dir,Suite} <- DirSuites];
tests(TestDir) when is_list(TestDir), is_integer(hd(TestDir)) ->
@@ -1704,6 +1724,9 @@ compile_and_run(Tests, Skip, Opts, Args) ->
ct_util:set_testdata({stylesheet,Opts#opts.stylesheet}),
%% save logopts
ct_util:set_testdata({logopts,Opts#opts.logopts}),
+ %% save info about current testspec (testspec record or undefined)
+ ct_util:set_testdata({testspec,Opts#opts.current_testspec}),
+
%% enable silent connections
case Opts#opts.silent_connections of
[] ->
@@ -1718,7 +1741,7 @@ compile_and_run(Tests, Skip, Opts, Args) ->
ct_logs:log("Silent connections", "~p", [Conns])
end
end,
- log_ts_names(Opts#opts.testspecs),
+ log_ts_names(Opts#opts.testspec_files),
TestSuites = suite_tuples(Tests),
{_TestSuites1,SuiteMakeErrors,AllMakeErrors} =
@@ -1967,22 +1990,7 @@ final_tests(Tests, Skip, Bad) ->
final_tests1([{TestDir,Suites,_}|Tests], Final, Skip, Bad) when
is_list(Suites), is_atom(hd(Suites)) ->
-% Separate =
-% fun(S,{DoSuite,Dont}) ->
-% case lists:keymember({TestDir,S},1,Bad) of
-% false ->
-% {[S|DoSuite],Dont};
-% true ->
-% SkipIt = {TestDir,S,"Make failed"},
-% {DoSuite,Dont++[SkipIt]}
-% end
-% end,
-
-% {DoSuites,Skip1} =
-% lists:foldl(Separate,{[],Skip},Suites),
-% Do = {TestDir,lists:reverse(DoSuites),all},
-
- Skip1 = [{TD,S,"Make failed"} || {{TD,S},_} <- Bad, S1 <- Suites,
+ Skip1 = [{TD,S,make_failed} || {{TD,S},_} <- Bad, S1 <- Suites,
S == S1, TD == TestDir],
Final1 = [{TestDir,S,all} || S <- Suites],
final_tests1(Tests, lists:reverse(Final1)++Final, Skip++Skip1, Bad);
@@ -1995,7 +2003,7 @@ final_tests1([{TestDir,all,all}|Tests], Final, Skip, Bad) ->
false ->
[]
end,
- Missing = [{TestDir,S,"Make failed"} || S <- MissingSuites],
+ Missing = [{TestDir,S,make_failed} || S <- MissingSuites],
Final1 = [{TestDir,all,all}|Final],
final_tests1(Tests, Final1, Skip++Missing, Bad);
@@ -2007,7 +2015,7 @@ final_tests1([{TestDir,Suite,GrsOrCs}|Tests], Final, Skip, Bad) when
is_list(GrsOrCs) ->
case lists:keymember({TestDir,Suite}, 1, Bad) of
true ->
- Skip1 = Skip ++ [{TestDir,Suite,all,"Make failed"}],
+ Skip1 = Skip ++ [{TestDir,Suite,all,make_failed}],
final_tests1(Tests, [{TestDir,Suite,all}|Final], Skip1, Bad);
false ->
GrsOrCs1 =
@@ -2134,6 +2142,14 @@ do_run_test(Tests, Skip, Opts0) ->
case check_and_add(Tests, [], []) of
{ok,AddedToPath} ->
ct_util:set_testdata({stats,{0,0,{0,0}}}),
+
+ %% test_server needs to know the include path too
+ InclPath = case application:get_env(common_test, include) of
+ {ok,Incls} -> Incls;
+ _ -> []
+ end,
+ application:set_env(test_server, include, InclPath),
+
test_server_ctrl:start_link(local),
%% let test_server expand the test tuples and count no of cases
diff --git a/lib/common_test/src/ct_telnet.erl b/lib/common_test/src/ct_telnet.erl
index babe73e575..b14731e74f 100644
--- a/lib/common_test/src/ct_telnet.erl
+++ b/lib/common_test/src/ct_telnet.erl
@@ -29,7 +29,9 @@
%% Command timeout = 10 sec (time to wait for a command to return)
%% Max no of reconnection attempts = 3
%% Reconnection interval = 5 sek (time to wait in between reconnection attempts)
-%% Keep alive = true (will send NOP to the server every 10 sec if connection is idle)</pre>
+%% Keep alive = true (will send NOP to the server every 8 sec if connection is idle)
+%% Polling limit = 0 (max number of times to poll to get a remaining string terminated)
+%% Polling interval = 1 sec (sleep time between polls)</pre>
%% <p>These parameters can be altered by the user with the following
%% configuration term:</p>
%% <pre>
@@ -37,7 +39,9 @@
%% {command_timeout,Millisec},
%% {reconnection_attempts,N},
%% {reconnection_interval,Millisec},
-%% {keep_alive,Bool}]}.</pre>
+%% {keep_alive,Bool},
+%% {poll_limit,N},
+%% {poll_interval,Millisec}]}.</pre>
%% <p><code>Millisec = integer(), N = integer()</code></p>
%% <p>Enter the <code>telnet_settings</code> term in a configuration
%% file included in the test and ct_telnet will retrieve the information
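As an illustration, a telnet_settings entry using the two new values could look as follows (all numbers are examples, not defaults):

    {telnet_settings, [{command_timeout,10000},
                       {reconnection_attempts,3},
                       {reconnection_interval,5000},
                       {keep_alive,true},
                       {poll_limit,10},
                       {poll_interval,1000}]}.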
@@ -156,6 +160,8 @@
-define(RECONN_TIMEOUT,5000).
-define(DEFAULT_TIMEOUT,10000).
-define(DEFAULT_PORT,23).
+-define(POLL_LIMIT,0).
+-define(POLL_INTERVAL,1000).
-include("ct_util.hrl").
@@ -169,6 +175,8 @@
type,
target_mod,
keep_alive,
+ poll_limit=?POLL_LIMIT,
+ poll_interval=?POLL_INTERVAL,
extra,
conn_to=?DEFAULT_TIMEOUT,
com_to=?DEFAULT_TIMEOUT,
@@ -379,8 +387,15 @@ cmdf(Connection,CmdFormat,Args,Opts) when is_list(Args) ->
%%% Connection = ct_telnet:connection()
%%% Data = [string()]
%%% Reason = term()
-%%% @doc Get all data which has been received by the telnet client
-%%% since last command was sent.
+%%% @doc Get all data that has been received by the telnet client
+%%% since the last command was sent. Note that only newline terminated
+%%% strings are returned. If the last string received has not yet
+%%% been terminated, the connection may be polled automatically until
+%%% the string is complete. The polling feature is controlled
+%%% by the `poll_limit' and `poll_interval' config values and is
+%%% by default disabled (meaning the function will immediately
+%%% return all complete strings received and save a remaining
+%%% non-terminated string for a later `get_data' call).
get_data(Connection) ->
case get_handle(Connection) of
{ok,Pid} ->
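A short usage sketch of the documented behaviour (Conn is assumed to be a handle from ct_telnet:open/1):

    %% with e.g. {poll_limit,5} in telnet_settings, ct_telnet polls up to
    %% five times for a trailing string that has not yet been terminated
    {ok,Lines} = ct_telnet:get_data(Conn),
    ct:log("Received so far: ~p", [Lines]).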
@@ -471,7 +486,8 @@ expect(Connection,Patterns) ->
%%% Opts = [Opt]
%%% Opt = {idle_timeout,IdleTimeout} | {total_timeout,TotalTimeout} |
%%% repeat | {repeat,N} | sequence | {halt,HaltPatterns} |
-%%% ignore_prompt | no_prompt_check
+%%% ignore_prompt | no_prompt_check | wait_for_prompt |
+%%% {wait_for_prompt,Prompt}
%%% IdleTimeout = infinity | integer()
%%% TotalTimeout = infinity | integer()
%%% N = integer()
@@ -484,9 +500,9 @@ expect(Connection,Patterns) ->
%%%
%%% @doc Get data from telnet and wait for the expected pattern.
%%%
-%%% <p><code>Pattern</code> can be a POSIX regular expression. If more
-%%% than one pattern is given, the function returns when the first
-%%% match is found.</p>
+%%% <p><code>Pattern</code> can be a POSIX regular expression. The function
+%%% returns as soon as a pattern has been successfully matched (at least one,
+%%% in the case of multiple patterns).</p>
%%%
%%% <p><code>RxMatch</code> is a list of matched strings. It looks
%%% like this: <code>[FullMatch, SubMatch1, SubMatch2, ...]</code>
@@ -509,10 +525,13 @@ expect(Connection,Patterns) ->
%%% milliseconds, <code>{error,timeout}</code> is returned. The default
%%% value is <code>infinity</code> (i.e. no time limit).</p>
%%%
-%%% <p>The function will always return when a prompt is found, unless
-%%% any of the <code>ignore_prompt</code> or
-%%% <code>no_prompt_check</code> options are used, in which case it
-%%% will return when a match is found or after a timeout.</p>
+%%% <p>The function will return when a prompt is received, even if no
+%%% pattern has yet been matched. In this event,
+%%% <code>{error,{prompt,Prompt}}</code> is returned.
+%%% However, this behaviour may be modified with the
+%%% <code>ignore_prompt</code> or <code>no_prompt_check</code> option, which
+%%% tells <code>expect</code> to return only when a match is found or after a
+%%% timeout.</p>
%%%
%%% <p>If the <code>ignore_prompt</code> option is used,
%%% <code>ct_telnet</code> will ignore any prompt found. This option
@@ -526,6 +545,13 @@ expect(Connection,Patterns) ->
%%% is useful if, for instance, the <code>Pattern</code> itself
%%% matches the prompt.</p>
%%%
+%%% <p>The <code>wait_for_prompt</code> option forces <code>ct_telnet</code>
+%%% to wait until the prompt string has been received before returning
+%%% (even if a pattern has already been matched). This is equivalent to calling:
+%%% <code>expect(Conn, Patterns++[{prompt,Prompt}], [sequence|Opts])</code>.
+%%% Note that <code>idle_timeout</code> and <code>total_timeout</code>
+%%% may abort the wait for the prompt.</p>
+%%%
%%% <p>The <code>repeat</code> option indicates that the pattern(s)
%%% shall be matched multiple times. If <code>N</code> is given, the
%%% pattern(s) will be matched <code>N</code> times, and the function
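A hedged usage sketch of the new wait_for_prompt option (connection handle, pattern, and prompt regexp are hypothetical):

    %% return only after both the pattern and the given prompt have been seen
    {ok,Match} = ct_telnet:expect(Conn, ["Password:"],
                                  [{wait_for_prompt,"\\$ "}]).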
@@ -596,9 +622,12 @@ init(Name,{Ip,Port,Type},{TargetMod,KeepAlive,Extra}) ->
"Reconnection attempts: ~p\n"
"Reconnection interval: ~p\n"
"Connection timeout: ~p\n"
- "Keep alive: ~w",
+ "Keep alive: ~w\n"
+ "Poll limit: ~w\n"
+ "Poll interval: ~w",
[Ip,Port,S1#state.com_to,S1#state.reconns,
- S1#state.reconn_int,S1#state.conn_to,KeepAlive]),
+ S1#state.reconn_int,S1#state.conn_to,KeepAlive,
+ S1#state.poll_limit,S1#state.poll_interval]),
{ok,TelnPid,S1};
{'EXIT',Reason} ->
{error,Reason};
@@ -619,6 +648,10 @@ set_telnet_defaults([{reconnection_interval,RInt}|Ss],S) ->
set_telnet_defaults(Ss,S#state{reconn_int=RInt});
set_telnet_defaults([{keep_alive,_}|Ss],S) ->
set_telnet_defaults(Ss,S);
+set_telnet_defaults([{poll_limit,PL}|Ss],S) ->
+ set_telnet_defaults(Ss,S#state{poll_limit=PL});
+set_telnet_defaults([{poll_interval,PI}|Ss],S) ->
+ set_telnet_defaults(Ss,S#state{poll_interval=PI});
set_telnet_defaults([Unknown|Ss],S) ->
force_log(S,error,
"Bad element in telnet_settings: ~p",[Unknown]),
@@ -631,18 +664,21 @@ handle_msg({cmd,Cmd,Opts},State) ->
start_gen_log(heading(cmd,State#state.name)),
log(State,cmd,"Cmd: ~p",[Cmd]),
+ %% whatever is in the buffer from previous operations
+ %% will be ignored as we go ahead with this telnet cmd
+
debug_cont_gen_log("Throwing Buffer:",[]),
debug_log_lines(State#state.buffer),
case {State#state.type,State#state.prompt} of
- {ts,_} ->
+ {ts,_} ->
silent_teln_expect(State#state.name,
State#state.teln_pid,
State#state.buffer,
prompt,
State#state.prx,
[{idle_timeout,2000}]);
- {ip,false} ->
+ {ip,false} ->
silent_teln_expect(State#state.name,
State#state.teln_pid,
State#state.buffer,
@@ -706,10 +742,8 @@ handle_msg({send,Cmd,Opts},State) ->
handle_msg(get_data,State) ->
start_gen_log(heading(get_data,State#state.name)),
log(State,cmd,"Reading data...",[]),
- {ok,Data,Buffer} = teln_get_all_data(State#state.teln_pid,
- State#state.prx,
- State#state.buffer,
- [],[]),
+ {ok,Data,Buffer} = teln_get_all_data(State,State#state.buffer,[],[],
+ State#state.poll_limit),
log(State,recv,"Return: ~p",[{ok,Data}]),
end_gen_log(),
{{ok,Data},State#state{buffer=Buffer}};
@@ -944,16 +978,25 @@ teln_cmd(Pid,Cmd,Prx,Newline,Timeout) ->
ct_telnet_client:send_data(Pid,Cmd,Newline),
teln_receive_until_prompt(Pid,Prx,Timeout).
-teln_get_all_data(Pid,Prx,Data,Acc,LastLine) ->
+teln_get_all_data(State=#state{teln_pid=Pid,prx=Prx},Data,Acc,LastLine,Polls) ->
case check_for_prompt(Prx,LastLine++Data) of
{prompt,Lines,_PromptType,Rest} ->
- teln_get_all_data(Pid,Prx,Rest,[Lines|Acc],[]);
+ teln_get_all_data(State,Rest,[Lines|Acc],[],State#state.poll_limit);
{noprompt,Lines,LastLine1} ->
case ct_telnet_client:get_data(Pid) of
+ {ok,[]} when LastLine1 /= [], Polls > 0 ->
+ %% No more data from server but the last string is not
+ %% a complete line (maybe because of a slow connection),
+ timer:sleep(State#state.poll_interval),
+ NewPolls = if Polls == infinity -> infinity;
+ true -> Polls-1
+ end,
+ teln_get_all_data(State,[],[Lines|Acc],LastLine1,NewPolls);
{ok,[]} ->
{ok,lists:reverse(lists:append([Lines|Acc])),LastLine1};
{ok,Data1} ->
- teln_get_all_data(Pid,Prx,Data1,[Lines|Acc],LastLine1)
+ teln_get_all_data(State,Data1,[Lines|Acc],LastLine1,
+ State#state.poll_limit)
end
end.
@@ -978,7 +1021,7 @@ silent_teln_expect(Name,Pid,Data,Pattern,Prx,Opts) ->
put(silent,Old),
Result.
-%% teln_expect/5
+%% teln_expect/6
%%
%% This function implements the expect functionality over telnet. In
%% general there are three possible ways to go:
@@ -1000,10 +1043,12 @@ teln_expect(Name,Pid,Data,Pattern0,Prx,Opts) ->
end,
PromptCheck = get_prompt_check(Opts),
- Seq = get_seq(Opts),
- Pattern = convert_pattern(Pattern0,Seq),
- {IdleTimeout,TotalTimeout} = get_timeouts(Opts),
+ {WaitForPrompt,Pattern1,Opts1} = wait_for_prompt(Pattern0,Opts),
+
+ Seq = get_seq(Opts1),
+ Pattern2 = convert_pattern(Pattern1,Seq),
+ {IdleTimeout,TotalTimeout} = get_timeouts(Opts1),
EO = #eo{teln_pid=Pid,
prx=Prx,
@@ -1013,9 +1058,16 @@ teln_expect(Name,Pid,Data,Pattern0,Prx,Opts) ->
haltpatterns=HaltPatterns,
prompt_check=PromptCheck},
- case get_repeat(Opts) of
+ case get_repeat(Opts1) of
false ->
- case teln_expect1(Name,Pid,Data,Pattern,[],EO) of
+ case teln_expect1(Name,Pid,Data,Pattern2,[],EO) of
+ {ok,Matched,Rest} when WaitForPrompt ->
+ case lists:reverse(Matched) of
+ [{prompt,_},Matched1] ->
+ {ok,Matched1,Rest};
+ [{prompt,_}|Matched1] ->
+ {ok,lists:reverse(Matched1),Rest}
+ end;
{ok,Matched,Rest} ->
{ok,Matched,Rest};
{halt,Why,Rest} ->
@@ -1025,7 +1077,7 @@ teln_expect(Name,Pid,Data,Pattern0,Prx,Opts) ->
end;
N ->
EO1 = EO#eo{repeat=N},
- repeat_expect(Name,Pid,Data,Pattern,[],EO1)
+ repeat_expect(Name,Pid,Data,Pattern2,[],EO1)
end.
convert_pattern(Pattern,Seq)
@@ -1089,6 +1141,40 @@ get_ignore_prompt(Opts) ->
get_prompt_check(Opts) ->
not lists:member(no_prompt_check,Opts).
+wait_for_prompt(Pattern, Opts) ->
+ case lists:member(wait_for_prompt, Opts) of
+ true ->
+ wait_for_prompt1(prompt, Pattern,
+ lists:delete(wait_for_prompt,Opts));
+ false ->
+ case proplists:get_value(wait_for_prompt, Opts) of
+ undefined ->
+ {false,Pattern,Opts};
+ PromptStr ->
+ wait_for_prompt1({prompt,PromptStr}, Pattern,
+ proplists:delete(wait_for_prompt,Opts))
+ end
+ end.
+
+wait_for_prompt1(Prompt, [Ch|_] = Pattern, Opts) when is_integer(Ch) ->
+ wait_for_prompt2(Prompt, [Pattern], Opts);
+wait_for_prompt1(Prompt, Pattern, Opts) when is_list(Pattern) ->
+ wait_for_prompt2(Prompt, Pattern, Opts);
+wait_for_prompt1(Prompt, Pattern, Opts) ->
+ wait_for_prompt2(Prompt, [Pattern], Opts).
+
+wait_for_prompt2(Prompt, Pattern, Opts) ->
+ Pattern1 = case lists:reverse(Pattern) of
+ [prompt|_] -> Pattern;
+ [{prompt,_}|_] -> Pattern;
+ _ -> Pattern ++ [Prompt]
+ end,
+ Opts1 = case lists:member(sequence, Opts) of
+ true -> Opts;
+ false -> [sequence|Opts]
+ end,
+ {true,Pattern1,Opts1}.
+
%% Repeat either single or sequence. All match results are accumulated
%% and returned when a halt condition is fulllfilled.
repeat_expect(_Name,_Pid,Rest,_Pattern,Acc,#eo{repeat=0}) ->
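As a worked example of what the new wait_for_prompt/2 helper above does to its arguments (derived directly from the clauses just added):

    %% wait_for_prompt(["login:"], [wait_for_prompt]) ->
    %%     {true, ["login:",prompt], [sequence]}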
@@ -1106,12 +1192,18 @@ repeat_expect(Name,Pid,Data,Pattern,Acc,EO) ->
teln_expect1(Name,Pid,Data,Pattern,Acc,EO=#eo{idle_timeout=IdleTO,
total_timeout=TotalTO}) ->
- ExpectFun = case EO#eo.seq of
+ %% TotalTO is a float value in this loop (unless it's 'infinity'),
+ %% but an integer value will be passed to the other functions
+ EOMod = if TotalTO /= infinity -> EO#eo{total_timeout=trunc(TotalTO)};
+ true -> EO
+ end,
+
+ ExpectFun = case EOMod#eo.seq of
true -> fun() ->
- seq_expect(Name,Pid,Data,Pattern,Acc,EO)
+ seq_expect(Name,Pid,Data,Pattern,Acc,EOMod)
end;
false -> fun() ->
- one_expect(Name,Pid,Data,Pattern,EO)
+ one_expect(Name,Pid,Data,Pattern,EOMod)
end
end,
case ExpectFun() of
@@ -1121,8 +1213,14 @@ teln_expect1(Name,Pid,Data,Pattern,Acc,EO=#eo{idle_timeout=IdleTO,
{halt,Why,Rest};
NotFinished ->
%% Get more data
- Fun = fun() -> get_data1(EO#eo.teln_pid) end,
- case timer:tc(ct_gen_conn, do_within_time, [Fun, IdleTO]) of
+ Fun = fun() -> get_data1(EOMod#eo.teln_pid) end,
+ BreakAfter = if TotalTO < IdleTO ->
+ %% use the integer value
+ EOMod#eo.total_timeout;
+ true ->
+ IdleTO
+ end,
+ case timer:tc(ct_gen_conn, do_within_time, [Fun,BreakAfter]) of
{_,{error,Reason}} ->
%% A timeout will occur when the telnet connection
%% is idle for EO#eo.idle_timeout milliseconds.
@@ -1131,13 +1229,15 @@ teln_expect1(Name,Pid,Data,Pattern,Acc,EO=#eo{idle_timeout=IdleTO,
case NotFinished of
{nomatch,Rest} ->
%% One expect
- teln_expect1(Name,Pid,Rest++Data1,Pattern,[],EO);
+ teln_expect1(Name,Pid,Rest++Data1,
+ Pattern,[],EOMod);
{continue,Patterns1,Acc1,Rest} ->
%% Sequence
- teln_expect1(Name,Pid,Rest++Data1,Patterns1,Acc1,EO)
+ teln_expect1(Name,Pid,Rest++Data1,
+ Patterns1,Acc1,EOMod)
end;
{Elapsed,{ok,Data1}} ->
- TVal = trunc(TotalTO - (Elapsed/1000)),
+ TVal = TotalTO - (Elapsed/1000),
if TVal =< 0 ->
{error,timeout};
true ->
@@ -1145,10 +1245,12 @@ teln_expect1(Name,Pid,Data,Pattern,Acc,EO=#eo{idle_timeout=IdleTO,
case NotFinished of
{nomatch,Rest} ->
%% One expect
- teln_expect1(Name,Pid,Rest++Data1,Pattern,[],EO1);
+ teln_expect1(Name,Pid,Rest++Data1,
+ Pattern,[],EO1);
{continue,Patterns1,Acc1,Rest} ->
%% Sequence
- teln_expect1(Name,Pid,Rest++Data1,Patterns1,Acc1,EO1)
+ teln_expect1(Name,Pid,Rest++Data1,
+ Patterns1,Acc1,EO1)
end
end
end
@@ -1165,7 +1267,7 @@ get_data1(Pid) ->
%% 1) Single expect.
%% First the whole data chunk is searched for a prompt (to avoid doing
%% a regexp match for the prompt at each line).
-%% If we are searching for anyting else, the datachunk is split into
+%% If we are searching for anything else, the datachunk is split into
%% lines and each line is matched against each pattern.
%% one_expect: split data chunk at prompts
@@ -1182,7 +1284,7 @@ one_expect(Name,Pid,Data,Pattern,EO) ->
log(name_or_pid(Name,Pid),"PROMPT: ~ts",[PromptType]),
{match,{prompt,PromptType},Rest};
[{prompt,_OtherPromptType}] ->
- %% Only searching for one specific prompt, not thisone
+ %% Only searching for one specific prompt, not this one
log_lines(Name,Pid,UptoPrompt),
{nomatch,Rest};
_ ->
@@ -1429,8 +1531,10 @@ check_for_prompt(Prx,Data) ->
split_lines(String) ->
split_lines(String,[],[]).
-split_lines([$\n|Rest],Line,Lines) ->
+split_lines([$\n|Rest],Line,Lines) when Line /= [] ->
split_lines(Rest,[],[lists:reverse(Line)|Lines]);
+split_lines([$\n|Rest],[],Lines) ->
+ split_lines(Rest,[],Lines);
split_lines([$\r|Rest],Line,Lines) ->
split_lines(Rest,Line,Lines);
split_lines([0|Rest],Line,Lines) ->
diff --git a/lib/common_test/src/ct_telnet_client.erl b/lib/common_test/src/ct_telnet_client.erl
index 36d33522a3..757ccc0aae 100644
--- a/lib/common_test/src/ct_telnet_client.erl
+++ b/lib/common_test/src/ct_telnet_client.erl
@@ -32,14 +32,14 @@
-module(ct_telnet_client).
-%% -define(debug, true).
+%%-define(debug, true).
-export([open/2, open/3, open/4, open/5, close/1]).
-export([send_data/2, send_data/3, get_data/1]).
-define(TELNET_PORT, 23).
-define(OPEN_TIMEOUT,10000).
--define(IDLE_TIMEOUT,10000).
+-define(IDLE_TIMEOUT,8000).
%% telnet control characters
-define(SE, 240).
@@ -111,11 +111,10 @@ get_data(Pid) ->
{ok,Data}
end.
-
%%%-----------------------------------------------------------------
%%% Internal functions
init(Parent, Server, Port, Timeout, KeepAlive, ConnName) ->
- case gen_tcp:connect(Server, Port, [list,{packet,0}], Timeout) of
+ case gen_tcp:connect(Server, Port, [list,{packet,0},{nodelay,true}], Timeout) of
{ok,Sock} ->
dbg("~p connected to: ~p (port: ~w, keep_alive: ~w)\n",
[ConnName,Server,Port,KeepAlive]),
@@ -146,7 +145,7 @@ loop(State, Sock, Acc) ->
ok
end;
{tcp,_,Msg0} ->
- dbg("tcp msg: ~tp~n",[Msg0]),
+ dbg("rcv tcp msg: ~tp~n",[Msg0]),
Msg = check_msg(Sock,Msg0,[]),
loop(State, Sock, [Msg | Acc]);
{send_data,Data} ->
@@ -180,6 +179,7 @@ loop(State, Sock, Acc) ->
NewState =
case State of
#state{keep_alive = true, get_data = 0} ->
+ dbg("sending NOP\n",[]),
if Acc == [] -> send([?IAC,?NOP], Sock,
State#state.conn_name);
true -> ok
@@ -225,15 +225,17 @@ loop(State, Sock, Acc) ->
gen_tcp:close(Sock),
Pid ! closed
after wait(State#state.keep_alive,?IDLE_TIMEOUT) ->
+ dbg("idle timeout\n",[]),
Data = lists:reverse(lists:append(Acc)),
case Data of
[] ->
+ dbg("sending NOP\n",[]),
send([?IAC,?NOP], Sock, State#state.conn_name),
loop(State, Sock, Acc);
_ when State#state.log_pos == length(Data)+1 ->
loop(State, Sock, Acc);
_ ->
- dbg("Idle timeout, printing ~tp\n",[Data]),
+ dbg("idle timeout, printing ~tp\n",[Data]),
Len = length(Data),
ct_telnet:log(State#state.conn_name,
general_io, "~ts",
@@ -391,7 +393,7 @@ cmd_dbg(Prefix,Cmd) ->
end.
timestamp() ->
- {MS,S,US} = now(),
+ {MS,S,US} = os:timestamp(),
{{Year,Month,Day}, {Hour,Min,Sec}} =
calendar:now_to_local_time({MS,S,US}),
MilliSec = trunc(US/1000),
diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl
index 10a9bdac67..10c3f2a938 100644
--- a/lib/common_test/src/ct_testspec.erl
+++ b/lib/common_test/src/ct_testspec.erl
@@ -27,6 +27,8 @@
collect_tests_from_list/2, collect_tests_from_list/3,
collect_tests_from_file/2, collect_tests_from_file/3]).
+-export([testspec_rec2list/1, testspec_rec2list/2]).
+
-include("ct_util.hrl").
-define(testspec_fields, record_info(fields, testspec)).
@@ -973,7 +975,8 @@ add_tests([Term={Tag,all_nodes,Data}|Ts],Spec) ->
should_be_added(Tag,Node,Data,Spec)],
add_tests(Tests++Ts,Spec);
invalid -> % ignore term
- add_tests(Ts,Spec)
+ Unknown = Spec#testspec.unknown,
+ add_tests(Ts,Spec#testspec{unknown=Unknown++[Term]})
end;
%% create one test entry per node in Nodes and reinsert
add_tests([{Tag,[],Data}|Ts],Spec) ->
@@ -1001,7 +1004,8 @@ add_tests([Term={Tag,NodeOrOther,Data}|Ts],Spec) ->
handle_data(Tag,Node,Data,Spec),
add_tests(Ts,mod_field(Spec,Tag,NodeIxData));
invalid -> % ignore term
- add_tests(Ts,Spec)
+ Unknown = Spec#testspec.unknown,
+ add_tests(Ts,Spec#testspec{unknown=Unknown++[Term]})
end;
false ->
add_tests([{Tag,all_nodes,{NodeOrOther,Data}}|Ts],Spec)
@@ -1012,13 +1016,15 @@ add_tests([Term={Tag,Data}|Ts],Spec) ->
valid ->
add_tests([{Tag,all_nodes,Data}|Ts],Spec);
invalid ->
- add_tests(Ts,Spec)
+ Unknown = Spec#testspec.unknown,
+ add_tests(Ts,Spec#testspec{unknown=Unknown++[Term]})
end;
%% some other data than a tuple
add_tests([Other|Ts],Spec) ->
case get(relaxed) of
- true ->
- add_tests(Ts,Spec);
+ true ->
+ Unknown = Spec#testspec.unknown,
+ add_tests(Ts,Spec#testspec{unknown=Unknown++[Other]});
false ->
throw({error,{undefined_term_in_spec,Other}})
end;
@@ -1149,6 +1155,24 @@ per_node([N|Ns],Tag,Data,Refs) ->
per_node([],_,_,_) ->
[].
+%% Change the testspec record "back" to a list of tuples
+testspec_rec2list(Rec) ->
+ {Terms,_} = lists:mapfoldl(fun(unknown, Pos) ->
+ {element(Pos, Rec),Pos+1};
+ (F, Pos) ->
+ {{F,element(Pos, Rec)},Pos+1}
+ end,2,?testspec_fields),
+ lists:flatten(Terms).
+
+%% Extract one or more values from a testspec record and
+%% return the result as a list of tuples
+testspec_rec2list(Field, Rec) when is_atom(Field) ->
+ [Term] = testspec_rec2list([Field], Rec),
+ Term;
+testspec_rec2list(Fields, Rec) ->
+ Terms = testspec_rec2list(Rec),
+ [{Field,proplists:get_value(Field, Terms)} || Field <- Fields].
+
%% read the value for FieldName in record Rec#testspec
read_field(Rec, FieldName) ->
catch lists:foldl(fun(F, Pos) when F == FieldName ->
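The newly exported functions make it possible to read fields back out of a parsed test specification; a small sketch (TS is assumed to be a #testspec{} record, for instance the one stored as testdata under the testspec key above):

    AllTerms = ct_testspec:testspec_rec2list(TS),
    {logdir,LogDirs} = ct_testspec:testspec_rec2list(logdir, TS).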
diff --git a/lib/common_test/src/ct_util.hrl b/lib/common_test/src/ct_util.hrl
index 845bb55486..f4cf407856 100644
--- a/lib/common_test/src/ct_util.hrl
+++ b/lib/common_test/src/ct_util.hrl
@@ -55,6 +55,7 @@
create_priv_dir=[],
alias=[],
tests=[],
+ unknown=[],
merge_tests=true}).
-record(cover, {app=none,
diff --git a/lib/common_test/src/ct_webtool.erl b/lib/common_test/src/ct_webtool.erl
new file mode 100644
index 0000000000..b67a7c2a92
--- /dev/null
+++ b/lib/common_test/src/ct_webtool.erl
@@ -0,0 +1,1207 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2001-2010. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(ct_webtool).
+-behaviour(gen_server).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% The general idea is: %%
+%% %%
+%% %%
+%% 1. Scan through the path for *.tool files and find all the web %%
+%% based tools. Query each tool for configuration data. %%
+%% 2. Add Alias for Erlscript and html for each tool to %%
+%% the webserver configuration data. %%
+%% 3. Start the webserver. %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% API functions
+-export([start/0, start/2, stop/0]).
+
+%% Starting Webtool from a shell script
+-export([script_start/0, script_start/1]).
+
+%% Web api
+-export([started_tools/2, toolbar/2, start_tools/2, stop_tools/2]).
+
+%% API against other tools
+-export([is_localhost/0]).
+
+%% Debug exports
+-export([get_tools1/1]).
+-export([debug/1, stop_debug/0, debug_app/1]).
+
+%% gen_server callbacks
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+-include_lib("kernel/include/file.hrl").
+-include_lib("stdlib/include/ms_transform.hrl").
+
+-record(state,{priv_dir,app_data,supvis,web_data,started=[]}).
+
+-define(MAX_NUMBER_OF_WEBTOOLS,256).
+-define(DEFAULT_PORT,8888).% must be >1024 or the user must be root on unix
+-define(DEFAULT_ADDR,{127,0,0,1}).
+
+-define(WEBTOOL_ALIAS,{ct_webtool,[{alias,{erl_alias,"/ct_webtool",[ct_webtool]}}]}).
+-define(HEADER,"Pragma:no-cache\r\n Content-type: text/html\r\n\r\n").
+-define(HTML_HEADER,"<HTML>\r\n<HEAD>\r\n<TITLE>WebTool</TITLE>\r\n</HEAD>\r\n<BODY BGCOLOR=\"#FFFFFF\">\r\n").
+-define(HTML_HEADER_RELOAD,"<HTML>\r\n<HEAD>\r\n<TITLE>WebTool
+ </TITLE>\r\n</HEAD>\r\n
+ <BODY BGCOLOR=\"#FFFFFF\" onLoad=reloadCompiledList()>\r\n").
+
+-define(HTML_END,"</BODY></HTML>").
+
+-define(SEND_URL_TIMEOUT,5000).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% For debugging only. %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Start tracing with
+%% debug(Functions).
+%% Functions = local | global | FunctionList
+%% FunctionList = [Function]
+%% Function = {FunctionName,Arity} | FunctionName |
+%% {Module, FunctionName, Arity} | {Module,FunctionName}
+debug(F) ->
+ ttb:tracer(all,[{file,"webtool.trc"}]), % tracing all nodes
+ ttb:p(all,[call,timestamp]),
+ MS = [{'_',[],[{return_trace},{message,{caller}}]}],
+ tp(F,MS),
+ ttb:ctp(?MODULE,stop_debug), % don't want tracing of the stop_debug func
+ ok.
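Given the description above, tracing can be switched on from an Erlang shell; for example (the traced function is only illustrative):

    ct_webtool:debug([{handle_app,4}]),   %% or debug(local) for all functions
    %% ... exercise the tool ...
    ct_webtool:stop_debug().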
+tp(local,MS) -> % all functions
+ ttb:tpl(?MODULE,MS);
+tp(global,MS) -> % all exported functions
+ ttb:tp(?MODULE,MS);
+tp([{M,F,A}|T],MS) -> % Other module
+ ttb:tpl(M,F,A,MS),
+ tp(T,MS);
+tp([{M,F}|T],MS) when is_atom(F) -> % Other module
+ ttb:tpl(M,F,MS),
+ tp(T,MS);
+tp([{F,A}|T],MS) -> % function/arity
+ ttb:tpl(?MODULE,F,A,MS),
+ tp(T,MS);
+tp([F|T],MS) -> % function
+ ttb:tpl(?MODULE,F,MS),
+ tp(T,MS);
+tp([],_MS) ->
+ ok.
+stop_debug() ->
+ ttb:stop([format]).
+
+debug_app(Mod) ->
+ ttb:tracer(all,[{file,"webtool_app.trc"},{handler,{fun out/4,true}}]),
+ ttb:p(all,[call,timestamp]),
+ MS = [{'_',[],[{return_trace},{message,{caller}}]}],
+ ttb:tp(Mod,MS),
+ ok.
+
+out(_,{trace_ts,Pid,call,MFA={M,F,A},{W,_,_},TS},_,S)
+ when W==webtool;W==mod_esi->
+ io:format("~w: (~p)~ncall ~s~n", [TS,Pid,ffunc(MFA)]),
+ [{M,F,length(A)}|S];
+out(_,{trace_ts,Pid,return_from,MFA,R,TS},_,[MFA|S]) ->
+ io:format("~w: (~p)~nreturned from ~s -> ~p~n", [TS,Pid,ffunc(MFA),R]),
+ S;
+out(_,_,_,_) ->
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Functions called via script. %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+script_start() ->
+ usage(),
+ halt().
+script_start([App]) ->
+ DefaultBrowser =
+ case os:type() of
+ {win32,_} -> iexplore;
+ _ -> firefox
+ end,
+ script_start([App,DefaultBrowser]);
+script_start([App,Browser]) ->
+ io:format("Starting webtool...\n"),
+ start(),
+ AvailableApps = get_applications(),
+ {OSType,_} = os:type(),
+ case lists:keysearch(App,1,AvailableApps) of
+ {value,{App,StartPage}} ->
+ io:format("Starting ~w...\n",[App]),
+ start_tools([],"app=" ++ atom_to_list(App)),
+ PortStr = integer_to_list(get_port()),
+ Url = case StartPage of
+ "/" ++ Page ->
+ "http://localhost:" ++ PortStr ++ "/" ++ Page;
+ _ ->
+ "http://localhost:" ++ PortStr ++ "/" ++ StartPage
+ end,
+ case Browser of
+ none ->
+ ok;
+ iexplore when OSType == win32->
+ io:format("Starting internet explorer...\n"),
+ {ok,R} = win32reg:open(""),
+ Key="\\local_machine\\SOFTWARE\\Microsoft\\IE Setup\\Setup",
+ win32reg:change_key(R,Key),
+ {ok,Val} = win32reg:value(R,"Path"),
+ IExplore=filename:join(win32reg:expand(Val),"iexplore.exe"),
+ os:cmd("\"" ++ IExplore ++ "\" " ++ Url);
+ _ when OSType == win32 ->
+ io:format("Starting ~w...\n",[Browser]),
+ os:cmd("\"" ++ atom_to_list(Browser) ++ "\" " ++ Url);
+ B when B==firefox; B==mozilla ->
+ io:format("Sending URL to ~w...",[Browser]),
+ BStr = atom_to_list(Browser),
+ SendCmd = BStr ++ " -raise -remote \'openUrl(" ++
+ Url ++ ")\'",
+ Port = open_port({spawn,SendCmd},[exit_status]),
+ receive
+ {Port,{exit_status,0}} ->
+ io:format("done\n"),
+ ok;
+ {Port,{exit_status,_Error}} ->
+ io:format(" not running, starting ~w...\n",
+ [Browser]),
+ os:cmd(BStr ++ " " ++ Url),
+ ok
+ after ?SEND_URL_TIMEOUT ->
+ io:format(" failed, starting ~w...\n",[Browser]),
+ erlang:port_close(Port),
+ os:cmd(BStr ++ " " ++ Url)
+ end;
+ _ ->
+ io:format("Starting ~w...\n",[Browser]),
+ os:cmd(atom_to_list(Browser) ++ " " ++ Url)
+ end,
+ ok;
+ false ->
+ stop(),
+ io:format("\n{error,{unknown_app,~p}}\n",[App]),
+ halt()
+ end.
+
+usage() ->
+ io:format("Starting webtool...\n"),
+ start(),
+ Apps = lists:map(fun({A,_}) -> A end,get_applications()),
+ io:format(
+ "\nUsage: start_webtool application [ browser ]\n"
+ "\nAvailable applications are: ~p\n"
+ "Default browser is \'iexplore\' (Internet Explorer) on Windows "
+ "or else \'firefox\'\n",
+ [Apps]),
+ stop().
+
+
+get_applications() ->
+ gen_server:call(ct_web_tool,get_applications).
+
+get_port() ->
+ gen_server:call(ct_web_tool,get_port).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Api functions to the genserver. %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+%
+%----------------------------------------------------------------------
+
+start()->
+ start(standard_path,standard_data).
+
+start(Path,standard_data)->
+ case get_standard_data() of
+ {error,Reason} ->
+ {error,Reason};
+ Data ->
+ start(Path,Data)
+ end;
+
+start(standard_path,Data)->
+ Path=get_path(),
+ start(Path,Data);
+
+start(Path,Port) when is_integer(Port)->
+ Data = get_standard_data(Port),
+ start(Path,Data);
+
+start(Path,Data0)->
+ Data = Data0 ++ rest_of_standard_data(),
+ gen_server:start({local,ct_web_tool},ct_webtool,{Path,Data},[]).
+
+stop()->
+ gen_server:call(ct_web_tool,stoppit).
+
+%----------------------------------------------------------------------
+%Web Api functions called by the web
+%----------------------------------------------------------------------
+started_tools(Env,Input)->
+ gen_server:call(ct_web_tool,{started_tools,Env,Input}).
+
+toolbar(Env,Input)->
+ gen_server:call(ct_web_tool,{toolbar,Env,Input}).
+
+start_tools(Env,Input)->
+ gen_server:call(ct_web_tool,{start_tools,Env,Input}).
+
+stop_tools(Env,Input)->
+ gen_server:call(ct_web_tool,{stop_tools,Env,Input}).
+%----------------------------------------------------------------------
+%Support API for other tools
+%----------------------------------------------------------------------
+
+is_localhost()->
+ gen_server:call(ct_web_tool,is_localhost).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%%The gen_server callback functions that build the web pages %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+handle_call(get_applications,_,State)->
+ MS = ets:fun2ms(fun({Tool,{web_data,{_,Start}}}) -> {Tool,Start} end),
+ Tools = ets:select(State#state.app_data,MS),
+ {reply,Tools,State};
+
+handle_call(get_port,_,State)->
+ {value,{port,Port}}=lists:keysearch(port,1,State#state.web_data),
+ {reply,Port,State};
+
+handle_call({started_tools,_Env,_Input},_,State)->
+ {reply,started_tools_page(State),State};
+
+handle_call({toolbar,_Env,_Input},_,State)->
+ {reply,toolbar(),State};
+
+handle_call({start_tools,Env,Input},_,State)->
+ {NewState,Page}=start_tools_page(Env,Input,State),
+ {reply,Page,NewState};
+
+handle_call({stop_tools,Env,Input},_,State)->
+ {NewState,Page}=stop_tools_page(Env,Input,State),
+ {reply,Page,NewState};
+
+handle_call(stoppit,_From,Data)->
+ {stop,normal,ok,Data};
+
+handle_call(is_localhost,_From,Data)->
+ Result=case proplists:get_value(bind_address, Data#state.web_data) of
+ ?DEFAULT_ADDR ->
+ true;
+ _IpNumber ->
+ false
+ end,
+ {reply,Result,Data}.
+
+
+handle_info(_Message,State)->
+ {noreply,State}.
+
+handle_cast(_Request,State)->
+ {noreply,State}.
+
+code_change(_,State,_)->
+ {ok,State}.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%
+% The other functions needed by the gen_server behaviour
+%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+% Start the gen_server
+%----------------------------------------------------------------------
+init({Path,Config})->
+ case filelib:is_dir(Path) of
+ true ->
+ {ok, Table} = get_tool_files_data(),
+ insert_app(?WEBTOOL_ALIAS, Table),
+ case ct_webtool_sup:start_link() of
+ {ok, Pid} ->
+ case start_webserver(Table, Path, Config) of
+ {ok, _} ->
+ print_url(Config),
+ {ok,#state{priv_dir=Path,
+ app_data=Table,
+ supvis=Pid,
+ web_data=Config}};
+ {error, Error} ->
+ {stop, {error, Error}}
+ end;
+ Error ->
+ {stop,Error}
+ end;
+ false ->
+ {stop, {error, error_dir}}
+ end.
+
+terminate(_Reason,Data)->
+ %%shut down the webserver
+ shutdown_server(Data),
+ %%Shutdown the different tools that are started with application:start
+ shutdown_apps(Data),
+ %%Shut down the supervisor and its children will die with it
+ shutdown_supervisor(Data),
+ ok.
+
+print_url(ConfigData)->
+ Server=proplists:get_value(server_name,ConfigData,"undefined"),
+ Port=proplists:get_value(port,ConfigData,"undefined"),
+ {A,B,C,D}=proplists:get_value(bind_address,ConfigData,"undefined"),
+ io:format("WebTool is available at http://~s:~w/~n",[Server,Port]),
+ io:format("Or http://~w.~w.~w.~w:~w/~n",[A,B,C,D,Port]).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%
+% begin build the pages
+%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%----------------------------------------------------------------------
+%The page that shows the started tools
+%----------------------------------------------------------------------
+started_tools_page(State)->
+ [?HEADER,?HTML_HEADER,started_tools(State),?HTML_END].
+
+toolbar()->
+ [?HEADER,?HTML_HEADER,toolbar_page(),?HTML_END].
+
+
+start_tools_page(_Env,Input,State)->
+ %%io:format("~n======= ~n ~p ~n============~n",[Input]),
+ case get_tools(Input) of
+ {tools,Tools}->
+ %%io:format("~n======= ~n ~p ~n============~n",[Tools]),
+ {ok,NewState}=handle_apps(Tools,State,start),
+ {NewState,[?HEADER,?HTML_HEADER_RELOAD,reload_started_apps(),
+ show_unstarted_apps(NewState),?HTML_END]};
+ _ ->
+ {State,[?HEADER,?HTML_HEADER,show_unstarted_apps(State),?HTML_END]}
+ end.
+
+stop_tools_page(_Env,Input,State)->
+ case get_tools(Input) of
+ {tools,Tools}->
+ {ok,NewState}=handle_apps(Tools,State,stop),
+ {NewState,[?HEADER,?HTML_HEADER_RELOAD,reload_started_apps(),
+ show_started_apps(NewState),?HTML_END]};
+ _ ->
+ {State,[?HEADER,?HTML_HEADER,show_started_apps(State),?HTML_END]}
+ end.
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% Functions that start and configure the webserver
+%% 1. Collect the config data
+%% 2. Start webserver
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%----------------------------------------------------------------------
+% Start the webserver
+%----------------------------------------------------------------------
+start_webserver(Data,Path,Config)->
+ case get_conf_data(Data,Path,Config) of
+ {ok,Conf_data}->
+ %%io:format("Conf_data: ~p~n",[Conf_data]),
+ start_server(Conf_data);
+ {error,Error} ->
+ {error,{error_server_conf_file,Error}}
+ end.
+
+start_server(Conf_data)->
+ case inets:start(httpd, Conf_data, stand_alone) of
+ {ok,Pid}->
+ {ok,Pid};
+ Error->
+ {error,{server_error,Error}}
+ end.
+
+%----------------------------------------------------------------------
+% Create config data for the webserver
+%----------------------------------------------------------------------
+get_conf_data(Data,Path,Config)->
+ Aliases=get_aliases(Data),
+ ServerRoot = filename:join([Path,"root"]),
+ MimeTypesFile = filename:join([ServerRoot,"conf","mime.types"]),
+ case httpd_conf:load_mime_types(MimeTypesFile) of
+ {ok,MimeTypes} ->
+ Config1 = Config ++ Aliases,
+ Config2 = [{server_root,ServerRoot},
+ {document_root,filename:join([Path,"root/doc"])},
+ {mime_types,MimeTypes} |
+ Config1],
+ {ok,Config2};
+ Error ->
+ Error
+ end.
+
+%----------------------------------------------------------------------
+% Check the path for *.tool files
+%----------------------------------------------------------------------
+get_tool_files_data()->
+ Tools=get_tools1(code:get_path()),
+ %%io:format("Data : ~p ~n",[Tools]),
+ get_file_content(Tools).
+
+%----------------------------------------------------------------------
+%Check that the data in the file really is Erlang terms
+%----------------------------------------------------------------------
+get_file_content(Tools)->
+ Get_data=fun({tool,ToolData}) ->
+ %%io:format("Data : ~p ~n",[ToolData]),
+ case proplists:get_value(config_func,ToolData) of
+ {M,F,A}->
+ case catch apply(M,F,A) of
+ {'EXIT',_} ->
+ bad_data;
+ Data when is_tuple(Data) ->
+ Data;
+ _->
+ bad_data
+ end;
+ _ ->
+ bad_data
+ end
+ end,
+ insert_file_content([X ||X<-lists:map(Get_data,Tools),X/=bad_data]).
+
+%----------------------------------------------------------------------
+%Insert the data from the file into the ets table
+%----------------------------------------------------------------------
+insert_file_content(Content)->
+ Table=ets:new(app_data,[bag]),
+ lists:foreach(fun(X)->
+ insert_app(X,Table)
+ end,Content),
+ {ok,Table}.
+
+%----------------------------------------------------------------------
+%Check that we got a tuple of an atom and a list; if so, add the
+%elements in the list to the ets:table
+%----------------------------------------------------------------------
+insert_app({Name,Key_val_list},Table) when is_list(Key_val_list),is_atom(Name)->
+ %%io:format("ToolData: ~p: ~p~n",[Name,Key_val_list]),
+ lists:foreach(
+ fun({alias,{erl_alias,Alias,Mods}}) ->
+ Key_val = {erl_script_alias,{Alias,Mods}},
+ %%io:format("Insert: ~p~n",[Key_val]),
+ ets:insert(Table,{Name,Key_val});
+ (Key_val_pair)->
+ %%io:format("Insert: ~p~n",[Key_val_pair]),
+ ets:insert(Table,{Name,Key_val_pair})
+ end,
+ Key_val_list);
+
+insert_app(_,_)->
+ ok.
+
+%----------------------------------------------------------------------
+% Select all the aliases in the database
+%----------------------------------------------------------------------
+get_aliases(Data)->
+ MS = ets:fun2ms(fun({_,{erl_script_alias,Alias}}) ->
+ {erl_script_alias,Alias};
+ ({_,{alias,Alias}}) ->
+ {alias,Alias}
+ end),
+ ets:select(Data,MS).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Helper functions %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+get_standard_data(Port)->
+ [
+ {port,Port},
+ {bind_address,?DEFAULT_ADDR},
+ {server_name,"localhost"}
+ ].
+
+get_standard_data()->
+ case get_free_port(?DEFAULT_PORT,?MAX_NUMBER_OF_WEBTOOLS) of
+ {error,Reason} -> {error,Reason};
+ Port ->
+ [
+ {port,Port},
+ {bind_address,?DEFAULT_ADDR},
+ {server_name,"localhost"}
+ ]
+ end.
+
+get_free_port(_Port,0) ->
+ {error,no_free_port_found};
+get_free_port(Port,N) ->
+ case gen_tcp:connect("localhost",Port,[]) of
+ {error, _Reason} ->
+ Port;
+ {ok,Sock} ->
+ gen_tcp:close(Sock),
+ get_free_port(Port+1,N-1)
+ end.
+
+rest_of_standard_data() ->
+ [
+ %% Do not allow the server to be crashed by malformed http-request
+ {max_header_siz,1024},
+ {max_header_action,reply414},
+ %% Go on a straight ip-socket
+ {com_type,ip_comm},
+ %% Do not change the order of these module names!!
+ {modules,[mod_alias,
+ mod_auth,
+ mod_esi,
+ mod_actions,
+ mod_cgi,
+ mod_include,
+ mod_dir,
+ mod_get,
+ mod_head,
+ mod_log,
+ mod_disk_log]},
+ {directory_index,["index.html"]},
+ {default_type,"text/plain"}
+ ].
+
+
+get_path()->
+ code:priv_dir(webtool).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% These functions are used to shut down the webserver
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%----------------------------------------------------------------------
+% Shut down the webserver
+%----------------------------------------------------------------------
+shutdown_server(State)->
+ {Addr,Port} = get_addr_and_port(State#state.web_data),
+ inets:stop(httpd,{Addr,Port}).
+
+get_addr_and_port(Config) ->
+ Addr = proplists:get_value(bind_address,Config,?DEFAULT_ADDR),
+ Port = proplists:get_value(port,Config,?DEFAULT_PORT),
+ {Addr,Port}.
+
+%----------------------------------------------------------------------
+% Select all apps in the table and close them
+%----------------------------------------------------------------------
+shutdown_apps(State)->
+ Data=State#state.app_data,
+ MS = ets:fun2ms(fun({_,{start,HowToStart}}) -> HowToStart end),
+ lists:foreach(fun(Start_app)->
+ stop_app(Start_app)
+ end,
+ ets:select(Data,MS)).
+
+%----------------------------------------------------------------------
+%Shut down the supervisor that supervises tools that are not
+%designed as applications
+%----------------------------------------------------------------------
+shutdown_supervisor(State)->
+ %io:format("~n==================~n"),
+ ct_webtool_sup:stop(State#state.supvis).
+ %io:format("~n==================~n").
+
+%----------------------------------------------------------------------
+%close the individual apps.
+%----------------------------------------------------------------------
+stop_app({child,_Real_name})->
+ ok;
+
+stop_app({app,Real_name})->
+ application:stop(Real_name);
+
+stop_app({func,_Start,Stop})->
+ case Stop of
+ {M,F,A} ->
+ catch apply(M,F,A);
+ _NoStop ->
+ ok
+ end.
+
+
+
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%%These functions create the webpage where the user can select
+%%whether to start or stop apps
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+toolbar_page()->
+ "<TABLE>
+ <TR>
+ <TD>
+ <B>Select Action</B>
+ </TD>
+ </TR>
+ <TR>
+ <TD>
+ <A HREF=\"./start_tools\" TARGET=right> Start Tools</A>
+ </TD>
+ </TR>
+ <TR>
+ <TD>
+ <A HREF=\"./stop_tools\" TARGET=right> Stop Tools</A>
+ </TD>
+ </TR>
+ </TABLE>".
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%% These functions create the webpage that shows the started apps
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%----------------------------------------------------------------------
+% started_tools(State)->String (html table)
+% State is a record of type state
+%----------------------------------------------------------------------
+started_tools(State)->
+ Names=get_started_apps(State#state.app_data,State#state.started),
+ "<TABLE BORDER=1 WIDTH=100%>
+ "++ make_rows(Names,[],0) ++"
+ </TABLE>".
+%----------------------------------------------------------------------
+%get_started_apps(Data,Started)-> [{web_name,link}]
+%selects the started apps from the ets table of apps.
+%----------------------------------------------------------------------
+
+get_started_apps(Data,Started)->
+ SelectData=fun({Name,Link}) ->
+ {Name,Link}
+ end,
+ MS = lists:map(fun(A) -> {{A,{web_data,'$1'}},[],['$1']} end,Started),
+
+ [{"WebTool","/tool_management.html"} |
+ [SelectData(X) || X <- ets:select(Data,MS)]].
+
+%----------------------------------------------------------------------
+% make_rows(List,Result,Fields)-> String (the rows of an HTML table)
+% List, a list of tuples as described above
+% Result, an accumulator for the result
+% Field, a counter that counts the number of cols in each row.
+%----------------------------------------------------------------------
+make_rows([],Result,Fields)->
+ Result ++ fill_out(Fields);
+make_rows([Data|Paths],Result,Field)when Field==0->
+ make_rows(Paths,Result ++ "<TR>" ++ make_field(Data),Field+1);
+
+make_rows([Path|Paths],Result,Field)when Field==4->
+ make_rows(Paths,Result ++ make_field(Path) ++ "</TR>",0);
+
+make_rows([Path|Paths],Result,Field)->
+ make_rows(Paths,Result ++ make_field(Path),Field+1).
+
+%----------------------------------------------------------------------
+% make_field(Path)-> String that is a field in an HTML table
+% Path is a name url tuple {Name,url}
+%----------------------------------------------------------------------
+make_field(Path)->
+ "<TD WIDTH=20%>" ++ get_name(Path) ++ "</TD>".
+
+
+%----------------------------------------------------------------------
+%get_name({Name,Url})->String that represents an <A> tag in HTML.
+%----------------------------------------------------------------------
+get_name({Name,Url})->
+ "<A HREF=\"" ++ Url ++ "\" TARGET=app_frame>" ++ Name ++ "</A>".
+
+
+%----------------------------------------------------------------------
+% fill_out(Nr)-> String that represents Nr fields in an HTML table.
+%----------------------------------------------------------------------
+fill_out(Nr)when Nr==0->
+ [];
+fill_out(Nr)when Nr==4->
+ "<TD WIDTH=\"20%\" >&nbsp</TD></TR>";
+
+fill_out(Nr)->
+ "<TD WIDTH=\"20%\">&nbsp</TD>" ++ fill_out(Nr+1).
+
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%
+%%These functions start applications and build the page showing tools
+%%to start
+%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------------------------------------
+%Checks whether the user selected a tool to start
+%----------------------------------------------------------------------
+get_tools(Input)->
+ case httpd:parse_query(Input) of
+ []->
+ no_tools;
+ Tools->
+ FormatData=fun({_Name,Data}) -> list_to_atom(Data) end,
+ SelectData=
+ fun({Name,_Data}) -> string:equal(Name,"app") end,
+ {tools,[FormatData(X)||X<-Tools,SelectData(X)]}
+ end.
+
+%----------------------------------------------------------------------
+% Select the start data for the applications that the user has
+% requested to start
+%----------------------------------------------------------------------
+handle_apps([],State,_Cmd)->
+ {ok,State};
+
+handle_apps([Tool|Tools],State,Cmd)->
+ case ets:match_object(State#state.app_data,{Tool,{start,'_'}}) of
+ []->
+ Started = case Cmd of
+ start ->
+ [Tool|State#state.started];
+ stop ->
+ lists:delete(Tool,State#state.started)
+ end,
+ {ok,#state{priv_dir=State#state.priv_dir,
+ app_data=State#state.app_data,
+ supvis=State#state.supvis,
+ web_data=State#state.web_data,
+ started=Started}};
+ ToStart ->
+ case handle_apps2(ToStart,State,Cmd) of
+ {ok,NewState}->
+ handle_apps(Tools,NewState,Cmd);
+ _->
+ handle_apps(Tools,State,Cmd)
+ end
+ end.
+
+%----------------------------------------------------------------------
+%Execute every start or stop entry registered for a tool.
+%----------------------------------------------------------------------
+handle_apps2([{Name,Start_data}],State,Cmd)->
+ case handle_app({Name,Start_data},State#state.app_data,State#state.supvis,Cmd) of
+ ok->
+ Started = case Cmd of
+ start ->
+ [Name|State#state.started];
+ stop ->
+
+ lists:delete(Name,State#state.started)
+ end,
+ {ok,#state{priv_dir=State#state.priv_dir,
+ app_data=State#state.app_data,
+ supvis=State#state.supvis,
+ web_data=State#state.web_data,
+ started=Started}};
+ _->
+ error
+ end;
+
+handle_apps2([{Name,Start_data}|Rest],State,Cmd)->
+ case handle_app({Name,Start_data},State#state.app_data,State#state.supvis,Cmd)of
+ ok->
+ handle_apps2(Rest,State,Cmd);
+ _->
+ error
+ end.
+
+
+%----------------------------------------------------------------------
+% Handle start and stop of applications
+%----------------------------------------------------------------------
+
+handle_app({Name,{start,{func,Start,Stop}}},Data,_Pid,Cmd)->
+ Action = case Cmd of
+ start ->
+ Start;
+ _ ->
+ Stop
+ end,
+ case Action of
+ {M,F,A} ->
+ case catch apply(M,F,A) of
+ {'EXIT',_} = Exit->
+ %%! Here the tool disappears from the webtool interface!!
+ io:format("\n=======ERROR (webtool, line ~w) =======\n"
+ "Could not start application \'~p\'\n\n"
+ "~w:~w(~s) ->\n"
+ "~p\n\n",
+ [?LINE,Name,M,F,format_args(A),Exit]),
+ ets:delete(Data,Name);
+ _OK->
+ ok
+ end;
+ _NoStart ->
+ ok
+ end;
+
+
+handle_app({Name,{start,{child,ChildSpec}}},Data,Pid,Cmd)->
+ case Cmd of
+ start ->
+ case catch supervisor:start_child(Pid,ChildSpec) of
+ {ok,_}->
+ ok;
+ {ok,_,_}->
+ ok;
+ {error,Reason}->
+ %%! Here the tool disappears from the webtool interface!!
+ io:format("\n=======ERROR (webtool, line ~w) =======\n"
+ "Could not start application \'~p\'\n\n"
+ "supervisor:start_child(~p,~p) ->\n"
+ "~p\n\n",
+ [?LINE,Name,Pid,ChildSpec,{error,Reason}]),
+ ets:delete(Data,Name);
+ Error ->
+ %%! Here the tool disappears from the webtool interface!!
+ io:format("\n=======ERROR (webtool, line ~w) =======\n"
+ "Could not start application \'~p\'\n\n"
+ "supervisor:start_child(~p,~p) ->\n"
+ "~p\n\n",
+ [?LINE,Name,Pid,ChildSpec,Error]),
+ ets:delete(Data,Name)
+ end;
+ stop ->
+ case catch supervisor:terminate_child(websup,element(1,ChildSpec)) of
+ ok ->
+ supervisor:delete_child(websup,element(1,ChildSpec));
+ _ ->
+ error
+ end
+ end;
+
+
+
+handle_app({Name,{start,{app,Real_name}}},Data,_Pid,Cmd)->
+ case Cmd of
+ start ->
+ case application:start(Real_name,temporary) of
+ ok->
+ io:write(Name),
+ ok;
+ {error,{already_started,_}}->
+ %% Remove it from the database so we don't start
+ %% anything already started
+ ets:match_delete(Data,{Name,{start,{app,Real_name}}}),
+ ok;
+ {error,_Reason}=Error->
+ %%! Here the tool disappears from the webtool interface!!
+ io:format("\n=======ERROR (webtool, line ~w) =======\n"
+ "Could not start application \'~p\'\n\n"
+ "application:start(~p,~p) ->\n"
+ "~p\n\n",
+ [?LINE,Name,Real_name,temporary,Error]),
+ ets:delete(Data,Name)
+ end;
+
+ stop ->
+ application:stop(Real_name)
+ end;
+
+%----------------------------------------------------------------------
+% If the data is incorrect, delete the app
+%----------------------------------------------------------------------
+handle_app({Name,Incorrect},Data,_Pid,Cmd)->
+ %%! Here the tool disappears from the webtool interface!!
+ io:format("\n=======ERROR (webtool, line ~w) =======\n"
+ "Could not ~w application \'~p\'\n\n"
+ "Incorrect data: ~p\n\n",
+ [?LINE,Cmd,Name,Incorrect]),
+ ets:delete(Data,Name).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% these functions create the page that shows the unstarted tools %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+reload_started_apps()->
+ "<script>
+ function reloadCompiledList()
+ {
+ parent.parent.top1.document.location.href=\"/webtool/webtool/started_tools\";
+ }
+ </script>".
+
+show_unstarted_apps(State)->
+ "<TABLE HEIGHT=100% WIDTH=100% BORDER=0>
+ <TR HEIGHT=80%><TD ALIGN=\"center\" VALIGN=\"middle\">
+ <FORM NAME=\"stop_apps\" ACTION=\"/webtool/webtool/start_tools\" >
+ <TABLE BORDER=1 WIDTH=60%>
+ <TR BGCOLOR=\"#8899AA\">
+ <TD ALIGN=CENTER COLSPAN=2><FONT SIZE=4>Available Tools<FONT></TD>
+ </TR>
+ <TR>
+ <TD WIDTH=50%>
+ <TABLE BORDER=0>
+ "++ list_available_apps(State)++"
+ <TR><TD COLSPAN=2>&nbsp;</TD></TR>
+ <TR>
+ <TD COLSPAN=2 ALIGN=\"center\">
+ <INPUT TYPE=submit VALUE=\"Start\">
+ </TD>
+ </TR>
+ </TABLE>
+ </TD>
+ <TD>
+ To Start a Tool:
+ <UL>
+ <LI>Select the
+ checkbox for each tool to
+ start.</LI>
+ <LI>Click on the
+ button marked <EM>Start</EM>.</LI></UL>
+ </TD>
+ </TR>
+ </TABLE>
+ </FORM>
+ </TD></TR>
+ <TR><TD>&nbsp;</TD></TR>
+ </TABLE>".
+
+
+
+list_available_apps(State)->
+ MS = ets:fun2ms(fun({Tool,{web_data,{Name,_}}}) -> {Tool,Name} end),
+ Unstarted_apps=
+ lists:filter(
+ fun({Tool,_})->
+ false==lists:member(Tool,State#state.started)
+ end,
+ ets:select(State#state.app_data,MS)),
+ case Unstarted_apps of
+ []->
+ "<TR><TD>All tools are started</TD></TR>";
+ _->
+ list_apps(Unstarted_apps)
+ end.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% these functions create the page that shows the started apps %%
+%% that the user can select to shut down %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+show_started_apps(State)->
+ "<TABLE HEIGHT=100% WIDTH=100% BORDER=0>
+ <TR HEIGHT=80%><TD ALIGN=\"center\" VALIGN=\"middle\">
+ <FORM NAME=\"stop_apps\" ACTION=\"/webtool/webtool/stop_tools\" >
+ <TABLE BORDER=1 WIDTH=60%>
+ <TR BGCOLOR=\"#8899AA\">
+ <TD ALIGN=CENTER COLSPAN=2><FONT SIZE=4>Started Tools<FONT></TD>
+ </TR>
+ <TR>
+ <TD WIDTH=50%>
+ <TABLE BORDER=0>
+ "++ list_started_apps(State)++"
+ <TR><TD COLSPAN=2>&nbsp;</TD></TR>
+ <TR>
+ <TD COLSPAN=2 ALIGN=\"center\">
+ <INPUT TYPE=submit VALUE=\"Stop\">
+ </TD>
+ </TR>
+ </TABLE>
+ </TD>
+ <TD>
+ Stop a Tool:
+ <UL>
+ <LI>Select the
+ checkbox for each tool to
+ stop.</LI>
+ <LI>Click on the
+ button marked <EM>Stop</EM>.</LI></UL>
+ </TD>
+ </TR>
+ </TABLE>
+ </FORM>
+ </TD></TR>
+ <TR><TD>&nbsp;</TD></TR>
+ </TABLE>".
+
+list_started_apps(State)->
+ MS = lists:map(fun(A) -> {{A,{web_data,{'$1','_'}}},[],[{{A,'$1'}}]} end,
+ State#state.started),
+ Started_apps= ets:select(State#state.app_data,MS),
+ case Started_apps of
+ []->
+ "<TR><TD>No tool is started yet.</TD></TR>";
+ _->
+ list_apps(Started_apps)
+ end.
+
+
+list_apps(Apps) ->
+ lists:map(fun({Tool,Name})->
+ "<TR><TD>
+ <INPUT TYPE=\"checkbox\" NAME=\"app\" VALUE=\""
+ ++ atom_to_list(Tool) ++ "\">
+ " ++ Name ++ "
+ </TD></TR>"
+ end,
+ Apps).
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% %%
+%% Collecting the data from the *.tool files %%
+%% %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%----------------------------------------
+% get_tools1(Dirs) => [{M,F,A},{M,F,A}...{M,F,A}]
+% Dirs - [string()] Directory names
+% Calls get_tools2/2 recursively for a number of directories
+% to retrieve the configuration data for the web-based tools.
+%----------------------------------------
+get_tools1(Dirs)->
+ get_tools1(Dirs,[]).
+
+get_tools1([Dir|Rest],Data) when is_list(Dir) ->
+ Tools=case filename:basename(Dir) of
+ %% Dir is an 'ebin' directory, check in '../priv' as well
+ "ebin" ->
+ [get_tools2(filename:join(filename:dirname(Dir),"priv")) |
+ get_tools2(Dir)];
+ _ ->
+ get_tools2(Dir)
+ end,
+ get_tools1(Rest,[Tools|Data]);
+
+get_tools1([],Data) ->
+ lists:flatten(Data).
+
+%----------------------------------------
+% get_tools2(Directory) => DataList
+% DataList : [WebTuple]|[]
+% WebTuple: {tool,[{config_func,{M,F,A}}]}
+%
+%----------------------------------------
+get_tools2(Dir)->
+ get_tools2(tool_files(Dir),[]).
+
+get_tools2([ToolFile|Rest],Data) ->
+ case get_tools3(ToolFile) of
+ {tool,WebData} ->
+ get_tools2(Rest,[{tool,WebData}|Data]);
+ {error,_Reason} ->
+ get_tools2(Rest,Data);
+ nodata ->
+ get_tools2(Rest,Data)
+ end;
+
+get_tools2([],Data) ->
+ Data.
+
+%----------------------------------------
+% get_tools3(ToolFile) => Tool|{error,Reason}|nodata
+% Tool: {tool,[KeyValTuple]}
+% ToolFile - string() A .tool file
+% Now that we have the file, get the data and sort it out
+%----------------------------------------
+get_tools3(ToolFile) ->
+ case file:consult(ToolFile) of
+ {error,open} ->
+ {error,nofile};
+ {error,read} ->
+ {error,format};
+ {ok,[{version,"1.2"},ToolInfo]} when is_list(ToolInfo)->
+ webdata(ToolInfo);
+ {ok,[{version,_Vsn},_Info]} ->
+ {error,old_version};
+ {ok,_Other} ->
+ {error,format}
+ end.
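Judging from the pattern match in get_tools3/1 above, a minimal *.tool file accepted by ct_webtool consists of two terms along these lines (module and function names are hypothetical):

    {version,"1.2"}.
    [{config_func,{my_tool,config_data,[]}}].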
+
+
+%----------------------------------------------------------------------
+% webdata(TupleList)-> ToolTuple| nodata
+% ToolTuple: {tool,[{config_func,{M,F,A}}]}
+%
+% There is a little unnecessary work in this format but it is extendable
+%----------------------------------------------------------------------
+webdata(TupleList)->
+ case proplists:get_value(config_func,TupleList,nodata) of
+ {M,F,A} ->
+ {tool,[{config_func,{M,F,A}}]};
+ _ ->
+ nodata
+ end.
+
+
+%=============================================================================
+% Functions for getting *.tool configuration files
+%=============================================================================
+
+%----------------------------------------
+% tool_files(Dir) => ToolFiles
+% Dir - string() Directory name
+% ToolFiles - [string()]
+% Return the list of all files in Dir ending with .tool, as full paths (Dir prepended)
+%----------------------------------------
+tool_files(Dir) ->
+ case file:list_dir(Dir) of
+ {ok,Files} ->
+ filter_tool_files(Dir,Files);
+ {error,_Reason} ->
+ []
+ end.
+
+%----------------------------------------
+% filter_tool_files(Dir,Files) => ToolFiles
+% Dir - string() Directory name
+% Files, ToolFiles - [string()] File names
+% Selects the files in Files ending with .tool and prepends Dir to each
+%----------------------------------------
+filter_tool_files(_Dir,[]) ->
+ [];
+filter_tool_files(Dir,[File|Rest]) ->
+ case filename:extension(File) of
+ ".tool" ->
+ [filename:join(Dir,File)|filter_tool_files(Dir,Rest)];
+ _ ->
+ filter_tool_files(Dir,Rest)
+ end.
+
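+% Illustrative example: with a hypothetical directory "/otp/lib/vts/priv"
+% containing vts.tool, tool_files("/otp/lib/vts/priv") returns
+% ["/otp/lib/vts/priv/vts.tool"].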
+
+%%%-----------------------------------------------------------------
+%%% format functions
+ffunc({M,F,A}) when is_list(A) ->
+ io_lib:format("~w:~w(~s)\n",[M,F,format_args(A)]);
+ffunc({M,F,A}) when is_integer(A) ->
+ io_lib:format("~w:~w/~w\n",[M,F,A]).
+
+format_args([]) ->
+ "";
+format_args(Args) ->
+ Str = lists:append(["~p"|lists:duplicate(length(Args)-1,",~p")]),
+ io_lib:format(Str,Args).
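+
+%% Illustrative examples (hypothetical MFAs):
+%%   ffunc({vts,config_data,[]}) formats to "vts:config_data()\n"
+%%   ffunc({my_mod,my_fun,2})    formats to "my_mod:my_fun/2\n"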
diff --git a/lib/common_test/src/ct_webtool_sup.erl b/lib/common_test/src/ct_webtool_sup.erl
new file mode 100644
index 0000000000..1d612a2d18
--- /dev/null
+++ b/lib/common_test/src/ct_webtool_sup.erl
@@ -0,0 +1,74 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(ct_webtool_sup).
+
+-behaviour(supervisor).
+
+%% External exports
+-export([start_link/0,stop/1]).
+
+%% supervisor callbacks
+-export([init/1]).
+
+%%%----------------------------------------------------------------------
+%%% API
+%%%----------------------------------------------------------------------
+start_link() ->
+ supervisor:start_link({local,ct_websup},ct_webtool_sup, []).
+
+stop(Pid)->
+ exit(Pid,normal).
+%%%----------------------------------------------------------------------
+%%% Callback functions from supervisor
+%%%----------------------------------------------------------------------
+
+%%----------------------------------------------------------------------
+%% Func: init/1
+%% Returns: {ok, {SupFlags, [ChildSpec]}} |
+%% ignore |
+%% {error, Reason}
+%%----------------------------------------------------------------------
+init(_StartArgs) ->
+ %%Child1 =
+ %%Child2 ={webcover_backend,{webcover_backend,start_link,[]},permanent,2000,worker,[webcover_backend]},
+ %%{ok,{{simple_one_for_one,5,10},[Child1]}}.
+ {ok,{{one_for_one,100,10},[]}}.
+
+%%%----------------------------------------------------------------------
+%%% Internal functions
+%%%----------------------------------------------------------------------
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/lib/common_test/src/cth_surefire.erl b/lib/common_test/src/cth_surefire.erl
index bb12171ea7..3deaefe0e9 100644
--- a/lib/common_test/src/cth_surefire.erl
+++ b/lib/common_test/src/cth_surefire.erl
@@ -59,6 +59,8 @@
-define(default_report,"junit_report.xml").
-define(suite_log,"suite.log.html").
+-define(now, os:timestamp()).
+
%% Number of dirs from log root to testcase log file.
%% ct_run.<node>.<timestamp>/<test_name>/run.<timestamp>/<tc_log>.html
-define(log_depth,3).
@@ -77,11 +79,11 @@ init(Path, Opts) ->
axis = proplists:get_value(axis,Opts,[]),
properties = proplists:get_value(properties,Opts,[]),
url_base = proplists:get_value(url_base,Opts),
- timer = now() }.
+ timer = ?now }.
pre_init_per_suite(Suite,SkipOrFail,State) when is_tuple(SkipOrFail) ->
{SkipOrFail, init_tc(State#state{curr_suite = Suite,
- curr_suite_ts = now()},
+ curr_suite_ts = ?now},
SkipOrFail) };
pre_init_per_suite(Suite,Config,#state{ test_cases = [] } = State) ->
TcLog = proplists:get_value(tc_logfile,Config),
@@ -96,7 +98,7 @@ pre_init_per_suite(Suite,Config,#state{ test_cases = [] } = State) ->
end,
{Config, init_tc(State#state{ filepath = Path,
curr_suite = Suite,
- curr_suite_ts = now(),
+ curr_suite_ts = ?now,
curr_log_dir = CurrLogDir},
Config) };
pre_init_per_suite(Suite,Config,State) ->
@@ -169,9 +171,9 @@ do_tc_skip(Res, State) ->
State#state{ test_cases = [NewTC | tl(TCs)]}.
init_tc(State, Config) when is_list(Config) == false ->
- State#state{ timer = now(), tc_log = "" };
+ State#state{ timer = ?now, tc_log = "" };
init_tc(State, Config) ->
- State#state{ timer = now(),
+ State#state{ timer = ?now,
tc_log = proplists:get_value(tc_logfile, Config, [])}.
end_tc(Func, Config, Res, State) when is_atom(Func) ->
@@ -194,7 +196,7 @@ end_tc(Name, _Config, _Res, State = #state{ curr_suite = Suite,
ClassName = atom_to_list(Suite),
PGroup = string:join([ atom_to_list(Group)||
Group <- lists:reverse(Groups)],"."),
- TimeTakes = io_lib:format("~f",[timer:now_diff(now(),TS) / 1000000]),
+ TimeTakes = io_lib:format("~f",[timer:now_diff(?now,TS) / 1000000]),
State#state{ test_cases = [#testcase{ log = Log,
url = Url,
timestamp = now_to_string(TS),
@@ -209,7 +211,7 @@ close_suite(#state{ test_cases = [] } = State) ->
State;
close_suite(#state{ test_cases = TCs, url_base = UrlBase } = State) ->
{Total,Fail,Skip} = count_tcs(TCs,0,0,0),
- TimeTaken = timer:now_diff(now(),State#state.curr_suite_ts) / 1000000,
+ TimeTaken = timer:now_diff(?now,State#state.curr_suite_ts) / 1000000,
SuiteLog = filename:join(State#state.curr_log_dir,?suite_log),
SuiteUrl = make_url(UrlBase,SuiteLog),
Suite = #testsuite{ name = atom_to_list(State#state.curr_suite),
diff --git a/lib/common_test/src/vts.erl b/lib/common_test/src/vts.erl
index b340c6fdd1..ab13e7d0ee 100644
--- a/lib/common_test/src/vts.erl
+++ b/lib/common_test/src/vts.erl
@@ -63,21 +63,21 @@
%%%-----------------------------------------------------------------
%%% User API
start() ->
- webtool:start(),
- webtool:start_tools([],"app=vts").
+ ct_webtool:start(),
+ ct_webtool:start_tools([],"app=vts").
init_data(ConfigFiles,EvHandlers,LogDir,LogOpts,Tests) ->
call({init_data,ConfigFiles,EvHandlers,LogDir,LogOpts,Tests}).
stop() ->
- webtool:stop_tools([],"app=vts"),
- webtool:stop().
+ ct_webtool:stop_tools([],"app=vts"),
+ ct_webtool:stop().
report(What,Data) ->
call({report,What,Data}).
%%%-----------------------------------------------------------------
-%%% Return config data used by webtool
+%%% Return config data used by ct_webtool
config_data() ->
{ok,LogDir} =
case lists:keysearch(logdir,1,init:get_arguments()) of
diff --git a/lib/common_test/test/ct_auto_compile_SUITE.erl b/lib/common_test/test/ct_auto_compile_SUITE.erl
index cc546ed30d..3e4da31ab4 100644
--- a/lib/common_test/test/ct_auto_compile_SUITE.erl
+++ b/lib/common_test/test/ct_auto_compile_SUITE.erl
@@ -108,6 +108,8 @@ ac_spec(Config) when is_list(Config) ->
PrivDir = ?config(priv_dir, Config),
file:copy(filename:join(DataDir, "bad_SUITE.erl"),
filename:join(PrivDir, "bad_SUITE.erl")),
+ Suite = filename:join(DataDir, "dummy_SUITE"),
+ compile:file(Suite, [{outdir,PrivDir}]),
TestSpec = [{label,ac_spec},
{auto_compile,false},
{suites,PrivDir,all}],
@@ -160,28 +162,34 @@ events_to_check(Test, N) ->
test_events(ac_flag) ->
[
- {ct_test_support_eh,start_logging,{'DEF','RUNDIR'}},
- {ct_test_support_eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
- {ct_test_support_eh,start_info,{1,1,3}},
- {ct_test_support_eh,tc_start,{dummy_SUITE,init_per_suite}},
- {ct_test_support_eh,tc_done,{dummy_SUITE,init_per_suite,ok}},
- {ct_test_support_eh,test_stats,{1,1,{1,0}}},
- {ct_test_support_eh,tc_start,{dummy_SUITE,end_per_suite}},
- {ct_test_support_eh,tc_done,{dummy_SUITE,end_per_suite,ok}},
- {ct_test_support_eh,test_done,{'DEF','STOP_TIME'}},
- {ct_test_support_eh,stop_logging,[]}
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,start_info,{1,1,3}},
+ {?eh,tc_start,{ct_framework,error_in_suite}},
+ {?eh,tc_done,{ct_framework,error_in_suite,
+ {failed,{error,'bad_SUITE can not be compiled or loaded'}}}},
+ {?eh,tc_start,{dummy_SUITE,init_per_suite}},
+ {?eh,tc_done,{dummy_SUITE,init_per_suite,ok}},
+ {?eh,test_stats,{1,1,{1,0}}},
+ {?eh,tc_start,{dummy_SUITE,end_per_suite}},
+ {?eh,tc_done,{dummy_SUITE,end_per_suite,ok}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,stop_logging,[]}
];
test_events(ac_spec) ->
[
- {ct_test_support_eh,start_logging,{'DEF','RUNDIR'}},
- {ct_test_support_eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
- {ct_test_support_eh,start_info,{1,1,3}},
- {ct_test_support_eh,tc_start,{dummy_SUITE,init_per_suite}},
- {ct_test_support_eh,tc_done,{dummy_SUITE,init_per_suite,ok}},
- {ct_test_support_eh,test_stats,{1,1,{1,0}}},
- {ct_test_support_eh,tc_start,{dummy_SUITE,end_per_suite}},
- {ct_test_support_eh,tc_done,{dummy_SUITE,end_per_suite,ok}},
- {ct_test_support_eh,test_done,{'DEF','STOP_TIME'}},
- {ct_test_support_eh,stop_logging,[]}
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,start_info,{1,1,3}},
+ {?eh,tc_start,{ct_framework,error_in_suite}},
+ {?eh,tc_done,{ct_framework,error_in_suite,
+ {failed,{error,'bad_SUITE can not be compiled or loaded'}}}},
+ {?eh,tc_start,{dummy_SUITE,init_per_suite}},
+ {?eh,tc_done,{dummy_SUITE,init_per_suite,ok}},
+ {?eh,test_stats,{1,1,{1,0}}},
+ {?eh,tc_start,{dummy_SUITE,end_per_suite}},
+ {?eh,tc_done,{dummy_SUITE,end_per_suite,ok}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,stop_logging,[]}
].
diff --git a/lib/common_test/test/ct_config_SUITE_data/config/test/config_dynamic_SUITE.erl b/lib/common_test/test/ct_config_SUITE_data/config/test/config_dynamic_SUITE.erl
index 8ee12a2e4d..ef1fd63905 100644
--- a/lib/common_test/test/ct_config_SUITE_data/config/test/config_dynamic_SUITE.erl
+++ b/lib/common_test/test/ct_config_SUITE_data/config/test/config_dynamic_SUITE.erl
@@ -35,7 +35,7 @@
%% which will return the list with the following variables:
%% localtime = the erlang:localtime() result in list [{date, Date}, {time, Time}]
%% node = erlang:node() - can be compared in the testcase
-%% now = erlang:now() - easier to compare than localtime()
+%% now = os:timestamp() - easier to compare than localtime()
%% config_server_pid - pid of the config server, should NOT change!
%% config_server_vsn - .19
%% config_server_iteration - a number of iteration config_server's loop done
@@ -73,7 +73,7 @@ test_get_known_variable(_)->
test_localtime_update(_)->
Seconds = 5,
LT1 = ct:get_config(localtime),
- timer:sleep(Seconds*1000),
+ ct:sleep(Seconds*1000),
LT2 = ct:reload_config(localtime),
case is_diff_ok(LT1, LT2, Seconds) of
{false, Actual, Exp}->
diff --git a/lib/common_test/test/ct_config_SUITE_data/config/test/config_server.erl b/lib/common_test/test/ct_config_SUITE_data/config/test/config_server.erl
index 8463fea645..e65d6584b1 100644
--- a/lib/common_test/test/ct_config_SUITE_data/config/test/config_server.erl
+++ b/lib/common_test/test/ct_config_SUITE_data/config/test/config_server.erl
@@ -73,7 +73,7 @@ loop(Iteration)->
[{localtime, [{date, D}, {time, T}]},
{node, erlang:node()},
{config_server_iteration, Iteration},
- {now, erlang:now()},
+ {now, os:timestamp()},
{config_server_pid, self()},
{config_server_vsn, ?vsn}],
Config2 = if Iteration rem 2 == 0->
diff --git a/lib/common_test/test/ct_cover_SUITE.erl b/lib/common_test/test/ct_cover_SUITE.erl
index 87ba4ae1b9..1dab425509 100644
--- a/lib/common_test/test/ct_cover_SUITE.erl
+++ b/lib/common_test/test/ct_cover_SUITE.erl
@@ -77,7 +77,11 @@ all() ->
ct_cover_add_remove_nodes,
otp_9956,
cross,
- export_import
+ export_import,
+ relative_incl_dirs,
+ absolute_incl_dirs,
+ relative_excl_dirs,
+ absolute_excl_dirs
].
%%--------------------------------------------------------------------
@@ -215,6 +219,45 @@ export_import(Config) ->
check_calls(Events2,2),
ok.
+relative_incl_dirs(Config) ->
+ false = check_cover(Config),
+ RelDir = rel_path(?config(priv_dir, Config), ?config(data_dir, Config)),
+ CoverSpec = [{incl_dirs, [RelDir]}],
+ CoverFile = create_cover_file(rel_incl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(rel_incl_dirs, default, Opts, Config),
+ check_calls(Events, 1),
+ ok.
+
+absolute_incl_dirs(Config) ->
+ false = check_cover(Config),
+ CoverSpec = [{incl_dirs, [?config(data_dir, Config)]}],
+ CoverFile = create_cover_file(abs_incl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(abs_incl_dirs, default, Opts, Config),
+ check_calls(Events, 1),
+ ok.
+
+relative_excl_dirs(Config) ->
+ false = check_cover(Config),
+ RelDir = rel_path(?config(priv_dir, Config), ?config(data_dir, Config)),
+ CoverSpec = default_cover_file_content() ++ [{excl_dirs, [RelDir]}],
+ CoverFile = create_cover_file(rel_excl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(rel_excl_dirs, default_no_cover, Opts, Config),
+ check_no_cover_compiled(Events),
+ ok.
+
+absolute_excl_dirs(Config) ->
+ false = check_cover(Config),
+ AbsDir = ?config(data_dir, Config),
+ CoverSpec = default_cover_file_content() ++ [{excl_dirs, [AbsDir]}],
+ CoverFile = create_cover_file(abs_excl_dirs, CoverSpec, Config),
+ Opts = [{cover, CoverFile}],
+ {ok, Events} = run_test(abs_excl_dirs, default_no_cover, Opts, Config),
+ check_no_cover_compiled(Events),
+ ok.
+
%%%-----------------------------------------------------------------
%%% HELP FUNCTIONS
%%%-----------------------------------------------------------------
@@ -288,23 +331,36 @@ get_log_dirs(Events) ->
{ct_test_support_eh,
{event,start_logging,_Node,LogDir}} <- Events].
+%% Check if a module was compiled without cover
+check_no_cover_compiled(Events) ->
+ check_no_cover_compiled(Events, ?mod).
+check_no_cover_compiled(Events, Mod) ->
+ [ {error, {not_cover_compiled, Mod}} = analyse_log(CoverLog, Mod)
+ || CoverLog <- cover_logs(Events) ].
+
%% Check that each coverlog includes N calls to ?mod:foo/0
check_calls(Events,N) ->
check_calls(Events,{?mod,foo,0},N).
check_calls(Events,MFA,N) ->
- CoverLogs = [filename:join(D,"all.coverdata") || D <- get_log_dirs(Events)],
- do_check_logs(CoverLogs,MFA,N).
+ do_check_logs(cover_logs(Events),MFA,N).
do_check_logs([CoverLog|CoverLogs],{Mod,_,_} = MFA,N) ->
- {ok,_} = cover:start(),
- ok = cover:import(CoverLog),
- {ok,Calls} = cover:analyse(Mod,calls,function),
- ok = cover:stop(),
+ {ok, Calls} = analyse_log(CoverLog, Mod),
{MFA,N} = lists:keyfind(MFA,1,Calls),
do_check_logs(CoverLogs,MFA,N);
do_check_logs([],_,_) ->
ok.
+cover_logs(Events) ->
+ [filename:join(D,"all.coverdata") || D <- get_log_dirs(Events)].
+
+analyse_log(CoverLog, Mod) ->
+ {ok, _} = cover:start(),
+ ok = cover:import(CoverLog),
+ Result = cover:analyse(Mod, calls, function),
+ ok = cover:stop(),
+ Result.
+
fullname(Name) ->
{ok,Host} = inet:gethostname(),
list_to_atom(atom_to_list(Name) ++ "@" ++ Host).
@@ -333,3 +389,12 @@ start_slave(Name,Args) ->
{boot_timeout,10}, % extending some timers for slow test hosts
{init_timeout,10},
{startup_timeout,10}]).
+
+rel_path(From, To) ->
+ Segments = do_rel_path(filename:split(From), filename:split(To)),
+ filename:join(Segments).
+
+do_rel_path([Seg|RestA], [Seg|RestB]) ->
+ do_rel_path(RestA, RestB);
+do_rel_path(PathA, PathB) ->
+ lists:duplicate(length(PathA), "..") ++ PathB.
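+
+%% Illustrative example (hypothetical paths):
+%%   rel_path("/ldisk/test/priv", "/ldisk/test/data") returns "../data",
+%%   i.e. the path to the second directory relative to the first.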
diff --git a/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
index 83d368c53d..789e48bd96 100644
--- a/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
+++ b/lib/common_test/test/ct_cover_SUITE_data/cover_SUITE.erl
@@ -71,6 +71,10 @@ default(_Config) ->
cover_test_mod:foo(),
ok.
+default_no_cover(_Config) ->
+ cover_test_mod:foo(),
+ ok.
+
slave(_Config) ->
cover_compiled = code:which(cover_test_mod),
cover_test_mod:foo(),
diff --git a/lib/common_test/test/ct_error_SUITE.erl b/lib/common_test/test/ct_error_SUITE.erl
index ecf231529a..8464225284 100644
--- a/lib/common_test/test/ct_error_SUITE.erl
+++ b/lib/common_test/test/ct_error_SUITE.erl
@@ -1466,7 +1466,8 @@ test_events(misc_errors) ->
{failed,{error,{suite_failed,this_is_expected}}}}},
{?eh,test_stats,{0,5,{0,0}}},
{?eh,tc_start,{misc_error_1_SUITE,killed_by_signal_1}},
- {?eh,tc_done,{misc_error_1_SUITE,killed_by_signal_1,i_die_now}},
+ {?eh,tc_done,{misc_error_1_SUITE,killed_by_signal_1,
+ {failed,{'EXIT',i_die_now}}}},
{?eh,test_stats,{0,6,{0,0}}},
{?eh,tc_start,{misc_error_1_SUITE,killed_by_signal_2}},
{?eh,tc_done,{misc_error_1_SUITE,killed_by_signal_2,
diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_12_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_12_SUITE.erl
index 806d3caf72..0ff8659269 100644
--- a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_12_SUITE.erl
+++ b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_12_SUITE.erl
@@ -26,10 +26,10 @@ init_per_testcase(_, Config) ->
Config.
end_per_testcase(tc2, _Config) ->
- timer:sleep(2000),
+ ct:sleep(2000),
exit(this_should_not_be_printed);
end_per_testcase(tc4, _Config) ->
- timer:sleep(2000),
+ ct:sleep(2000),
exit(this_should_not_be_printed);
end_per_testcase(_, _) ->
ok.
@@ -42,7 +42,7 @@ tc1() ->
put('$test_server_framework_test',
fun(init_tc, _Default) ->
ct:pal("init_tc(~p): Night time...",[self()]),
- timer:sleep(2000),
+ ct:sleep(2000),
ct:pal("init_tc(~p): Day time!",[self()]),
exit(this_should_not_be_printed);
(_, Default) -> Default
@@ -67,7 +67,7 @@ tc3(_) ->
put('$test_server_framework_test',
fun(end_tc, _Default) ->
ct:pal("end_tc(~p): Night time...",[self()]),
- timer:sleep(1000),
+ ct:sleep(1000),
ct:pal("end_tc(~p): Day time!",[self()]);
(_, Default) -> Default
end),
@@ -78,7 +78,7 @@ tc4() ->
put('$test_server_framework_test',
fun(end_tc, _Default) ->
ct:pal("end_tc(~p): Night time...",[self()]),
- timer:sleep(1000),
+ ct:sleep(1000),
ct:pal("end_tc(~p): Day time!",[self()]);
(_, Default) -> Default
end),
diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_13_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_13_SUITE.erl
index c8a3c1d15e..cfc0babb68 100644
--- a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_13_SUITE.erl
+++ b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_13_SUITE.erl
@@ -26,7 +26,7 @@ init_per_suite() ->
put('$test_server_framework_test',
fun(end_tc, _Default) ->
ct:pal("end_tc(~p): Night time...",[self()]),
- timer:sleep(1000),
+ ct:sleep(1000),
ct:pal("end_tc(~p): Day time!",[self()]);
(_, Default) -> Default
end),
diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_14_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_14_SUITE.erl
index 960d0f61b0..54b09e78c6 100644
--- a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_14_SUITE.erl
+++ b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_14_SUITE.erl
@@ -29,7 +29,7 @@ end_per_suite() ->
put('$test_server_framework_test',
fun(end_tc, _Default) ->
ct:pal("end_tc(~p): Night time...",[self()]),
- timer:sleep(1000),
+ ct:sleep(1000),
ct:pal("end_tc(~p): Day time!",[self()]);
(_, Default) -> Default
end),
diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_3_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_3_SUITE.erl
index 08c57887ef..0d93e46501 100644
--- a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_3_SUITE.erl
+++ b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_3_SUITE.erl
@@ -36,7 +36,7 @@ suite() ->
%% Reason = term()
%%--------------------------------------------------------------------
init_per_suite(Config) ->
- timer:sleep(5000),
+ ct:sleep(5000),
exit(shouldnt_happen).
% Config.
diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_7_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_7_SUITE.erl
index 9cd5b6ad29..d95f3b235b 100644
--- a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_7_SUITE.erl
+++ b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_7_SUITE.erl
@@ -43,7 +43,7 @@ init_per_suite(Config) ->
%% Config0 = Config1 = [tuple()]
%%--------------------------------------------------------------------
end_per_suite(Config) ->
- timer:sleep(5000),
+ ct:sleep(5000),
ok.
%%--------------------------------------------------------------------
diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_8_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_8_SUITE.erl
index 25993833d7..d8f0c48034 100644
--- a/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_8_SUITE.erl
+++ b/lib/common_test/test/ct_error_SUITE_data/error/test/cfg_error_8_SUITE.erl
@@ -57,7 +57,7 @@ init_per_group(g1, Config) ->
Config;
init_per_group(g2, Config) ->
ct:comment("init_per_group(g2) timeout"),
- timer:sleep(5000),
+ ct:sleep(5000),
Config;
init_per_group(g3, _Config) ->
badmatch = 42;
@@ -80,7 +80,7 @@ end_per_group(g11, _Config) ->
ok;
end_per_group(g12, _Config) ->
ct:comment("end_per_group(g6) timeout"),
- timer:sleep(5000),
+ ct:sleep(5000),
ok;
end_per_group(_GroupName, _Config) ->
ok.
diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_1_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_1_SUITE.erl
index a98382965f..1451a4119e 100644
--- a/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_1_SUITE.erl
+++ b/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_1_SUITE.erl
@@ -111,7 +111,7 @@ end_per_testcase1(tc2, Config) ->
ct:pal("end_per_testcase(tc2): ~p", [Config]),
tc2 = ?config(tc, Config),
{failed,timetrap_timeout} = ?config(tc_status, Config),
- timer:sleep(2000);
+ ct:sleep(2000);
end_per_testcase1(tc3, Config) ->
ct:pal("end_per_testcase(tc3): ~p", [Config]),
@@ -123,7 +123,7 @@ end_per_testcase1(tc4, Config) ->
ct:pal("end_per_testcase(tc4): ~p", [Config]),
tc4 = ?config(tc, Config),
{failed,{testcase_aborted,testing_end_conf}} = ?config(tc_status, Config),
- timer:sleep(2000);
+ ct:sleep(2000);
end_per_testcase1(tc5, Config) ->
ct:pal("end_per_testcase(tc5): ~p", [Config]),
@@ -182,29 +182,29 @@ all() ->
[tc1, tc2, tc3, tc4, tc5, tc6, tc7, tc8, tc9].
tc1(_) ->
- timer:sleep(2000),
+ ct:sleep(2000),
ok.
tc2(_) ->
- timer:sleep(2000).
+ ct:sleep(2000).
tc3(_) ->
spawn(ct, abort_current_testcase, [testing_end_conf]),
- timer:sleep(2000),
+ ct:sleep(2000),
ok.
tc4(_) ->
spawn(ct, abort_current_testcase, [testing_end_conf]),
- timer:sleep(2000),
+ ct:sleep(2000),
ok.
tc5(_) ->
- timer:sleep(2000),
+ ct:sleep(2000),
ok.
tc6(_) ->
spawn(ct, abort_current_testcase, [testing_end_conf]),
- timer:sleep(2000).
+ ct:sleep(2000).
tc7(_) ->
sleep(2000),
@@ -220,5 +220,5 @@ tc9(_) ->
%%%-----------------------------------------------------------------
sleep(T) ->
- timer:sleep(T),
+ ct:sleep(T),
ok.
diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_2_SUITE.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_2_SUITE.erl
index a77d06815e..d926fc55a4 100644
--- a/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_2_SUITE.erl
+++ b/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_2_SUITE.erl
@@ -141,12 +141,13 @@ tc3() ->
[{timetrap,{seconds,2}}].
tc3(_) ->
- T0 = now(),
+ T0 = erlang:monotonic_time(),
ct:timetrap(infinity),
N = list_to_integer(ct:get_config(multiply)),
ct:comment(io_lib:format("Sleeping for ~w sec...", [4*N])),
ct:sleep(4000),
- Diff = timer:now_diff(now(), T0),
+ T1 = erlang:monotonic_time(),
+ Diff = erlang:convert_time_unit(T1-T0, native, micro_seconds),
if ((Diff < (N*4000000)) or (Diff > (N*4500000))) ->
exit(not_expected);
true ->
diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_helper.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_helper.erl
index 1389acca11..a9ea0be847 100644
--- a/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_helper.erl
+++ b/lib/common_test/test/ct_error_SUITE_data/error/test/timetrap_helper.erl
@@ -3,5 +3,5 @@
-export([sleep/1]).
sleep(T) ->
- timer:sleep(T),
+ ct:sleep(T),
ok.
diff --git a/lib/common_test/test/ct_error_SUITE_data/error/test/verify_config.erl b/lib/common_test/test/ct_error_SUITE_data/error/test/verify_config.erl
index 446dd8bfdf..d5b3e0035a 100644
--- a/lib/common_test/test/ct_error_SUITE_data/error/test/verify_config.erl
+++ b/lib/common_test/test/ct_error_SUITE_data/error/test/verify_config.erl
@@ -81,7 +81,7 @@ init(Id, Opts) ->
-spec id(Opts :: proplists:proplist()) ->
Id :: term().
id(Opts) ->
- now().
+ os:timestamp().
%% @doc Called before init_per_suite is called. Note that this callback is
%% only called if the CTH is added before init_per_suite is run (eg. in a test
diff --git a/lib/common_test/test/ct_event_handler_SUITE.erl b/lib/common_test/test/ct_event_handler_SUITE.erl
index b534a7141d..b759424e46 100644
--- a/lib/common_test/test/ct_event_handler_SUITE.erl
+++ b/lib/common_test/test/ct_event_handler_SUITE.erl
@@ -29,6 +29,7 @@
-compile(export_all).
-include_lib("common_test/include/ct.hrl").
+-include_lib("common_test/src/ct_util.hrl").
%-include_lib("common_test/include/ct_event.hrl").
@@ -59,7 +60,7 @@ end_per_testcase(TestCase, Config) ->
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [start_stop, results].
+ [start_stop, results, event_mgrs].
groups() ->
[].
@@ -156,18 +157,28 @@ results(Config) when is_list(Config) ->
TestEvents =
[{eh_A,start_logging,{'DEF','RUNDIR'}},
{eh_A,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
- {eh_A,start_info,{1,1,3}},
+ {eh_A,start_info,{1,1,5}},
{eh_A,tc_start,{eh_11_SUITE,init_per_suite}},
{eh_A,tc_done,{eh_11_SUITE,init_per_suite,ok}},
- {eh_A,tc_start,{eh_11_SUITE,tc1}},
- {eh_A,tc_done,{eh_11_SUITE,tc1,ok}},
- {eh_A,test_stats,{1,0,{0,0}}},
- {eh_A,tc_start,{eh_11_SUITE,tc2}},
- {eh_A,tc_done,{eh_11_SUITE,tc2,{skipped,"Skipped"}}},
- {eh_A,test_stats,{1,0,{1,0}}},
- {eh_A,tc_start,{eh_11_SUITE,tc3}},
- {eh_A,tc_done,{eh_11_SUITE,tc3,{failed,{error,'Failing'}}}},
- {eh_A,test_stats,{1,1,{1,0}}},
+ [{eh_A,tc_start,{eh_11_SUITE,{init_per_group,g1,[]}}},
+ {eh_A,tc_done,{eh_11_SUITE,{init_per_group,g1,[]},ok}},
+ {eh_A,tc_start,{eh_11_SUITE,tc1}},
+ {eh_A,tc_done,{eh_11_SUITE,tc1,ok}},
+ {eh_A,test_stats,{1,0,{0,0}}},
+ {eh_A,tc_start,{eh_11_SUITE,tc2}},
+ {eh_A,tc_done,{eh_11_SUITE,tc2,ok}},
+ {eh_A,test_stats,{2,0,{0,0}}},
+ {eh_A,tc_start,{eh_11_SUITE,tc3}},
+ {eh_A,tc_done,{eh_11_SUITE,tc3,{skipped,"Skip"}}},
+ {eh_A,test_stats,{2,0,{1,0}}},
+ {eh_A,tc_start,{eh_11_SUITE,tc4}},
+ {eh_A,tc_done,{eh_11_SUITE,tc4,{skipped,"Skipped"}}},
+ {eh_A,test_stats,{2,0,{2,0}}},
+ {eh_A,tc_start,{eh_11_SUITE,tc5}},
+ {eh_A,tc_done,{eh_11_SUITE,tc5,{failed,{error,'Failing'}}}},
+ {eh_A,test_stats,{2,1,{2,0}}},
+ {eh_A,tc_start,{eh_11_SUITE,{end_per_group,g1,[]}}},
+ {eh_A,tc_done,{eh_11_SUITE,{end_per_group,g1,[]},ok}}],
{eh_A,tc_start,{eh_11_SUITE,end_per_suite}},
{eh_A,tc_done,{eh_11_SUITE,end_per_suite,ok}},
{eh_A,test_done,{'DEF','STOP_TIME'}},
@@ -176,5 +187,10 @@ results(Config) when is_list(Config) ->
ok = ct_test_support:verify_events(TestEvents++TestEvents, Events, Config).
+event_mgrs(_) ->
+ ?CT_EVMGR_REF = ct:get_event_mgr_ref(),
+ ?CT_MEVMGR_REF = ct_master:get_event_mgr_ref().
+
+
%%%-----------------------------------------------------------------
%%% HELP FUNCTIONS
diff --git a/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl b/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl
index 16b7129993..14ea12d579 100644
--- a/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl
+++ b/lib/common_test/test/ct_event_handler_SUITE_data/event_handling_1/test/eh_11_SUITE.erl
@@ -32,100 +32,36 @@
%% COMMON TEST CALLBACK FUNCTIONS
%%--------------------------------------------------------------------
-%%--------------------------------------------------------------------
-%% Function: suite() -> Info
-%%
-%% Info = [tuple()]
-%% List of key/value pairs.
-%%
-%% Description: Returns list of tuples to set default properties
-%% for the suite.
-%%
-%% Note: The suite/0 function is only meant to be used to return
-%% default data values, not perform any other operations.
-%%--------------------------------------------------------------------
suite() ->
[
{timetrap,{seconds,10}}
].
-%%--------------------------------------------------------------------
-%% Function: init_per_suite(Config0) ->
-%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
-%%
-%% Config0 = Config1 = [tuple()]
-%% A list of key/value pairs, holding the test case configuration.
-%% Reason = term()
-%% The reason for skipping the suite.
-%%
-%% Description: Initialization before the suite.
-%%
-%% Note: This function is free to add any key/value pairs to the Config
-%% variable, but should NOT alter/remove any existing entries.
-%%--------------------------------------------------------------------
init_per_suite(Config) ->
Config.
-%%--------------------------------------------------------------------
-%% Function: end_per_suite(Config0) -> void() | {save_config,Config1}
-%%
-%% Config0 = Config1 = [tuple()]
-%% A list of key/value pairs, holding the test case configuration.
-%%
-%% Description: Cleanup after the suite.
-%%--------------------------------------------------------------------
end_per_suite(_Config) ->
- ok.
+ %% should report ok as result to event handler
+ done.
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, Config) ->
+ %% should report ok as result to event handler
+ void.
-%%--------------------------------------------------------------------
-%% Function: init_per_testcase(TestCase, Config0) ->
-%% Config1 | {skip,Reason} | {skip_and_save,Reason,Config1}
-%%
-%% TestCase = atom()
-%% Name of the test case that is about to run.
-%% Config0 = Config1 = [tuple()]
-%% A list of key/value pairs, holding the test case configuration.
-%% Reason = term()
-%% The reason for skipping the test case.
-%%
-%% Description: Initialization before each test case.
-%%
-%% Note: This function is free to add any key/value pairs to the Config
-%% variable, but should NOT alter/remove any existing entries.
-%%--------------------------------------------------------------------
init_per_testcase(_TestCase, Config) ->
Config.
-%%--------------------------------------------------------------------
-%% Function: end_per_testcase(TestCase, Config0) ->
-%% void() | {save_config,Config1}
-%%
-%% TestCase = atom()
-%% Name of the test case that is finished.
-%% Config0 = Config1 = [tuple()]
-%% A list of key/value pairs, holding the test case configuration.
-%%
-%% Description: Cleanup after each test case.
-%%--------------------------------------------------------------------
end_per_testcase(_TestCase, _Config) ->
- ok.
+ true.
-%%--------------------------------------------------------------------
-%% Function: all() -> TestCases | {skip,Reason}
-%%
-%% TestCases = [TestCase | {sequence,SeqName}]
-%% TestCase = atom()
-%% Name of a test case.
-%% SeqName = atom()
-%% Name of a test case sequence.
-%% Reason = term()
-%% The reason for skipping all test cases.
-%%
-%% Description: Returns the list of test cases that are to be executed.
-%%--------------------------------------------------------------------
-all() ->
- [tc1, tc2, tc3].
+groups() ->
+ [{g1, [], [tc1, tc2, tc3, tc4, tc5]}].
+all() ->
+ [{group,g1}].
%%--------------------------------------------------------------------
%% TEST CASES
@@ -134,8 +70,15 @@ all() ->
tc1(_Config) ->
ok.
-tc2(_Config) ->
- {skip,"Skipped"}.
+tc2(_Config) ->
+ %% should report ok as result to event handler
+ 42.
+
+tc3(_Config) ->
+ {skip,"Skip"}.
+
+tc4(_Config) ->
+ {skipped,"Skipped"}.
-tc3(_Config) ->
+tc5(_Config) ->
exit('Failing').
diff --git a/lib/common_test/test/ct_gen_conn_SUITE_data/conn_SUITE.erl b/lib/common_test/test/ct_gen_conn_SUITE_data/conn_SUITE.erl
index 1344878675..96dd80e4e8 100644
--- a/lib/common_test/test/ct_gen_conn_SUITE_data/conn_SUITE.erl
+++ b/lib/common_test/test/ct_gen_conn_SUITE_data/conn_SUITE.erl
@@ -73,23 +73,23 @@ handles_to_multi_conn_pids(_Config) ->
{true,true} = {is_process_alive(Handle3),is_process_alive(ConnPid3)},
ok = proto:close(Handle1),
- timer:sleep(100),
+ ct:sleep(100),
{false,false} = {is_process_alive(Handle1),is_process_alive(ConnPid1)},
{true,true} = {is_process_alive(Handle2),is_process_alive(ConnPid2)},
ok = proto:kill_conn_proc(Handle2),
- timer:sleep(100),
+ ct:sleep(100),
{true,false} = {is_process_alive(Handle2),is_process_alive(ConnPid2)},
ConnPid2x = ct_gen_conn:get_conn_pid(Handle2),
true = is_process_alive(ConnPid2x),
ok = proto:close(Handle2),
- timer:sleep(100),
+ ct:sleep(100),
{false,false} = {is_process_alive(Handle2),is_process_alive(ConnPid2x)},
application:set_env(ct_test, reconnect, false),
ok = proto:kill_conn_proc(Handle3),
- timer:sleep(100),
+ ct:sleep(100),
{false,false} = {is_process_alive(Handle3),is_process_alive(ConnPid3)},
ok.
@@ -116,23 +116,23 @@ handles_to_single_conn_pids(_Config) ->
ct:pal("CONNS = ~n~p", [Conns]),
ok = proto:close(Handle1),
- timer:sleep(100),
+ ct:sleep(100),
{false,true} = {is_process_alive(Handle1),is_process_alive(ConnPid)},
ok = proto:kill_conn_proc(Handle2),
- timer:sleep(100),
+ ct:sleep(100),
NewConnPid = ct_gen_conn:get_conn_pid(Handle2),
NewConnPid = ct_gen_conn:get_conn_pid(Handle3),
true = is_process_alive(Handle2),
true = is_process_alive(Handle3),
ok = proto:close(Handle2),
- timer:sleep(100),
+ ct:sleep(100),
{false,true} = {is_process_alive(Handle2),is_process_alive(NewConnPid)},
application:set_env(ct_test, reconnect, false),
ok = proto:kill_conn_proc(Handle3),
- timer:sleep(100),
+ ct:sleep(100),
{false,false} = {is_process_alive(Handle3),is_process_alive(NewConnPid)},
ok.
@@ -158,29 +158,29 @@ names_to_multi_conn_pids(_Config) ->
Handle1 = proto:open(mconn1),
ok = proto:close(mconn1),
- timer:sleep(100),
+ ct:sleep(100),
{false,false} = {is_process_alive(Handle1),is_process_alive(ConnPid1)},
ok = proto:kill_conn_proc(Handle2),
- timer:sleep(100),
+ ct:sleep(100),
Handle2 = proto:open(mconn2), % should've been reconnected already
{true,false} = {is_process_alive(Handle2),is_process_alive(ConnPid2)},
ConnPid2x = ct_gen_conn:get_conn_pid(Handle2),
true = is_process_alive(ConnPid2x),
ok = proto:close(mconn2),
- timer:sleep(100),
+ ct:sleep(100),
{false,false} = {is_process_alive(Handle2),is_process_alive(ConnPid2x)},
Handle2y = proto:open(mconn2),
ConnPid2y = ct_gen_conn:get_conn_pid(Handle2y),
{true,true} = {is_process_alive(Handle2y),is_process_alive(ConnPid2y)},
ok = proto:close(mconn2),
- timer:sleep(100),
+ ct:sleep(100),
{false,false} = {is_process_alive(Handle2y),is_process_alive(ConnPid2y)},
application:set_env(ct_test, reconnect, false),
ok = proto:kill_conn_proc(Handle3),
- timer:sleep(100),
+ ct:sleep(100),
{false,false} = {is_process_alive(Handle3),is_process_alive(ConnPid3)},
ok.
@@ -211,11 +211,11 @@ names_to_single_conn_pids(_Config) ->
ct:pal("CONNS on ~p = ~n~p", [ConnPid,Conns]),
ok = proto:close(sconn1),
- timer:sleep(100),
+ ct:sleep(100),
{false,true} = {is_process_alive(Handle1),is_process_alive(ConnPid)},
ok = proto:kill_conn_proc(Handle2),
- timer:sleep(100),
+ ct:sleep(100),
{true,false} = {is_process_alive(Handle2),is_process_alive(ConnPid)},
Handle2 = proto:open(sconn2), % should've been reconnected already
NewConnPid = ct_gen_conn:get_conn_pid(Handle2),
@@ -227,12 +227,12 @@ names_to_single_conn_pids(_Config) ->
ct:pal("CONNS on ~p = ~n~p", [NewConnPid,Conns1]),
ok = proto:close(sconn2),
- timer:sleep(100),
+ ct:sleep(100),
{false,true} = {is_process_alive(Handle2),is_process_alive(NewConnPid)},
application:set_env(ct_test, reconnect, false),
ok = proto:kill_conn_proc(Handle3),
- timer:sleep(100),
+ ct:sleep(100),
{false,false} = {is_process_alive(Handle3),is_process_alive(NewConnPid)},
ok.
diff --git a/lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl b/lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl
index 804f722081..bfdc78639e 100644
--- a/lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl
+++ b/lib/common_test/test/ct_group_leader_SUITE_data/group_leader_SUITE.erl
@@ -258,14 +258,14 @@ gen_io(Label, N, Acc) ->
%% (via ct logging functions) from an external process which has a
%% different group leader than the test cases.
unexp1(Config) ->
- timer:sleep(1000),
+ ct:sleep(1000),
gen_unexp_io(),
- timer:sleep(1000),
+ ct:sleep(1000),
check_unexp_io(Config),
ok.
unexp2(_) ->
- timer:sleep(2000),
+ ct:sleep(2000),
ok.
gen_unexp_io() ->
diff --git a/lib/common_test/test/ct_groups_test_1_SUITE.erl b/lib/common_test/test/ct_groups_test_1_SUITE.erl
index e520a72227..d5de949554 100644
--- a/lib/common_test/test/ct_groups_test_1_SUITE.erl
+++ b/lib/common_test/test/ct_groups_test_1_SUITE.erl
@@ -302,7 +302,7 @@ test_events(groups_suite_1) ->
{?eh,tc_done,{groups_11_SUITE,{end_per_group,test_group_4,[]},ok}}],
{?eh,tc_start,{groups_11_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_11_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}];
@@ -410,7 +410,7 @@ test_events(groups_suite_2) ->
{?eh,tc_done,{groups_12_SUITE,{end_per_group,test_group_4,[]},ok}}],
{?eh,tc_start,{groups_12_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_12_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}];
@@ -505,7 +505,7 @@ test_events(groups_suites_1) ->
{?eh,tc_done,{groups_11_SUITE,{end_per_group,test_group_4,[]},ok}}],
{?eh,tc_start,{groups_11_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_11_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,ok}},
{?eh,tc_start,{groups_12_SUITE,init_per_suite}},
{?eh,tc_done,{groups_12_SUITE,init_per_suite,ok}},
@@ -596,7 +596,7 @@ test_events(groups_suites_1) ->
{?eh,tc_done,{groups_12_SUITE,{end_per_group,test_group_4,[]},ok}}],
{?eh,tc_start,{groups_12_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_12_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}];
@@ -691,7 +691,7 @@ test_events(groups_dir_1) ->
{?eh,tc_done,{groups_11_SUITE,{end_per_group,test_group_4,[]},ok}}],
{?eh,tc_start,{groups_11_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_11_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,ok}},
{?eh,tc_start,{groups_12_SUITE,init_per_suite}},
{?eh,tc_done,{groups_12_SUITE,init_per_suite,ok}},
@@ -782,7 +782,7 @@ test_events(groups_dir_1) ->
{?eh,tc_done,{groups_12_SUITE,{end_per_group,test_group_4,[]},ok}}],
{?eh,tc_start,{groups_12_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_12_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}];
@@ -878,7 +878,7 @@ test_events(groups_dirs_1) ->
{?eh,tc_done,{groups_11_SUITE,{end_per_group,test_group_4,[]},ok}}],
{?eh,tc_start,{groups_11_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_11_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,ok}},
{?eh,tc_start,{groups_12_SUITE,init_per_suite}},
{?eh,tc_done,{groups_12_SUITE,init_per_suite,ok}},
@@ -969,7 +969,7 @@ test_events(groups_dirs_1) ->
{?eh,tc_done,{groups_12_SUITE,{end_per_group,test_group_4,[]},ok}}],
{?eh,tc_start,{groups_12_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_12_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,ok}},
{?eh,tc_start,{groups_21_SUITE,init_per_suite}},
{?eh,tc_done,{groups_21_SUITE,init_per_suite,ok}},
@@ -1089,7 +1089,7 @@ test_events(groups_dirs_1) ->
{groups_21_SUITE,{end_per_group,test_group_4,[]},ok}}],
{?eh,tc_start,{groups_21_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_21_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_21_SUITE,end_per_suite,ok}},
{?eh,tc_start,{groups_22_SUITE,init_per_suite}},
{?eh,tc_done,{groups_22_SUITE,init_per_suite,ok}},
@@ -1223,6 +1223,6 @@ test_events(groups_dirs_1) ->
{groups_22_SUITE,{end_per_group,test_group_4,[]},ok}}],
{?eh,tc_start,{groups_22_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_22_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_22_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}].
diff --git a/lib/common_test/test/ct_groups_test_1_SUITE_data/groups_1/test/groups_12_SUITE.erl b/lib/common_test/test/ct_groups_test_1_SUITE_data/groups_1/test/groups_12_SUITE.erl
index ec90ef95d1..6f49f9a957 100644
--- a/lib/common_test/test/ct_groups_test_1_SUITE_data/groups_1/test/groups_12_SUITE.erl
+++ b/lib/common_test/test/ct_groups_test_1_SUITE_data/groups_1/test/groups_12_SUITE.erl
@@ -278,7 +278,7 @@ testcase_5a(Config) ->
%% increase chance the done event will come
%% during execution of subgroup (could be
%% tricky to handle)
- timer:sleep(3),
+ ct:sleep(3),
ok.
testcase_5b() ->
[].
diff --git a/lib/common_test/test/ct_groups_test_2_SUITE.erl b/lib/common_test/test/ct_groups_test_2_SUITE.erl
index 8b0de98709..f41395e028 100644
--- a/lib/common_test/test/ct_groups_test_2_SUITE.erl
+++ b/lib/common_test/test/ct_groups_test_2_SUITE.erl
@@ -302,7 +302,7 @@ test_events(empty_group) ->
{?eh,tc_done,
{groups_22_SUITE,{end_per_group,test_group_8,[]},ok}}],
{?eh,tc_start,{groups_22_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_22_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_22_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
].
diff --git a/lib/common_test/test/ct_groups_test_2_SUITE_data/groups_2/groups_22_SUITE.erl b/lib/common_test/test/ct_groups_test_2_SUITE_data/groups_2/groups_22_SUITE.erl
index 154c676d7e..80bb5ba69b 100644
--- a/lib/common_test/test/ct_groups_test_2_SUITE_data/groups_2/groups_22_SUITE.erl
+++ b/lib/common_test/test/ct_groups_test_2_SUITE_data/groups_2/groups_22_SUITE.erl
@@ -293,7 +293,7 @@ testcase_5a(Config) ->
%% increase chance the done event will come
%% during execution of subgroup (could be
%% tricky to handle)
- timer:sleep(3),
+ ct:sleep(3),
ok.
testcase_5b() ->
[].
diff --git a/lib/common_test/test/ct_hooks_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE.erl
index c8fc4bd59b..d5ad8312e6 100644
--- a/lib/common_test/test/ct_hooks_SUITE.erl
+++ b/lib/common_test/test/ct_hooks_SUITE.erl
@@ -1075,7 +1075,37 @@ test_events(fail_n_skip_with_minimal_cth) ->
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
{?eh,cth,{'_',init,['_',[]]}},
{?eh,tc_start,{'_',init_per_suite}},
-
+
+ {parallel,
+ [{?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+ group1,[parallel]}}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+ group1,[parallel]},ok}},
+ {parallel,
+ [{?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+ group2,[parallel]}}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{init_per_group,
+ group2,[parallel]},ok}},
+ %% Verify that 'skip' as well as 'skipped' works
+ {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,test_case2}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,test_case2,{skipped,"skip it"}}},
+ {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,test_case3}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,test_case3,{skipped,"skip it"}}},
+ {?eh,cth,{empty_cth,on_tc_skip,[{test_case2,group2},
+ {tc_user_skip,{skipped,"skip it"}},
+ []]}},
+ {?eh,cth,{empty_cth,on_tc_skip,[{test_case3,group2},
+ {tc_user_skip,{skipped,"skip it"}},
+ []]}},
+ {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,
+ group2,[parallel]}}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,group2,
+ [parallel]},ok}}]},
+ {?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,
+ group1,[parallel]}}},
+ {?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,
+ group1,[parallel]},ok}}]},
+
{?eh,tc_done,{'_',end_per_suite,ok}},
{?eh,cth,{'_',terminate,[[]]}},
{?eh,stop_logging,[]}
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl
index b2f22d8257..7b84c246ca 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_cth_fail_one_skip_one_SUITE.erl
@@ -41,6 +41,8 @@ end_per_group(_Group,_Config) ->
init_per_testcase(test_case2, Config) ->
{skip,"skip it"};
+init_per_testcase(test_case3, Config) ->
+ {skipped,"skip it"};
init_per_testcase(_TestCase, Config) ->
Config.
@@ -48,7 +50,9 @@ end_per_testcase(_TestCase, _Config) ->
ok.
groups() ->
- [{group1,[parallel],[{group2,[parallel],[test_case1,test_case2,test_case3]}]}].
+ [{group1,[parallel],
+ [{group2,[parallel],
+ [test_case1,test_case2,test_case3,test_case4]}]}].
all() ->
[{group,group1}].
@@ -62,3 +66,6 @@ test_case2(Config) ->
test_case3(Config) ->
ok.
+
+test_case4(Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_update_config_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_update_config_SUITE.erl
index 3c1f5669e8..f8c8725602 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_update_config_SUITE.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_update_config_SUITE.erl
@@ -26,21 +26,23 @@
-include("ct.hrl").
+-define(now, os:timestamp()).
+
%% Test server callback functions
init_per_suite(Config) ->
- [{init_per_suite,now()}|Config].
+ [{init_per_suite,?now}|Config].
end_per_suite(_Config) ->
ok.
init_per_testcase(_TestCase, Config) ->
- [{init_per_testcase,now()}|Config].
+ [{init_per_testcase,?now}|Config].
end_per_testcase(_TestCase, _Config) ->
ok.
init_per_group(GroupName, Config) ->
- [{init_per_group,now()}|Config].
+ [{init_per_group,?now}|Config].
end_per_group(GroupName, Config) ->
ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/cth_log_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/cth_log_SUITE.erl
index 18dd07e87e..80ce248418 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/cth_log_SUITE.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/cth_log_SUITE.erl
@@ -50,7 +50,7 @@ init_per_suite(Config) ->
end_per_suite(Config) ->
Gen = proplists:get_value(gen, Config),
exit(Gen, kill),
- timer:sleep(100),
+ ct:sleep(100),
ok.
%%--------------------------------------------------------------------
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
index 6caac7e447..5f8eae1f70 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
@@ -75,6 +75,7 @@
init(Id, Opts) ->
gen_event:notify(?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, init, [Id, Opts]}}),
+ ct:log("~w:init called", [?MODULE]),
{ok,Opts}.
%% @doc The ID is used to uniquly identify an CTH instance, if two CTH's
@@ -85,7 +86,8 @@ init(Id, Opts) ->
id(Opts) ->
gen_event:notify(?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, id, [Opts]}}),
- now().
+ ct:log("~w:id called", [?MODULE]),
+ os:timestamp().
%% @doc Called before init_per_suite is called. Note that this callback is
%% only called if the CTH is added before init_per_suite is run (eg. in a test
@@ -100,6 +102,7 @@ pre_init_per_suite(Suite,Config,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_init_per_suite,
[Suite,Config,State]}}),
+ ct:log("~w:pre_init_per_suite(~w) called", [?MODULE,Suite]),
{Config, State}.
%% @doc Called after init_per_suite.
@@ -114,6 +117,7 @@ post_init_per_suite(Suite,Config,Return,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_init_per_suite,
[Suite,Config,Return,State]}}),
+ ct:log("~w:post_init_per_suite(~w) called", [?MODULE,Suite]),
{Return, State}.
%% @doc Called before end_per_suite. The config/state can be changed here,
@@ -127,6 +131,7 @@ pre_end_per_suite(Suite,Config,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_end_per_suite,
[Suite,Config,State]}}),
+ ct:log("~w:pre_end_per_suite(~w) called", [?MODULE,Suite]),
{Config, State}.
%% @doc Called after end_per_suite. Note that the config cannot be
@@ -141,6 +146,7 @@ post_end_per_suite(Suite,Config,Return,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_end_per_suite,
[Suite,Config,Return,State]}}),
+ ct:log("~w:post_end_per_suite(~w) called", [?MODULE,Suite]),
{Return, State}.
%% @doc Called before each init_per_group.
@@ -154,6 +160,7 @@ pre_init_per_group(Group,Config,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_init_per_group,
[Group,Config,State]}}),
+ ct:log("~w:pre_init_per_group(~w) called", [?MODULE,Group]),
{Config, State}.
%% @doc Called after each init_per_group.
@@ -168,6 +175,7 @@ post_init_per_group(Group,Config,Return,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_init_per_group,
[Group,Config,Return,State]}}),
+ ct:log("~w:post_init_per_group(~w) called", [?MODULE,Group]),
{Return, State}.
%% @doc Called after each end_per_group. The config/state can be changed here,
@@ -181,6 +189,7 @@ pre_end_per_group(Group,Config,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_end_per_group,
[Group,Config,State]}}),
+ ct:log("~w:pre_end_per_group(~w) called", [?MODULE,Group]),
{Config, State}.
%% @doc Called after each end_per_group. Note that the config cannot be
@@ -195,6 +204,7 @@ post_end_per_group(Group,Config,Return,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_end_per_group,
[Group,Config,Return,State]}}),
+ ct:log("~w:post_end_per_group(~w) called", [?MODULE,Group]),
{Return, State}.
%% @doc Called before each test case.
@@ -208,6 +218,7 @@ pre_init_per_testcase(TC,Config,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_init_per_testcase,
[TC,Config,State]}}),
+ ct:log("~w:pre_init_per_testcase(~w) called", [?MODULE,TC]),
{Config, State}.
%% @doc Called after each test case. Note that the config cannot be
@@ -222,6 +233,7 @@ post_end_per_testcase(TC,Config,Return,State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_end_per_testcase,
[TC,Config,Return,State]}}),
+ ct:log("~w:post_end_per_testcase(~w) called", [?MODULE,TC]),
{Return, State}.
%% @doc Called after post_init_per_suite, post_end_per_suite, post_init_per_group,
@@ -237,6 +249,7 @@ on_tc_fail(TC, Reason, State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, on_tc_fail,
[TC,Reason,State]}}),
+ ct:log("~w:on_tc_fail(~w) called", [?MODULE,TC]),
State.
%% @doc Called when a test case is skipped by either user action
@@ -253,6 +266,7 @@ on_tc_skip(TC, Reason, State) ->
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, on_tc_skip,
[TC,Reason,State]}}),
+ ct:log("~w:on_tc_skip(~w) called", [?MODULE,TC]),
State.
%% @doc Called when the scope of the CTH is done, this depends on
@@ -274,4 +288,5 @@ terminate(State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, terminate, [State]}}),
+ ct:log("~w:terminate called", [?MODULE]),
ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
index 30721a6b3a..436470f46d 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
@@ -28,10 +28,14 @@
%% CT Hooks
-export([init/2]).
-export([terminate/1]).
+-export([on_tc_skip/3]).
init(Id, Opts) ->
empty_cth:init(Id, Opts).
+on_tc_skip(TC, Reason, State) ->
+ empty_cth:on_tc_skip(TC,Reason,State).
+
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl
index 2ee0d7da9c..55a1b9a130 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl
@@ -24,6 +24,7 @@
-include_lib("common_test/src/ct_util.hrl").
-include_lib("common_test/include/ct_event.hrl").
+-define(now, os:timestamp()).
%% CT Hooks
-compile(export_all).
@@ -33,44 +34,44 @@ init(Id, Opts) ->
pre_init_per_suite(Suite, Config, State) ->
empty_cth:pre_init_per_suite(Suite,Config,State),
- {[{pre_init_per_suite,now()}|Config],State}.
+ {[{pre_init_per_suite,?now}|Config],State}.
post_init_per_suite(Suite,Config,Return,State) ->
empty_cth:post_init_per_suite(Suite,Config,Return,State),
- {[{post_init_per_suite,now()}|Return],State}.
+ {[{post_init_per_suite,?now}|Return],State}.
pre_end_per_suite(Suite,Config,State) ->
empty_cth:pre_end_per_suite(Suite,Config,State),
- {[{pre_end_per_suite,now()}|Config],State}.
+ {[{pre_end_per_suite,?now}|Config],State}.
post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State),
- NewConfig = [{post_end_per_suite,now()}|Config],
+ NewConfig = [{post_end_per_suite,?now}|Config],
{NewConfig,NewConfig}.
pre_init_per_group(Group,Config,State) ->
empty_cth:pre_init_per_group(Group,Config,State),
- {[{pre_init_per_group,now()}|Config],State}.
+ {[{pre_init_per_group,?now}|Config],State}.
post_init_per_group(Group,Config,Return,State) ->
empty_cth:post_init_per_group(Group,Config,Return,State),
- {[{post_init_per_group,now()}|Return],State}.
+ {[{post_init_per_group,?now}|Return],State}.
pre_end_per_group(Group,Config,State) ->
empty_cth:pre_end_per_group(Group,Config,State),
- {[{pre_end_per_group,now()}|Config],State}.
+ {[{pre_end_per_group,?now}|Config],State}.
post_end_per_group(Group,Config,Return,State) ->
empty_cth:post_end_per_group(Group,Config,Return,State),
- {[{post_end_per_group,now()}|Config],State}.
+ {[{post_end_per_group,?now}|Config],State}.
pre_init_per_testcase(TC,Config,State) ->
empty_cth:pre_init_per_testcase(TC,Config,State),
- {[{pre_init_per_testcase,now()}|Config],State}.
+ {[{pre_init_per_testcase,?now}|Config],State}.
post_end_per_testcase(TC,Config,Return,State) ->
empty_cth:post_end_per_testcase(TC,Config,Return,State),
- {[{post_end_per_testcase,now()}|Config],State}.
+ {[{post_end_per_testcase,?now}|Config],State}.
on_tc_fail(TC, Reason, State) ->
empty_cth:on_tc_fail(TC,Reason,State).
diff --git a/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl
index 332e54d1a7..e26ed4089a 100644
--- a/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl
+++ b/lib/common_test/test/ct_netconfc_SUITE_data/netconfc1_SUITE.erl
@@ -150,7 +150,7 @@ hello_from_server_first(Config) ->
{ok,Client} = ct_netconfc:only_open(?DEFAULT_SSH_OPTS(DataDir)),
ct:sleep(500),
?NS:expect(hello),
- ?ok = ct_netconfc:hello(Client),
+ ?ok = ct_netconfc:hello(Client, [{capability, ["urn:com:ericsson:ebase:1.1.0"]}], infinity),
?NS:expect_do_reply('close-session',close,ok),
?ok = ct_netconfc:close_session(Client),
ok.
@@ -204,7 +204,7 @@ hello_required_exists(Config) ->
?NS:expect_do_reply('close-session',close,ok),
?ok = ct_netconfc:close_session(my_named_connection),
- timer:sleep(500),
+ ct:sleep(500),
%% Then check that it can be used again after the first is closed
{ok,_Client2} = open_configured_success(my_named_connection,DataDir),
@@ -486,8 +486,18 @@ action(Config) ->
DataDir = ?config(data_dir,Config),
{ok,Client} = open_success(DataDir),
Data = [{myactionreturn,[{xmlns,"myns"}],["value"]}],
- ?NS:expect_reply(action,{data,Data}),
- {ok,Data} = ct_netconfc:action(Client,{myaction,[{xmlns,"myns"}],[]}),
+ %% test either to receive {data,Data} or {ok,Data},
+ %% both need to be handled
+ ct:log("Client will receive {~w,~p}", [data,Data]),
+ ct:log("Expecting ~p", [{ok, Data}]),
+ ?NS:expect_reply(action,{data, Data}),
+ {ok, Data} = ct_netconfc:action(Client,{myaction,[{xmlns,"myns"}],[]}),
+
+ ct:log("Client will receive {~w,~p}", [ok,Data]),
+ ct:log("Expecting ~p", [ok]),
+ ?NS:expect_reply(action,{ok, Data}),
+ ok = ct_netconfc:action(Client,{myaction,[{xmlns,"myns"}],[]}),
+
?NS:expect_do_reply('close-session',close,ok),
?ok = ct_netconfc:close_session(Client),
ok.
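The extended test above exercises both reply forms of ct_netconfc:action/2: when the rpc-reply carries data the call returns {ok,Data}, and when the reply is wrapped in an ok element the call returns plain ok. A minimal sketch of a caller that handles both forms (run_action/2 is a hypothetical helper, not part of the suite):

run_action(Client, SimpleXml) ->
    case ct_netconfc:action(Client, SimpleXml) of
        {ok, Data}      -> {data, Data};     %% rpc-reply carried data
        ok              -> ok;               %% rpc-reply was just an ok element
        {error, Reason} -> {error, Reason}
    end.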
@@ -654,10 +664,10 @@ receive_chunked_data(Config) ->
%% Spawn a process which will wait a bit for the client to send
%% the request (below), then order the server to the chunks of the
%% rpc-reply one by one.
- spawn(fun() -> timer:sleep(500),?NS:hupp(send,Part1),
- timer:sleep(100),?NS:hupp(send,Part2),
- timer:sleep(100),?NS:hupp(send,Part3),
- timer:sleep(100),?NS:hupp(send,Part4)
+ spawn(fun() -> ct:sleep(500),?NS:hupp(send,Part1),
+ ct:sleep(100),?NS:hupp(send,Part2),
+ ct:sleep(100),?NS:hupp(send,Part3),
+ ct:sleep(100),?NS:hupp(send,Part4)
end),
%% Order server to expect a get - then the process above will make
@@ -702,8 +712,8 @@ timeout_receive_chunked_data(Config) ->
%% Spawn a process which will wait a bit for the client to send
%% the request (below), then order the server to the chunks of the
%% rpc-reply one by one.
- spawn(fun() -> timer:sleep(500),?NS:hupp(send,Part1),
- timer:sleep(100),?NS:hupp(send,Part2)
+ spawn(fun() -> ct:sleep(500),?NS:hupp(send,Part1),
+ ct:sleep(100),?NS:hupp(send,Part2)
end),
%% Order server to expect a get - then the process above will make
@@ -748,9 +758,9 @@ close_while_waiting_for_chunked_data(Config) ->
%% Spawn a process which will wait a bit for the client to send
%% the request (below), then order the server to the chunks of the
%% rpc-reply one by one.
- spawn(fun() -> timer:sleep(500),?NS:hupp(send,Part1),
- timer:sleep(100),?NS:hupp(send,Part2),
- timer:sleep(100),?NS:hupp(kill)
+ spawn(fun() -> ct:sleep(500),?NS:hupp(send,Part1),
+ ct:sleep(100),?NS:hupp(send,Part2),
+ ct:sleep(100),?NS:hupp(kill)
end),
%% Order server to expect a get - then the process above will make
@@ -766,7 +776,7 @@ connection_crash(Config) ->
%% Test that if the test survives killing the connection
%% process. Earlier this caused ct_util_server to terminate, and
%% this aborting the complete test run.
- spawn(fun() -> timer:sleep(500),exit(Client,kill) end),
+ spawn(fun() -> ct:sleep(500),exit(Client,kill) end),
?NS:expect(get),
{error,{closed,killed}}=ct_netconfc:get(Client,{server,[{xmlns,"myns"}],[]}),
ok.
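Most of the changes in this suite, and in the suites that follow, replace timer:sleep/1 with ct:sleep/1. The difference is that ct:sleep/1 scales the sleep time with the multiply_timetraps and scale_timetraps settings, so pauses grow on slow or instrumented platforms, while timer:sleep/1 always sleeps the literal number of milliseconds. A minimal sketch of the distinction (hypothetical helpers):

pause_scaled(Millis) ->
    ct:sleep(Millis).        %% scaled with multiply_timetraps/scale_timetraps

pause_fixed(Millis) ->
    timer:sleep(Millis).     %% always sleeps exactly Millis milliseconds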
diff --git a/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl b/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl
index f7c7b891bb..e4bc396536 100644
--- a/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl
+++ b/lib/common_test/test/ct_netconfc_SUITE_data/ns.erl
@@ -351,7 +351,7 @@ check_expected(SessionId,ConnRef,Msg) ->
do(ConnRef, Do),
reply(ConnRef,Reply);
error ->
- timer:sleep(1000),
+ ct:sleep(1000),
exit({error,{got_unexpected,SessionId,Msg,ets:tab2list(ns_tab)}})
end.
@@ -548,8 +548,13 @@ make_msg({hello,SessionId,Stuff}) ->
SessionIdXml/binary,"</hello>">>);
make_msg(ok) ->
xml(rpc_reply("<ok/>"));
+
+make_msg({ok,Data}) ->
+ xml(rpc_reply(from_simple({ok,Data})));
+
make_msg({data,Data}) ->
xml(rpc_reply(from_simple({data,Data})));
+
make_msg(event) ->
xml(<<"<notification xmlns=\"",?NETCONF_NOTIF_NAMESPACE,"\">"
"<eventTime>2012-06-14T14:50:54+02:00</eventTime>"
diff --git a/lib/common_test/test/ct_pre_post_test_io_SUITE.erl b/lib/common_test/test/ct_pre_post_test_io_SUITE.erl
index 5de1ecc2bd..1e6018f442 100644
--- a/lib/common_test/test/ct_pre_post_test_io_SUITE.erl
+++ b/lib/common_test/test/ct_pre_post_test_io_SUITE.erl
@@ -91,27 +91,27 @@ pre_post_io(Config) ->
spawn(fun() ->
ct:pal("CONTROLLER: Started!", []),
%% --- test run 1 ---
- timer:sleep(3000),
+ ct:sleep(3000),
ct:pal("CONTROLLER: Handle remote events = true", []),
ok = ct_test_support:ct_rpc({cth_log_redirect,
handle_remote_events,
[true]}, Config),
- timer:sleep(2000),
+ ct:sleep(2000),
ct:pal("CONTROLLER: Proceeding with test run #1!", []),
ok = ct_test_support:ct_rpc({cth_ctrl,proceed,[]}, Config),
- timer:sleep(6000),
+ ct:sleep(6000),
ct:pal("CONTROLLER: Proceeding with shutdown #1!", []),
ok = ct_test_support:ct_rpc({cth_ctrl,proceed,[]}, Config),
%% --- test run 2 ---
- timer:sleep(3000),
+ ct:sleep(3000),
ct:pal("CONTROLLER: Handle remote events = true", []),
ok = ct_test_support:ct_rpc({cth_log_redirect,
handle_remote_events,
[true]}, Config),
- timer:sleep(2000),
+ ct:sleep(2000),
ct:pal("CONTROLLER: Proceeding with test run #2!", []),
ok = ct_test_support:ct_rpc({cth_ctrl,proceed,[]}, Config),
- timer:sleep(6000),
+ ct:sleep(6000),
ct:pal("CONTROLLER: Proceeding with shutdown #2!", []),
ok = ct_test_support:ct_rpc({cth_ctrl,proceed,[]}, Config)
end),
diff --git a/lib/common_test/test/ct_repeat_1_SUITE.erl b/lib/common_test/test/ct_repeat_1_SUITE.erl
index e37aeb196c..50e07608f6 100644
--- a/lib/common_test/test/ct_repeat_1_SUITE.erl
+++ b/lib/common_test/test/ct_repeat_1_SUITE.erl
@@ -220,8 +220,7 @@ test_events(repeat_cs_and_grs) ->
{?eh,test_stats,{1,1,{0,0}}},
[{?eh,tc_done,{repeat_1_SUITE,{init_per_group,gr_fail_result,[]},ok}},
{?eh,test_stats,{2,1,{0,0}}},
- {?eh,tc_done,{repeat_1_SUITE,{end_per_group,gr_fail_result,[]},
- {return_group_result,failed}}}],
+ {?eh,tc_done,{repeat_1_SUITE,{end_per_group,gr_fail_result,[]},ok}}],
{?eh,test_stats,{3,1,{0,0}}},
[{?eh,tc_done,{repeat_1_SUITE,{init_per_group,gr_fail_init,[]},
{failed,{error,fails_on_purpose}}}},
@@ -242,8 +241,7 @@ test_events(repeat_cs_and_grs) ->
{?eh,test_stats,{5,2,{0,1}}},
[{?eh,tc_done,{repeat_1_SUITE,{init_per_group,gr_fail_result,[]},ok}},
{?eh,test_stats,{6,2,{0,1}}},
- {?eh,tc_done,{repeat_1_SUITE,{end_per_group,gr_fail_result,[]},
- {return_group_result,failed}}}],
+ {?eh,tc_done,{repeat_1_SUITE,{end_per_group,gr_fail_result,[]},ok}}],
{?eh,test_stats,{7,2,{0,1}}},
[{?eh,tc_done,{repeat_1_SUITE,{init_per_group,gr_fail_init,[]},
{failed,{error,fails_on_purpose}}}},
@@ -289,8 +287,7 @@ test_events(repeat_seq) ->
{init_per_group,gr_fail_result,[]},ok}},
{?eh,test_stats,{4,2,{0,2}}},
{?eh,tc_done,{repeat_1_SUITE,
- {end_per_group,gr_fail_result,[]},
- {return_group_result,failed}}}],
+ {end_per_group,gr_fail_result,[]},ok}}],
{?eh,tc_auto_skip,{repeat_1_SUITE,{tc_ok_2,repeat_seq_2},
{group_result,gr_fail_result,failed}}},
{?eh,test_stats,{4,2,{0,3}}},
@@ -402,8 +399,7 @@ test_events(repeat_gr_until_any_ok) ->
[{?eh,tc_done,{repeat_1_SUITE,
{init_per_group,gr_fail_result,[]},ok}},
{?eh,tc_done,{repeat_1_SUITE,
- {end_per_group,gr_fail_result,[]},
- {return_group_result,failed}}}],
+ {end_per_group,gr_fail_result,[]},ok}}],
{?eh,tc_done,{repeat_1_SUITE,tc_fail_1,
{failed,{error,{{badmatch,2},'_'}}}}},
{?eh,test_stats,{1,1,{0,0}}},
@@ -418,8 +414,7 @@ test_events(repeat_gr_until_any_ok) ->
[{?eh,tc_done,{repeat_1_SUITE,
{init_per_group,gr_fail_result_then_ok,[]},ok}},
{?eh,tc_done,{repeat_1_SUITE,
- {end_per_group,gr_fail_result_then_ok,[]},
- {return_group_result,failed}}}],
+ {end_per_group,gr_fail_result_then_ok,[]},ok}}],
{?eh,tc_done,{repeat_1_SUITE,
{end_per_group,repeat_gr_until_any_ok_1,
[{repeat_until_any_ok,3}]},ok}}],
@@ -441,8 +436,7 @@ test_events(repeat_gr_until_any_ok) ->
{init_per_group,repeat_gr_until_any_ok_2,
[{repeat_until_any_ok,3}]},ok}},
[{?eh,tc_done,{repeat_1_SUITE,
- {end_per_group,gr_fail_result,[]},
- {return_group_result,failed}}}],
+ {end_per_group,gr_fail_result,[]},ok}}],
{?eh,tc_done,{repeat_1_SUITE,tc_fail_1,
{failed,{error,{{badmatch,2},'_'}}}}},
{?eh,test_stats,{5,5,{0,2}}},
@@ -675,8 +669,7 @@ test_events(repeat_gr_until_any_fail) ->
{repeat_1_SUITE,{end_per_group,gr_ok_then_fail_result,[]}}},
{?eh,tc_done,
{repeat_1_SUITE,
- {end_per_group,gr_ok_then_fail_result,[]},
- {return_group_result,failed}}}],
+ {end_per_group,gr_ok_then_fail_result,[]},ok}}],
{?eh,tc_start,{repeat_1_SUITE,tc_ok_2}},
{?eh,tc_done,{repeat_1_SUITE,tc_ok_2,ok}},
{?eh,test_stats,{8,0,{0,0}}},
@@ -938,8 +931,7 @@ test_events(repeat_gr_until_all_ok) ->
{?eh,tc_done,{repeat_1_SUITE,tc_ok_1,ok}},
{?eh,test_stats,{3,1,{0,0}}},
{?eh,tc_done,{repeat_1_SUITE,
- {end_per_group,gr_fail_result_then_ok,[]},
- {return_group_result,failed}}}],
+ {end_per_group,gr_fail_result_then_ok,[]},ok}}],
{?eh,tc_done,{repeat_1_SUITE,
{end_per_group,repeat_gr_until_all_ok_1,
[{repeat_until_all_ok,3}]},ok}}],
@@ -1113,8 +1105,7 @@ test_events(repeat_gr_until_all_fail) ->
gr_ok_then_fail_result,[]},ok}},
{?eh,test_stats,{3,3,{0,2}}},
{?eh,tc_done,{repeat_1_SUITE,
- {end_per_group,gr_ok_then_fail_result,[]},
- {return_group_result,failed}}}],
+ {end_per_group,gr_ok_then_fail_result,[]},ok}}],
{?eh,tc_done,{repeat_1_SUITE,
{end_per_group,repeat_gr_until_all_fail_1,
[{repeat_until_all_fail,2}]},ok}}],
@@ -1148,8 +1139,7 @@ test_events(repeat_gr_until_all_fail) ->
{init_per_group,repeat_gr_until_all_fail_3,
[{repeat_until_all_fail,3}]},ok}},
[{?eh,tc_done,{repeat_1_SUITE,
- {end_per_group,gr_fail_result,[]},
- {return_group_result,failed}}}],
+ {end_per_group,gr_fail_result,[]},ok}}],
{?eh,tc_done,{repeat_1_SUITE,tc_ok_then_fail_1,ok}},
{?eh,test_stats,{6,5,{0,3}}},
{?eh,tc_done,{repeat_1_SUITE,
@@ -1159,8 +1149,7 @@ test_events(repeat_gr_until_all_fail) ->
{init_per_group,repeat_gr_until_all_fail_3,
[{repeat_until_all_fail,2}]},ok}},
[{?eh,tc_done,{repeat_1_SUITE,
- {end_per_group,gr_fail_result,[]},
- {return_group_result,failed}}}],
+ {end_per_group,gr_fail_result,[]},ok}}],
{?eh,tc_done,{repeat_1_SUITE,tc_ok_then_fail_1,
{failed,{error,failing_this_time}}}},
{?eh,test_stats,{7,6,{0,3}}},
@@ -1263,8 +1252,7 @@ test_events(repeat_seq_until_any_fail) ->
{init_per_group,repeat_seq_until_any_fail_4,
[{repeat_until_any_fail,2},sequence]},ok}},
[{?eh,tc_done,{repeat_1_SUITE,
- {end_per_group,gr_ok_then_fail_result,[]},
- {return_group_result,failed}}}],
+ {end_per_group,gr_ok_then_fail_result,[]},ok}}],
{?eh,tc_auto_skip,{repeat_1_SUITE,{tc_ok_1,gr_ok_1},
{group_result,gr_ok_then_fail_result,failed}}},
{?eh,test_stats,{19,1,{0,3}}},
@@ -1473,8 +1461,7 @@ test_events(repeat_shuffled_seq_until_any_fail) ->
[{?eh,tc_start,{repeat_1_SUITE,
{end_per_group,gr_ok_then_fail_result,[]}}},
{?eh,tc_done,{repeat_1_SUITE,
- {end_per_group,gr_ok_then_fail_result,[]},
- {return_group_result,failed}}}],
+ {end_per_group,gr_ok_then_fail_result,[]},ok}}],
{?eh,tc_start,{repeat_1_SUITE,
{end_per_group,repeat_shuffled_seq_until_any_fail_4,
[{shuffle,repeated},{repeat_until_any_fail,2},sequence]}}},
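Throughout these expected-event lists, the tc_done event for end_per_group is now expected to report ok instead of {return_group_result,failed}; the group failure itself still surfaces through the {group_result,...,failed} auto-skip events that remain in the lists. For reference, a group whose end_per_group forwards a failed status looks roughly like this in the suite under test (sketch only; repeat_1_SUITE is not shown in this diff):

end_per_group(gr_fail_result, _Config) ->
    %% tell common_test that the group as a whole failed; the tc_done
    %% event is reported as ok and the group result is propagated
    %% separately, as the updated event lists above expect
    {return_group_result, failed}.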
diff --git a/lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl b/lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl
index 3fd5943691..3d7049a9c4 100644
--- a/lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl
+++ b/lib/common_test/test/ct_repeat_testrun_SUITE_data/a_test/r1_SUITE.erl
@@ -68,7 +68,7 @@ end_per_testcase(_Case, Config) ->
%%%-----------------------------------------------------------------
%%% Test cases
tc1(_Config) ->
- timer:sleep(10000),
+ ct:sleep(10000),
ok.
tc2(_Config) ->
diff --git a/lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl b/lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl
index dc9abc2863..e4f6e7dcc1 100644
--- a/lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl
+++ b/lib/common_test/test/ct_repeat_testrun_SUITE_data/b_test/r2_SUITE.erl
@@ -68,7 +68,7 @@ end_per_testcase(_Case, Config) ->
%%%-----------------------------------------------------------------
%%% Test cases
tc1(_Config) ->
- %% timer:sleep(3000),
+ %% ct:sleep(3000),
ok.
tc2(_Config) ->
diff --git a/lib/common_test/test/ct_sequence_1_SUITE.erl b/lib/common_test/test/ct_sequence_1_SUITE.erl
index 5a775a1117..4055cd789e 100644
--- a/lib/common_test/test/ct_sequence_1_SUITE.erl
+++ b/lib/common_test/test/ct_sequence_1_SUITE.erl
@@ -182,8 +182,7 @@ test_events(subgroup_return_fail) ->
{?eh,test_stats,{0,1,{0,0}}},
{?eh,tc_start,
{subgroups_1_SUITE,{end_per_group,return_fail,[]}}},
- {?eh,tc_done,{subgroups_1_SUITE,{end_per_group,return_fail,[]},
- {return_group_result,failed}}}],
+ {?eh,tc_done,{subgroups_1_SUITE,{end_per_group,return_fail,[]},ok}}],
{?eh,tc_auto_skip,
{subgroups_1_SUITE,{ok_tc,ok_group},
{group_result,return_fail,failed}}},
@@ -191,8 +190,7 @@ test_events(subgroup_return_fail) ->
{?eh,tc_start,
{subgroups_1_SUITE,{end_per_group,subgroup_return_fail,[sequence]}}},
{?eh,tc_done,
- {subgroups_1_SUITE,{end_per_group,subgroup_return_fail,[sequence]},
- {return_group_result,failed}}}],
+ {subgroups_1_SUITE,{end_per_group,subgroup_return_fail,[sequence]},ok}}],
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
];
@@ -221,8 +219,7 @@ test_events(subgroup_init_fail) ->
{?eh,test_stats,{0,0,{0,2}}},
{?eh,tc_start,{subgroups_1_SUITE,{end_per_group,subgroup_init_fail,[sequence]}}},
{?eh,tc_done,{subgroups_1_SUITE,
- {end_per_group,subgroup_init_fail,[sequence]},
- {return_group_result,failed}}}],
+ {end_per_group,subgroup_init_fail,[sequence]},ok}}],
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
];
@@ -245,8 +242,7 @@ test_events(subgroup_after_failed_case) ->
{?eh,tc_start,{subgroups_1_SUITE,
{end_per_group,subgroup_after_failed_case,[sequence]}}},
{?eh,tc_done,{subgroups_1_SUITE,
- {end_per_group,subgroup_after_failed_case,[sequence]},
- {return_group_result,failed}}}],
+ {end_per_group,subgroup_after_failed_case,[sequence]},ok}}],
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
];
@@ -266,16 +262,14 @@ test_events(case_after_subgroup_return_fail) ->
{?eh,tc_done,{subgroups_1_SUITE,failing_tc,{failed,{error,{{badmatch,3},'_'}}}}},
{?eh,test_stats,{0,1,{0,0}}},
{?eh,tc_start,{subgroups_1_SUITE,{end_per_group,return_fail,[]}}},
- {?eh,tc_done,{subgroups_1_SUITE,{end_per_group,return_fail,[]},
- {return_group_result,failed}}}],
+ {?eh,tc_done,{subgroups_1_SUITE,{end_per_group,return_fail,[]},ok}}],
{?eh,tc_auto_skip,{subgroups_1_SUITE,{ok_tc,case_after_subgroup_return_fail},
{group_result,return_fail,failed}}},
{?eh,test_stats,{0,1,{0,1}}},
{?eh,tc_start,{subgroups_1_SUITE,
{end_per_group,case_after_subgroup_return_fail,[sequence]}}},
{?eh,tc_done,{subgroups_1_SUITE,
- {end_per_group,case_after_subgroup_return_fail,[sequence]},
- {return_group_result,failed}}}],
+ {end_per_group,case_after_subgroup_return_fail,[sequence]},ok}}],
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
];
@@ -310,8 +304,7 @@ test_events(case_after_subgroup_fail_init) ->
{?eh,tc_start,{subgroups_1_SUITE,
{end_per_group,case_after_subgroup_fail_init,[sequence]}}},
{?eh,tc_done,{subgroups_1_SUITE,
- {end_per_group,case_after_subgroup_fail_init,[sequence]},
- {return_group_result,failed}}}],
+ {end_per_group,case_after_subgroup_fail_init,[sequence]},ok}}],
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
].
diff --git a/lib/common_test/test/ct_shell_SUITE.erl b/lib/common_test/test/ct_shell_SUITE.erl
index 4b8c43d800..70c0ab8127 100644
--- a/lib/common_test/test/ct_shell_SUITE.erl
+++ b/lib/common_test/test/ct_shell_SUITE.erl
@@ -93,7 +93,7 @@ start_interactive(Config) ->
test_server:format(Level,
"ct_util_server not stopped on ~p yet, waiting 5 s...~n",
[CTNode]),
- timer:sleep(5000),
+ ct:sleep(5000),
undefined = rpc:call(CTNode, erlang, whereis, [ct_util_server])
end,
Events = ct_test_support:get_events(ERPid, Config),
diff --git a/lib/common_test/test/ct_skip_SUITE_data/skip/test/auto_skip_4_SUITE.erl b/lib/common_test/test/ct_skip_SUITE_data/skip/test/auto_skip_4_SUITE.erl
index 825846cd55..89e202a404 100644
--- a/lib/common_test/test/ct_skip_SUITE_data/skip/test/auto_skip_4_SUITE.erl
+++ b/lib/common_test/test/ct_skip_SUITE_data/skip/test/auto_skip_4_SUITE.erl
@@ -72,7 +72,7 @@ end_per_group(_GroupName, _Config) ->
%% Reason = term()
%%--------------------------------------------------------------------
init_per_testcase(tc1, Config) ->
- timer:sleep(5000),
+ ct:sleep(5000),
Config;
init_per_testcase(_TestCase, Config) ->
Config.
diff --git a/lib/common_test/test/ct_smoke_test_SUITE.erl b/lib/common_test/test/ct_smoke_test_SUITE.erl
index 49b38361e2..6077946c33 100644
--- a/lib/common_test/test/ct_smoke_test_SUITE.erl
+++ b/lib/common_test/test/ct_smoke_test_SUITE.erl
@@ -480,7 +480,7 @@ events(Test) when Test == dir1 ; Test == dir2 ;
{Suite,tc4,{skipped,"Skipping this one"}}},
{?eh,test_stats,{7,0,{1,0}}},
{?eh,tc_start,{Suite,end_per_suite}},
- {?eh,tc_done,{Suite,end_per_suite,ips_data}},
+ {?eh,tc_done,{Suite,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
];
@@ -517,7 +517,7 @@ events(Test) when Test == dir1_2 ; Test == suite11_21 ->
{happy_11_SUITE,tc4,{skipped,"Skipping this one"}}},
{?eh,test_stats,{7,0,{1,0}}},
{?eh,tc_start,{happy_11_SUITE,end_per_suite}},
- {?eh,tc_done,{happy_11_SUITE,end_per_suite,ips_data}},
+ {?eh,tc_done,{happy_11_SUITE,end_per_suite,ok}},
{?eh,tc_start,{happy_21_SUITE,init_per_suite}},
{?eh,tc_done,{happy_21_SUITE,init_per_suite,ok}},
{?eh,tc_start,{happy_21_SUITE,tc1}},
@@ -546,7 +546,7 @@ events(Test) when Test == dir1_2 ; Test == suite11_21 ->
{happy_21_SUITE,tc4,{skipped,"Skipping this one"}}},
{?eh,test_stats,{14,0,{2,0}}},
{?eh,tc_start,{happy_21_SUITE,end_per_suite}},
- {?eh,tc_done,{happy_21_SUITE,end_per_suite,ips_data}},
+ {?eh,tc_done,{happy_21_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
];
@@ -563,7 +563,7 @@ events(Test) when Test == tc111 ; Test == tc211 ->
{?eh,tc_done,{Suite,tc1,ok}},
{?eh,test_stats,{1,0,{0,0}}},
{?eh,tc_start,{Suite,end_per_suite}},
- {?eh,tc_done,{Suite,end_per_suite,ips_data}},
+ {?eh,tc_done,{Suite,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
];
@@ -582,7 +582,7 @@ events(tc111_112) ->
{?eh,tc_done,{happy_11_SUITE,tc2,ok}},
{?eh,test_stats,{2,0,{0,0}}},
{?eh,tc_start,{happy_11_SUITE,end_per_suite}},
- {?eh,tc_done,{happy_11_SUITE,end_per_suite,ips_data}},
+ {?eh,tc_done,{happy_11_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
].
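The ips_data-to-ok changes in this suite reflect the same principle for end_per_suite: its return value is no longer echoed as the tc_done result, which is now expected to be ok. A sketch of the kind of suite callback these events refer to (hypothetical; the happy_*_SUITE sources are not part of this diff):

end_per_suite(_Config) ->
    %% arbitrary return value; the tc_done event is still reported as ok
    ips_data.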
diff --git a/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE.erl b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE.erl
index e20832e1e7..07f7bf02e4 100644
--- a/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE.erl
+++ b/lib/common_test/test/ct_snmp_SUITE_data/snmp_SUITE.erl
@@ -117,12 +117,12 @@ break(_Config) ->
start_stop(Config) ->
ok = ct_snmp:start(Config,snmp1,snmp_app1),
- timer:sleep(1000),
+ ct:sleep(1000),
{snmp,_,_} = lists:keyfind(snmp,1,application:which_applications()),
[_|_] = filelib:wildcard("*/*.conf",?config(priv_dir,Config)),
ok = ct_snmp:stop(Config),
- timer:sleep(1000),
+ ct:sleep(1000),
false = lists:keyfind(snmp,1,application:which_applications()),
[] = filelib:wildcard("*/*.conf",?config(priv_dir,Config)),
ok.
diff --git a/lib/common_test/test/ct_telnet_SUITE.erl b/lib/common_test/test/ct_telnet_SUITE.erl
index 84e69c2b54..62cb821ede 100644
--- a/lib/common_test/test/ct_telnet_SUITE.erl
+++ b/lib/common_test/test/ct_telnet_SUITE.erl
@@ -203,7 +203,9 @@ telnet_config(_, LogType) ->
{command_timeout,10000},
{reconnection_attempts,0},
{reconnection_interval,0},
- {keep_alive,true}]} |
+ {keep_alive,true},
+ {poll_limit,10},
+ {poll_interval,1000}]} |
if LogType == legacy ->
[{ct_conn_log,[]}];
true ->
diff --git a/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl b/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl
index 0ddb4e9b00..9dc9095f47 100644
--- a/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl
+++ b/lib/common_test/test/ct_telnet_SUITE_data/ct_telnet_own_server_SUITE.erl
@@ -40,10 +40,12 @@ all() ->
expect,
expect_repeat,
expect_sequence,
+ expect_wait_until_prompt,
expect_error_prompt,
expect_error_timeout1,
expect_error_timeout2,
expect_error_timeout3,
+ total_timeout_less_than_idle,
no_prompt_check,
no_prompt_check_repeat,
no_prompt_check_sequence,
@@ -80,6 +82,8 @@ end_per_group(_GroupName, Config) ->
expect(_) ->
{ok, Handle} = ct_telnet:open(telnet_server_conn1),
ok = ct_telnet:send(Handle, "echo ayt"),
+ {ok,["ayt"]} = ct_telnet:expect(Handle, "ayt"),
+ ok = ct_telnet:send(Handle, "echo ayt"),
{ok,["ayt"]} = ct_telnet:expect(Handle, ["ayt"]),
ok = ct_telnet:close(Handle),
ok.
@@ -102,6 +106,21 @@ expect_sequence(_) ->
ok = ct_telnet:close(Handle),
ok.
+%% Check that expect can wait for delayed prompt
+expect_wait_until_prompt(_) ->
+ {ok, Handle} = ct_telnet:open(telnet_server_conn1),
+ Timeouts = [{idle_timeout,5000},{total_timeout,7000}],
+
+ ok = ct_telnet:send(Handle, "echo_delayed_prompt 3000 xxx"),
+ {ok,["xxx"]} =
+ ct_telnet:expect(Handle, "xxx",
+ [wait_for_prompt|Timeouts]),
+ ok = ct_telnet:send(Handle, "echo_delayed_prompt 3000 yyy zzz"),
+ {ok,[["yyy"],["zzz"]]} =
+ ct_telnet:expect(Handle, ["yyy","zzz"],
+ [{wait_for_prompt,"> "}|Timeouts]),
+ ok.
+
%% Check that expect returns when a prompt is found, even if pattern
%% is not matched.
expect_error_prompt(_) ->
@@ -134,9 +153,32 @@ expect_error_timeout2(_) ->
expect_error_timeout3(_) ->
{ok, Handle} = ct_telnet:open(telnet_server_conn1),
ok = ct_telnet:send(Handle, "echo_loop 5000 xxx"),
+
+ T0 = now(),
{error,timeout} = ct_telnet:expect(Handle, ["yyy"],
[{idle_timeout,infinity},
- {total_timeout,3000}]),
+ {total_timeout,2001}]),
+ Diff = trunc(timer:now_diff(now(),T0)/1000),
+ {_,true} = {Diff, (Diff >= 2000) and (Diff =< 4000)},
+
+ ok = ct_telnet:send(Handle, "echo ayt"),
+ {ok,["ayt"]} = ct_telnet:expect(Handle, ["ayt"]),
+ ok = ct_telnet:close(Handle),
+ ok.
+
+%% OTP-12335: If total_timeout < idle_timeout, expect will never timeout
+%% until after idle_timeout, which is incorrect.
+total_timeout_less_than_idle(_) ->
+ {ok, Handle} = ct_telnet:open(telnet_server_conn1),
+ ok = ct_telnet:send(Handle, "echo_no_prompt xxx"),
+
+ T0 = now(),
+ {error,timeout} = ct_telnet:expect(Handle, ["yyy"],
+ [{idle_timeout,5000},
+ {total_timeout,2001}]),
+ Diff = trunc(timer:now_diff(now(),T0)/1000),
+ {_,true} = {Diff, (Diff >= 2000) and (Diff =< 4000)},
+
ok = ct_telnet:send(Handle, "echo ayt"),
{ok,["ayt"]} = ct_telnet:expect(Handle, ["ayt"]),
ok = ct_telnet:close(Handle),
@@ -259,14 +301,14 @@ large_string(_) ->
%% yield the same result as the single request case.
ok = ct_telnet:send(Handle, "echo_sep "++BigString),
- timer:sleep(1000),
+ ct:sleep(1000),
{ok,Data1} = ct_telnet:get_data(Handle),
ct:log("[GET DATA #1] Received ~w chars: ~s",
[length(lists:flatten(Data1)),Data1]),
VerifyStr = [C || C <- lists:flatten(Data1), C/=$ , C/=$\r, C/=$\n, C/=$>],
ok = ct_telnet:send(Handle, "echo_sep "++BigString),
- timer:sleep(50),
+ ct:sleep(50),
{ok,Data2} = ct_telnet:get_data(Handle),
ct:log("[GET DATA #2] Received ~w chars: ~s", [length(lists:flatten(Data2)),Data2]),
VerifyStr = [C || C <- lists:flatten(Data2), C/=$ , C/=$\r, C/=$\n, C/=$>],
@@ -292,7 +334,7 @@ server_speaks(_) ->
"echo_no_prompt This is the second message"),
%% Let ct_telnet_client get an idle timeout. This should print the
%% two messages to the log. Note that the buffers are not flushed here!
- timer:sleep(15000),
+ ct:sleep(15000),
ok = ct_telnet_client:send_data(Backdoor,
"echo_no_prompt This is the third message"),
{ok,_} = ct_telnet:expect(Handle, ["first.*second.*third"],
@@ -302,7 +344,7 @@ server_speaks(_) ->
ok = ct_telnet_client:send_data(Backdoor,
"echo_no_prompt This is the fourth message"),
%% give the server time to respond
- timer:sleep(2000),
+ ct:sleep(2000),
%% closing the connection should print last message in log
ok = ct_telnet:close(Handle),
ok.
@@ -314,11 +356,11 @@ server_disconnects(_) ->
ok = ct_telnet:send(Handle, "disconnect_after 1500"),
%% wait until the get_data operation (triggered by send/2) times out
%% before sending the msg
- timer:sleep(500),
+ ct:sleep(500),
ok = ct_telnet:send(Handle, "echo_no_prompt This is the message"),
%% when the server closes the connection, the last message should be
%% printed in the log
- timer:sleep(3000),
+ ct:sleep(3000),
_ = ct_telnet:close(Handle),
ok.
diff --git a/lib/common_test/test/ct_test_server_if_1_SUITE.erl b/lib/common_test/test/ct_test_server_if_1_SUITE.erl
index b6ef3062d4..214cb60c0d 100644
--- a/lib/common_test/test/ct_test_server_if_1_SUITE.erl
+++ b/lib/common_test/test/ct_test_server_if_1_SUITE.erl
@@ -236,14 +236,13 @@ test_events(ts_if_1) ->
{ts_if_2_SUITE,end_per_suite,
{failed,{error,{suite0_failed,{exited,suite0_goes_boom}}}}}},
-
{?eh,tc_start,{ct_framework,error_in_suite}},
- {?eh,test_stats,{2,6,{4,7}}},
-
+ {?eh,tc_done,{ct_framework,error_in_suite,
+ {failed,{error,'ts_if_3_SUITE:all/0 is missing'}}}},
{?eh,tc_start,{ct_framework,error_in_suite}},
- {?eh,test_stats,{2,7,{4,7}}},
-
+ {?eh,tc_done,{ct_framework,error_in_suite,
+ {failed,{error,'Bad return value from ts_if_4_SUITE:all/0'}}}},
{?eh,tc_start,{ts_if_5_SUITE,init_per_suite}},
{?eh,tc_done,{ts_if_5_SUITE,init_per_suite,
@@ -252,7 +251,7 @@ test_events(ts_if_1) ->
{?eh,tc_auto_skip,
{ts_if_5_SUITE,my_test_case,
{require_failed_in_suite0,{not_available,undef_variable}}}},
- {?eh,test_stats,{2,7,{4,8}}},
+ {?eh,test_stats,{2,5,{4,8}}},
{?eh,tc_auto_skip,
{ts_if_5_SUITE,end_per_suite,
{require_failed_in_suite0,{not_available,undef_variable}}}},
@@ -264,7 +263,7 @@ test_events(ts_if_1) ->
{?eh,tc_auto_skip,
{ts_if_6_SUITE,tc1,
{failed,{error,{suite0_failed,{exited,suite0_byebye}}}}}},
- {?eh,test_stats,{2,7,{4,9}}},
+ {?eh,test_stats,{2,5,{4,9}}},
{?eh,tc_auto_skip,
{ct_framework,end_per_suite,
{failed,{error,{suite0_failed,{exited,suite0_byebye}}}}}},
@@ -274,13 +273,13 @@ test_events(ts_if_1) ->
{?eh,tc_done,{ct_framework,init_per_suite,ok}},
{?eh,tc_done,
{ts_if_7_SUITE,tc1,{auto_skipped,{testcase0_failed,bad_return_value}}}},
- {?eh,test_stats,{2,7,{4,10}}},
+ {?eh,test_stats,{2,5,{4,10}}},
{?eh,tc_done,{ts_if_7_SUITE,
{init_per_group,g1,[]},
{auto_skipped,{group0_failed,bad_return_value}}}},
{?eh,tc_auto_skip,
{ts_if_7_SUITE,{tc2,g1},{group0_failed,bad_return_value}}},
- {?eh,test_stats,{2,7,{4,11}}},
+ {?eh,test_stats,{2,5,{4,11}}},
{?eh,tc_auto_skip,
{ts_if_7_SUITE,{end_per_group,g1},{group0_failed,bad_return_value}}},
@@ -288,7 +287,7 @@ test_events(ts_if_1) ->
{?eh,tc_done,{ts_if_7_SUITE,{init_per_group,g2,[]},ok}},
{?eh,tc_done,{ts_if_7_SUITE,tc2,
{auto_skipped,{testcase0_failed,bad_return_value}}}},
- {?eh,test_stats,{2,7,{4,12}}},
+ {?eh,test_stats,{2,5,{4,12}}},
{?eh,tc_start,{ts_if_7_SUITE,{end_per_group,g2,[]}}},
{?eh,tc_done,{ts_if_7_SUITE,{end_per_group,g2,[]},ok}}],
@@ -300,17 +299,17 @@ test_events(ts_if_1) ->
{?eh,tc_done,{ct_framework,init_per_suite,ok}},
{?eh,tc_start,{ts_if_8_SUITE,tc1}},
{?eh,tc_done,{ts_if_8_SUITE,tc1,{failed,{error,failed_on_purpose}}}},
- {?eh,test_stats,{2,8,{4,12}}},
+ {?eh,test_stats,{2,6,{4,12}}},
{?eh,tc_start,{ct_framework,end_per_suite}},
{?eh,tc_done,{ct_framework,end_per_suite,ok}},
{?eh,tc_user_skip,{skipped_by_spec_1_SUITE,all,"should be skipped"}},
- {?eh,test_stats,{2,8,{5,12}}},
+ {?eh,test_stats,{2,6,{5,12}}},
{?eh,tc_start,{skipped_by_spec_2_SUITE,init_per_suite}},
{?eh,tc_done,{skipped_by_spec_2_SUITE,init_per_suite,ok}},
{?eh,tc_user_skip,{skipped_by_spec_2_SUITE,tc1,"should be skipped"}},
- {?eh,test_stats,{2,8,{6,12}}},
+ {?eh,test_stats,{2,6,{6,12}}},
{?eh,tc_start,{skipped_by_spec_2_SUITE,end_per_suite}},
{?eh,tc_done,{skipped_by_spec_2_SUITE,end_per_suite,ok}},
diff --git a/lib/common_test/test/ct_test_server_if_1_SUITE_data/test_server_if/test/ts_if_1_SUITE.erl b/lib/common_test/test/ct_test_server_if_1_SUITE_data/test_server_if/test/ts_if_1_SUITE.erl
index 06fa6ac638..d30feb0bd2 100644
--- a/lib/common_test/test/ct_test_server_if_1_SUITE_data/test_server_if/test/ts_if_1_SUITE.erl
+++ b/lib/common_test/test/ct_test_server_if_1_SUITE_data/test_server_if/test/ts_if_1_SUITE.erl
@@ -76,7 +76,7 @@ end_per_group(_GroupName, _Config) ->
%% Reason = term()
%%--------------------------------------------------------------------
init_per_testcase(tc1, Config) ->
- timer:sleep(5000),
+ ct:sleep(5000),
Config;
init_per_testcase(tc8, _Config) ->
{skip,"tc8 skipped"};
@@ -92,7 +92,7 @@ init_per_testcase(_TestCase, Config) ->
%% Config0 = Config1 = [tuple()]
%%--------------------------------------------------------------------
end_per_testcase(tc2, Config) ->
- timer:sleep(5000);
+ ct:sleep(5000);
end_per_testcase(tc12, Config) ->
ct:comment("end_per_testcase(tc12) called!"),
ct:pal("end_per_testcase(tc12) called!", []),
@@ -146,7 +146,7 @@ tc2(_) ->
timeout_in_end_per_testcase.
tc3(_) ->
- timer:sleep(5000).
+ ct:sleep(5000).
tc4(_) ->
exit(failed_on_purpose).
@@ -186,7 +186,7 @@ gtc2(_) ->
tc12(_) ->
F = fun() -> ct:abort_current_testcase('stopping tc12') end,
spawn(F),
- timer:sleep(1000),
+ ct:sleep(1000),
exit(should_have_been_aborted).
tc13(_) ->
diff --git a/lib/common_test/test/ct_test_support.erl b/lib/common_test/test/ct_test_support.erl
index 746469584d..ffef8187f3 100644
--- a/lib/common_test/test/ct_test_support.erl
+++ b/lib/common_test/test/ct_test_support.erl
@@ -51,12 +51,12 @@ init_per_suite(Config) ->
init_per_suite(Config, 50).
init_per_suite(Config, Level) ->
+ ScaleFactor = test_server:timetrap_scale_factor(),
case os:type() of
{win32, _} ->
%% Extend timeout to 1 hour for windows as starting node
%% can take a long time there
- test_server:timetrap( 60*60*1000 *
- test_server:timetrap_scale_factor());
+ test_server:timetrap( 60*60*1000 * ScaleFactor );
_ ->
ok
end,
@@ -67,6 +67,16 @@ init_per_suite(Config, Level) ->
_ ->
ok
end,
+
+ {Mult,Scale} = test_server_ctrl:get_timetrap_parameters(),
+ test_server:format(Level, "Timetrap multiplier: ~w~n", [Mult]),
+ if Scale == true ->
+ test_server:format(Level, "Timetrap scale factor: ~w~n",
+ [ScaleFactor]);
+ true ->
+ ok
+ end,
+
start_slave(Config, Level).
start_slave(Config, Level) ->
@@ -277,10 +287,13 @@ run_ct_run_test(Opts,Config) ->
Level = proplists:get_value(trace_level, Config),
test_server:format(Level, "~n[RUN #1] Calling ct:run_test(~p) on ~p~n",
[Opts, CTNode]),
- T0 = now(),
+
+ T0 = erlang:monotonic_time(),
CtRunTestResult = rpc:call(CTNode, ct, run_test, [Opts]),
+ T1 = erlang:monotonic_time(),
+ Elapsed = erlang:convert_time_unit(T1-T0, native, milli_seconds),
test_server:format(Level, "~n[RUN #1] Got return value ~p after ~p ms~n",
- [CtRunTestResult,trunc(timer:now_diff(now(), T0)/1000)]),
+ [CtRunTestResult,Elapsed]),
case rpc:call(CTNode, erlang, whereis, [ct_util_server]) of
undefined ->
ok;
@@ -303,10 +316,12 @@ run_ct_script_start(Opts, Config) ->
[common_test, run_test_start_opts, Opts1]),
test_server:format(Level, "[RUN #2] Calling ct_run:script_start() on ~p~n",
[CTNode]),
- T0 = now(),
+ T0 = erlang:monotonic_time(),
ExitStatus = rpc:call(CTNode, ct_run, script_start, []),
+ T1 = erlang:monotonic_time(),
+ Elapsed = erlang:convert_time_unit(T1-T0, native, milli_seconds),
test_server:format(Level, "[RUN #2] Got exit status value ~p after ~p ms~n",
- [ExitStatus,trunc(timer:now_diff(now(), T0)/1000)]),
+ [ExitStatus,Elapsed]),
ExitStatus.
check_result({_Ok,Failed,{_UserSkipped,_AutoSkipped}},1,_Opts)
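The timing code above switches from timer:now_diff(now(), T0) to the monotonic clock. A minimal sketch of the same pattern as a reusable helper (time_call/1 is hypothetical):

time_call(Fun) ->
    T0 = erlang:monotonic_time(),
    Result = Fun(),
    T1 = erlang:monotonic_time(),
    %% convert native time units to milliseconds, as in the RUN #1/#2 logs
    ElapsedMs = erlang:convert_time_unit(T1 - T0, native, milli_seconds),
    {Result, ElapsedMs}.

Unlike erlang:now/0, the monotonic clock is not affected by system time warps, so the measured interval cannot go backwards.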
@@ -398,7 +413,7 @@ ct_rpc({M,F,A}, Config) ->
%%%-----------------------------------------------------------------
%%% random_error/1
random_error(Config) when is_list(Config) ->
- random:seed(now()),
+ random:seed(os:timestamp()),
Gen = fun(0,_) -> ok; (N,Fun) -> Fun(N-1, Fun) end,
Gen(random:uniform(100), Gen),
@@ -1340,12 +1355,7 @@ delete_old_logs(_, Config) ->
delete_dirs(LogDir) ->
Now = calendar:datetime_to_gregorian_seconds(calendar:local_time()),
- SaveTime = case os:getenv("CT_SAVE_OLD_LOGS") of
- false ->
- 28800;
- SaveTime0 ->
- list_to_integer(SaveTime0)
- end,
+ SaveTime = list_to_integer(os:getenv("CT_SAVE_OLD_LOGS", "28800")),
Deadline = Now - SaveTime,
Dirs = filelib:wildcard(filename:join(LogDir,"ct_run*")),
Dirs2Del =
diff --git a/lib/common_test/test/ct_testspec_1_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE.erl
index c2670316b6..bc19283a47 100644
--- a/lib/common_test/test/ct_testspec_1_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_1_SUITE.erl
@@ -795,7 +795,7 @@ test_events(skip_all_groups) ->
{?eh,test_stats,{0,0,{12,0}}},
{?eh,tc_user_skip,{groups_11_SUITE,{end_per_group,test_group_4},"SKIPPED!"}},
{?eh,tc_start,{groups_11_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_11_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,ok}},
{negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
];
@@ -840,7 +840,7 @@ test_events(skip_group) ->
{?eh,test_stats,{2,0,{6,0}}},
{?eh,tc_user_skip,{groups_11_SUITE,{end_per_group,test_group_2},
"SKIPPED!"}},
- {?eh,tc_done,{groups_11_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,ok}},
{negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
];
@@ -876,7 +876,7 @@ test_events(skip_group_all_testcases) ->
{?eh,test_stats,{0,0,{4,0}}},
{?eh,tc_user_skip,{groups_11_SUITE,{end_per_group,test_group_1b},
"SKIPPED!"}},
- {?eh,tc_done,{groups_11_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_11_SUITE,end_per_suite,ok}},
{negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
];
@@ -1065,7 +1065,7 @@ test_events(skip_subgroup) ->
{?eh,tc_done,{groups_12_SUITE,{end_per_group,test_group_4,[]},ok}}],
{?eh,tc_start,{groups_12_SUITE,end_per_suite}},
- {?eh,tc_done,{groups_12_SUITE,end_per_suite,init}},
+ {?eh,tc_done,{groups_12_SUITE,end_per_suite,ok}},
{negative,{?eh,tc_start,'_'},{?eh,stop_logging,'_'}}
];
diff --git a/lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_12_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_12_SUITE.erl
index 69c06f9b83..e5de5df1a8 100644
--- a/lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_12_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_1/groups_12_SUITE.erl
@@ -285,7 +285,7 @@ testcase_5a(Config) ->
%% increase chance the done event will come
%% during execution of subgroup (could be
%% tricky to handle)
- timer:sleep(3),
+ ct:sleep(3),
ok.
testcase_5b() ->
[].
diff --git a/lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_22_SUITE.erl b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_22_SUITE.erl
index cd517876df..dcd361d658 100644
--- a/lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_22_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_1_SUITE_data/groups_2/groups_22_SUITE.erl
@@ -278,7 +278,7 @@ testcase_5a(Config) ->
%% increase chance the done event will come
%% during execution of subgroup (could be
%% tricky to handle)
- timer:sleep(3),
+ ct:sleep(3),
ok.
testcase_5b() ->
[].
diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl
index b8216c3596..cfc6fa93d7 100644
--- a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t11_SUITE.erl
@@ -41,8 +41,12 @@ suite() ->
%% @end
%%--------------------------------------------------------------------
init_per_suite(Config) ->
+
+ TCName = ct:get_config(tcname),
+ CfgFiles = ct:get_config(file,undefined,[all]),
+
%% verify that expected config file can be read
- case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of
+ case {TCName,CfgFiles} of
{start_separate,[cfg11]} -> ok;
{start_join,[cfg11,cfg21]} -> ok;
{incl_separate1,[cfg11]} -> ok;
@@ -56,6 +60,28 @@ init_per_suite(Config) ->
_ -> ok
end,
+
+ %% test the get_testspec_terms functionality
+ if CfgFiles /= undefined ->
+ TSTerms = case ct:get_testspec_terms() of
+ undefined -> exit('testspec should not be undefined');
+ Result -> Result
+ end,
+ true = lists:keymember(config, 1, TSTerms),
+ {config,TSCfgFiles} = ct:get_testspec_terms(config),
+ [{config,TSCfgFiles},{tests,Tests}] =
+ ct:get_testspec_terms([config,tests]),
+ CfgNames = [list_to_atom(filename:basename(TSCfgFile)) ||
+ {Node,TSCfgFile} <- TSCfgFiles, Node == node()],
+ true = (length(CfgNames) == length(CfgFiles)),
+ [true = lists:member(CfgName,CfgFiles) || CfgName <- CfgNames],
+ true = lists:any(fun({{_Node,_Dir},Suites}) ->
+ lists:keymember(?MODULE, 1, Suites)
+ end, Tests);
+ true ->
+ ok
+ end,
+
Config.
%%--------------------------------------------------------------------
diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl
index 7c51aca246..c3faebbd64 100644
--- a/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests1/t12_SUITE.erl
@@ -55,7 +55,7 @@ init_per_suite(Config) ->
{incl_both2,[cfg11,cfg12,cfg21]} -> ok;
{incl_both2,[cfg21]} -> ok;
_ -> ok
- end,
+ end,
Config.
%%--------------------------------------------------------------------
diff --git a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl
index 36c1b4279b..e189b168c7 100644
--- a/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_3_SUITE_data/tests2/t21_SUITE.erl
@@ -41,8 +41,11 @@ suite() ->
%% @end
%%--------------------------------------------------------------------
init_per_suite(Config) ->
+ TCName = ct:get_config(tcname),
+ CfgFiles = ct:get_config(file,undefined,[all]),
+
%% verify that expected config file can be read
- case {ct:get_config(tcname),ct:get_config(file,undefined,[all])} of
+ case {TCName,CfgFiles} of
{start_separate,[cfg11]} -> ok;
{start_join,[cfg11,cfg21]} -> ok;
{incl_separate1,[cfg11]} -> ok;
@@ -55,6 +58,28 @@ init_per_suite(Config) ->
{incl_both2,[cfg11]} -> ok;
_ -> ok
end,
+
+ %% test the get_testspec_terms functionality
+ if CfgFiles /= undefined ->
+ TSTerms = case ct:get_testspec_terms() of
+ undefined -> exit('testspec should not be undefined');
+ Result -> Result
+ end,
+ true = lists:keymember(config, 1, TSTerms),
+ {config,TSCfgFiles} = ct:get_testspec_terms(config),
+ [{config,TSCfgFiles},{tests,Tests}] =
+ ct:get_testspec_terms([config,tests]),
+ CfgNames = [list_to_atom(filename:basename(TSCfgFile)) ||
+ {Node,TSCfgFile} <- TSCfgFiles, Node == node()],
+ true = (length(CfgNames) == length(CfgFiles)),
+ [true = lists:member(CfgName,CfgFiles) || CfgName <- CfgNames],
+ true = lists:any(fun({{_Node,_Dir},Suites}) ->
+ lists:keymember(?MODULE, 1, Suites)
+ end, Tests);
+ true ->
+ ok
+ end,
+
Config.
%%--------------------------------------------------------------------
diff --git a/lib/common_test/test/telnet_server.erl b/lib/common_test/test/telnet_server.erl
index d25ee62d38..107d98d72c 100644
--- a/lib/common_test/test/telnet_server.erl
+++ b/lib/common_test/test/telnet_server.erl
@@ -59,7 +59,7 @@ init(Opts) ->
accept(State),
ok = gen_tcp:close(LSock),
dbg("telnet_server closed the listen socket ~p\n", [LSock]),
- timer:sleep(1000),
+ ct:sleep(1000),
ok.
listen(0, _Port, _Opts) ->
@@ -68,7 +68,7 @@ listen(Retries, Port, Opts) ->
case gen_tcp:listen(Port, Opts) of
{error,eaddrinuse} ->
dbg("Listen port not released, trying again..."),
- timer:sleep(5000),
+ ct:sleep(5000),
listen(Retries-1, Port, Opts);
Ok = {ok,_LSock} ->
Ok;
@@ -220,7 +220,7 @@ do_handle_data("echo_sep " ++ Data,State) ->
Msgs = string:tokens(Data," "),
lists:foreach(fun(Msg) ->
send(Msg,State),
- timer:sleep(10)
+ ct:sleep(10)
end, Msgs),
send("\r\n> ",State),
{ok,State};
@@ -242,6 +242,12 @@ do_handle_data("echo_loop " ++ Data,State) ->
ReturnData = string:join(Lines,"\n"),
send_loop(list_to_integer(TStr),ReturnData,State),
{ok,State};
+do_handle_data("echo_delayed_prompt "++Data,State) ->
+ [MsStr|EchoData] = string:tokens(Data, " "),
+ send(string:join(EchoData,"\n"),State),
+ ct:sleep(list_to_integer(MsStr)),
+ send("\r\n> ",State),
+ {ok,State};
do_handle_data("disconnect_after " ++WaitStr,State) ->
Wait = list_to_integer(string:strip(WaitStr,right,$\n)),
dbg("Server will close connection in ~w ms...", [Wait]),
@@ -284,15 +290,15 @@ send(Data,State) ->
send_loop(T,Data,State) ->
dbg("Server sending ~p in loop for ~w ms...~n",[Data,T]),
- send_loop(now(),T,Data,State).
+ send_loop(os:timestamp(),T,Data,State).
send_loop(T0,T,Data,State) ->
- ElapsedMS = trunc(timer:now_diff(now(),T0)/1000),
+ ElapsedMS = trunc(timer:now_diff(os:timestamp(),T0)/1000),
if ElapsedMS >= T ->
ok;
true ->
send(Data,State),
- timer:sleep(500),
+ ct:sleep(500),
send_loop(T0,T,Data,State)
end.
@@ -314,7 +320,7 @@ dbg(_F,_A) ->
io:format("[telnet_server, ~s]\n" ++ _F,[TS|_A]).
timestamp() ->
- {MS,S,US} = now(),
+ {MS,S,US} = os:timestamp(),
{{Year,Month,Day}, {Hour,Min,Sec}} =
calendar:now_to_local_time({MS,S,US}),
MilliSec = trunc(US/1000),
diff --git a/lib/common_test/vsn.mk b/lib/common_test/vsn.mk
index 849edc15e1..ff2bd20ab3 100644
--- a/lib/common_test/vsn.mk
+++ b/lib/common_test/vsn.mk
@@ -1 +1 @@
-COMMON_TEST_VSN = 1.9
+COMMON_TEST_VSN = 1.11
diff --git a/lib/compiler/doc/src/compile.xml b/lib/compiler/doc/src/compile.xml
index 5fccdcdcb5..a271729c82 100644
--- a/lib/compiler/doc/src/compile.xml
+++ b/lib/compiler/doc/src/compile.xml
@@ -32,15 +32,15 @@
<modulesummary>Erlang Compiler</modulesummary>
<description>
<p>This module provides an interface to the standard Erlang
- compiler. It can generate either a new file which contains
- the object code, or return a binary which can be loaded directly.
+ compiler. It can generate either a new file, which contains
+ the object code, or return a binary, which can be loaded directly.
</p>
</description>
<funcs>
<func>
<name>file(File)</name>
- <fsummary>Compile a file</fsummary>
+ <fsummary>Compiles a file.</fsummary>
<desc>
<p>Is the same as
<c>file(File, [verbose,report_errors,report_warnings])</c>.
@@ -50,7 +50,7 @@
<func>
<name>file(File, Options) -> CompRet</name>
- <fsummary>Compile a file</fsummary>
+ <fsummary>Compiles a file.</fsummary>
<type>
<v>CompRet = ModRet | BinRet | ErrRet</v>
<v>ModRet = {ok,ModuleName} | {ok,ModuleName,Warnings}</v>
@@ -64,39 +64,38 @@
<p>Returns <c>{ok,ModuleName}</c> if successful, or <c>error</c>
if there are errors. An object code file is created if
- the compilation succeeds with no errors. It is considered
+ the compilation succeeds without errors. It is considered
to be an error if the module name in the source code is
not the same as the basename of the output file.</p>
- <p><marker id="type-option"/>Here follows first all elements of <c>Options</c> that in
- some way control the behavior of the compiler.</p>
+ <p><marker id="type-option"/>Available options:</p>
<taglist>
<tag><c>basic_validation</c></tag>
<item>
- <p>This option is fast way to test whether a module will
- compile successfully (mainly useful for code generators
- that want to verify the code they emit). No code will
+ <p>This option is a fast way to test whether a module will
+ compile successfully. This is useful for code generators
+ that want to verify the code that they emit. No code is
generated. If warnings are enabled, warnings generated by
the <c>erl_lint</c> module (such as warnings for unused
- variables and functions) will be returned too.</p>
+ variables and functions) are also returned.</p>
- <p>Use the <c>strong_validation</c> option to generate all
+ <p>Use option <c>strong_validation</c> to generate all
warnings that the compiler would generate.</p>
</item>
<tag><c>strong_validation</c></tag>
<item>
- <p>Similar to the <c>basic_validation</c> option, no code
- will be generated, but more compiler passes will be run
- to ensure also warnings generated by the optimization
- passes are generated (such as clauses that will not match
+ <p>Similar to option <c>basic_validation</c>. No code
+ is generated, but more compiler passes are run
+ to ensure that warnings generated by the optimization
+ passes are generated (such as clauses that will not match,
or expressions that are guaranteed to fail with an
- exception at run-time).</p>
+ exception at runtime).</p>
</item>
<tag><c>binary</c></tag>
<item>
- <p>Causes the compiler to return the object code in a
+ <p>The compiler returns the object code in a
binary instead of creating an object file. If successful,
the compiler returns <c>{ok,ModuleName,Binary}</c>.</p>
</item>
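The reworded option descriptions above can be illustrated with a short compile:file/2 session (mygen.erl is a hypothetical generated module):

%% cheap check that the generated source compiles at all; no .beam is written
{ok, mygen} = compile:file("mygen.erl", [basic_validation, report]),
%% compile to a binary in memory and load it without touching the file system
{ok, mygen, Beam} = compile:file("mygen.erl", [binary, report]),
{module, mygen} = code:load_binary(mygen, "mygen.erl", Beam).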
@@ -105,7 +104,9 @@
<item>
<p>The compiler will emit informational warnings about binary
matching optimizations (both successful and unsuccessful).
- See the <em>Efficiency Guide</em> for further information.</p>
+ For more information, see the section about
+ <seealso marker="doc/efficiency_guide:binaryhandling#bin_opt_info">bin_opt_info</seealso>
+ in the Efficiency Guide.</p>
</item>
<tag><c>compressed</c></tag>
@@ -117,20 +118,19 @@
<tag><c>debug_info</c></tag>
<item>
<marker id="debug_info"></marker>
- <p>Include debug information in the form of abstract code
+ <p>Includes debug information in the form of abstract code
(see
<seealso marker="erts:absform">The Abstract Format</seealso>
in ERTS User's Guide) in the compiled beam module. Tools
- such as Debugger, Xref and Cover require the debug
- information to be included.</p>
+ such as <c>Debugger</c>, <c>Xref</c>, and <c>Cover</c> require
+ the debug information to be included.</p>
<p><em>Warning</em>: Source code can be reconstructed from
the debug information. Use encrypted debug information
- (see below) to prevent this.</p>
+ (<c>encrypt_debug_info</c>) to prevent this.</p>
- <p>See
- <seealso marker="stdlib:beam_lib#debug_info">beam_lib(3)</seealso>
- for details.</p>
+ <p>For details, see
+ <seealso marker="stdlib:beam_lib#debug_info">beam_lib(3)</seealso>.</p>
</item>
<tag><c>{debug_info_key,KeyString}</c></tag>
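As a concrete illustration of the encrypted debug information options described above (the module name m and the key "secret" are made up for the example):

%% compile with encrypted abstract code in the debug_info chunk
{ok, m} = compile:file("m.erl", [{debug_info_key, "secret"}]),
%% register a key fun so that beam_lib can decrypt the chunk again
ok = beam_lib:crypto_key_fun(
       fun(init) -> ok;
          ({debug_info, des3_cbc, m, _BeamFile}) -> "secret"
       end),
{ok, {m, [{abstract_code, _Abst}]}} =
    beam_lib:chunks("m.beam", [abstract_code]).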
@@ -138,65 +138,61 @@
<tag><c>{debug_info_key,{Mode,KeyString}}</c></tag>
<item>
<marker id="debug_info_key"></marker>
- <p>Include debug information, but encrypt it, so that it
+ <p>Includes debug information, but encrypts it so that it
cannot be accessed without supplying the key. (To give
- the <c>debug_info</c> option as well is allowed, but is
+ option <c>debug_info</c> as well is allowed, but
not necessary.) Using this option is a good way to always
have the debug information available during testing, yet
- protect the source code.</p>
+ protecting the source code.</p>
<p><c>Mode</c> is the type of crypto algorithm to be used
- for encrypting the debug information. The default type --
- and currently the only type -- is <c>des3_cbc</c>.</p>
- <p>See
- <seealso marker="stdlib:beam_lib#debug_info">beam_lib(3)</seealso>
- for details.</p>
+ for encrypting the debug information. The default
+ (and currently the only) type is <c>des3_cbc</c>.</p>
+ <p>For details, see
+ <seealso marker="stdlib:beam_lib#debug_info">beam_lib(3)</seealso>.</p>
</item>
<tag><c>encrypt_debug_info</c></tag>
<item>
<marker id="encrypt_debug_info"></marker>
- <p>Like the <c>debug_info_key</c> option above, except that
- the key will be read from an <c>.erlang.crypt</c> file.
+ <p>Similar to the <c>debug_info_key</c> option, but
+ the key is read from an <c>.erlang.crypt</c> file.
</p>
- <p>See
- <seealso marker="stdlib:beam_lib#debug_info">beam_lib(3)</seealso>
- for details.</p>
+ <p>For details, see
+ <seealso marker="stdlib:beam_lib#debug_info">beam_lib(3)</seealso>.</p>
</item>
<tag><c>makedep</c></tag>
<item>
- <p>Produce a Makefile rule to track headers dependencies.
+ <p>Produces a Makefile rule to track headers dependencies.
No object file is produced.
</p>
<p>By default, this rule is written to
- <c><![CDATA[<File>.Pbeam]]></c>. However, if the option
+ <c><![CDATA[<File>.Pbeam]]></c>. However, if option
<c>binary</c> is set, nothing is written and the rule is
returned in <c>Binary</c>.
</p>
- <p>For instance, if one has the following module:
+ <p>For example, if you have the following module:
</p>
<code>
-module(module).
-include_lib("eunit/include/eunit.hrl").
--include("header.hrl").
- </code>
- <p>Here is the Makefile rule generated by this option:
+-include("header.hrl").</code>
+ <p>The Makefile rule generated by this option looks as follows:
</p>
<code>
module.beam: module.erl \
/usr/local/lib/erlang/lib/eunit/include/eunit.hrl \
- header.hrl
- </code>
+ header.hrl</code>
</item>
<tag><c>{makedep_output, Output}</c></tag>
<item>
- <p>Write generated rule(s) to <c>Output</c> instead of the
+ <p>Writes generated rules to <c>Output</c> instead of the
default <c><![CDATA[<File>.Pbeam]]></c>. <c>Output</c>
can be a filename or an <c>io_device()</c>. To write to
- stdout, use <c>standard_io</c>. However if <c>binary</c>
+ stdout, use <c>standard_io</c>. However, if <c>binary</c>
is set, nothing is written to <c>Output</c> and the
result is returned to the caller with
<c>{ok, ModuleName, Binary}</c>.
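A short usage sketch for the makedep family of options documented above (file names are illustrative):

%% print the dependency rule for module.erl on stdout instead of writing
%% it to module.Pbeam, and name the target explicitly
compile:file("module.erl",
             [makedep,
              {makedep_output, standard_io},
              {makedep_target, "module.beam"}]).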
@@ -205,7 +201,7 @@ module.beam: module.erl \
<tag><c>{makedep_target, Target}</c></tag>
<item>
- <p>Change the name of the rule emitted to <c>Target</c>.
+ <p>Changes the name of the rule emitted to <c>Target</c>.
</p>
</item>
@@ -217,20 +213,20 @@ module.beam: module.erl \
<tag><c>makedep_add_missing</c></tag>
<item>
- <p>Consider missing headers as generated files and add them to the
+ <p>Considers missing headers as generated files and adds them to the
dependencies.
</p>
</item>
<tag><c>makedep_phony</c></tag>
<item>
- <p>Add a phony target for each dependency.
+ <p>Adds a phony target for each dependency.
</p>
</item>
<tag><c>'P'</c></tag>
<item>
- <p>Produces a listing of the parsed code after preprocessing
+ <p>Produces a listing of the parsed code, after preprocessing
and parse transforms, in the file
<c><![CDATA[<File>.P]]></c>. No object file is produced.
</p>
@@ -238,7 +234,7 @@ module.beam: module.erl \
<tag><c>'E'</c></tag>
<item>
- <p>Produces a listing of the code after all source code
+ <p>Produces a listing of the code, after all source code
transformations have been performed, in the file
<c><![CDATA[<File>.E]]></c>. No object file is produced.
</p>
@@ -258,21 +254,21 @@ module.beam: module.erl \
<tag><c>report</c></tag>
<item>
- <p>This is a short form for both <c>report_errors</c> and
+ <p>A short form for both <c>report_errors</c> and
<c>report_warnings</c>.</p>
</item>
<tag><c>return_errors</c></tag>
<item>
- <p>If this flag is set, then
+ <p>If this flag is set,
<c>{error,ErrorList,WarningList}</c> is returned when
there are errors.</p>
</item>
<tag><c>return_warnings</c></tag>
<item>
- <p>If this flag is set, then an extra field containing
- <c>WarningList</c> is added to the tuples returned on
+ <p>If this flag is set, an extra field, containing
+ <c>WarningList</c>, is added to the tuples returned on
success.</p>
</item>
@@ -284,13 +280,13 @@ module.beam: module.erl \
<tag><c>return</c></tag>
<item>
- <p>This is a short form for both <c>return_errors</c> and
+ <p>A short form for both <c>return_errors</c> and
<c>return_warnings</c>.</p>
</item>
<tag><c>verbose</c></tag>
<item>
- <p>Causes more verbose information from the compiler
+ <p>Causes more verbose information from the compiler,
describing what it is doing.</p>
</item>
@@ -314,7 +310,7 @@ module.beam: module.erl \
<tag><c>{i,Dir}</c></tag>
<item>
- <p>Add <c>Dir</c> to the list of directories to be searched
+ <p>Adds <c>Dir</c> to the list of directories to be searched
when including a file. When encountering an
<c>-include</c> or <c>-include_lib</c> directive,
the compiler searches for header files in the following
@@ -322,14 +318,14 @@ module.beam: module.erl \
<list type="ordered">
<item>
<p><c>"."</c>, the current working directory of
- the file server;</p>
+ the file server</p>
</item>
<item>
- <p>the base name of the compiled file;</p>
+ <p>The base name of the compiled file</p>
</item>
<item>
- <p>the directories specified using the <c>i</c> option.
- The directory specified last is searched first.</p>
+ <p>The directories specified using option <c>i</c>;
+ the directory specified last is searched first</p>
</item>
</list>
</item>
@@ -353,15 +349,15 @@ module.beam: module.erl \
<tag><c>from_asm</c></tag>
<item>
<p>The input file is expected to be assembler code (default
- file suffix ".S"). Note that the format of assembler files
- is not documented, and may change between releases.</p>
+ file suffix ".S"). Notice that the format of assembler files
+ is not documented, and can change between releases.</p>
</item>
<tag><c>from_core</c></tag>
<item>
<p>The input file is expected to be core code (default
- file suffix ".core"). Note that the format of core files
- is not documented, and may change between releases.</p>
+ file suffix ".core"). Notice that the format of core files
+ is not documented, and can change between releases.</p>
</item>
<tag><c>no_strict_record_tests</c></tag>
@@ -369,9 +365,9 @@ module.beam: module.erl \
<p>This option is not recommended.</p>
<p>By default, the generated code for
- the <c>Record#record_tag.field</c> operation verifies that
- the tuple <c>Record</c> is of the correct size for
- the record and that the first element is the tag
+ operation <c>Record#record_tag.field</c> verifies that
+ the tuple <c>Record</c> has the correct size for
+ the record, and that the first element is the tag
<c>record_tag</c>. Use this option to omit
the verification code.</p>
</item>
@@ -390,79 +386,87 @@ module.beam: module.erl \
<tag><c>{no_auto_import,[{F,A}, ...]}</c></tag>
<item>
<p>Makes the function <c>F/A</c> no longer being
- auto-imported from the module <c>erlang</c>, which resolves
- BIF name clashes. This option has to be used to resolve name
- clashes with BIFs auto-imported before R14A, if one wants to
+ auto-imported from the <c>erlang</c> module, which resolves
+ BIF name clashes. This option must be used to resolve name
+ clashes with BIFs auto-imported before R14A, if it is needed to
call the local function with the same name as an
auto-imported BIF without module prefix.</p>
<note>
- <p>From R14A and forward, the compiler resolves calls
+ <p>As from R14A and forward, the compiler resolves calls
without module prefix to local or imported functions before
- trying auto-imported BIFs. If the BIF is to be
+ trying with auto-imported BIFs. If the BIF is to be
called, use the <c>erlang</c> module prefix in the call, not
- <c>{ no_auto_import,[{F,A}, ...]}</c></p>
+ <c>{ no_auto_import,[{F,A}, ...]}</c>.</p>
</note>
<p>If this option is written in the source code, as a
<c>-compile</c> directive, the syntax <c>F/A</c> can be used instead
- of <c>{F,A}</c>. Example:</p>
+ of <c>{F,A}</c>, for example:</p>
<code>-compile({no_auto_import,[error/1]}).</code>
</item>
<tag><c>no_auto_import</c></tag>
<item>
- <p>Do not auto import any functions from the module <c>erlang</c>.</p>
+ <p>Do not auto-import any functions from the <c>erlang</c> module.</p>
</item>
<tag><c>no_line_info</c></tag>
<item>
- <p>Omit line number information in order to produce a slightly
+ <p>Omits line number information to produce a slightly
smaller output file.
</p>
</item>
</taglist>
- <p>If warnings are turned on (the <c>report_warnings</c> option
- described above), the following options control what type of
- warnings that will be generated.
+ <p>If warnings are turned on (option <c>report_warnings</c>
+ described earlier), the following options control what type of
+ warnings are generated.
<marker id="erl_lint_options"></marker>
- With the exception of <c>{warn_format,Verbosity}</c> all
- options below have two forms; one <c>warn_xxx</c> form to
- turn on the warning and one <c>nowarn_xxx</c> form to turn off
- the warning. In the description that follows, the form that
- is used to change the default value is listed.</p>
+ Except from <c>{warn_format,Verbosity}</c>, the following options
+ have two forms:</p>
+ <list type="bulleted">
+ <item>A <c>warn_xxx</c> form, to turn on the warning.</item>
+ <item>A <c>nowarn_xxx</c> form, to turn off the warning.</item>
+ </list>
+ <p>In the descriptions that follow, the form that is used to change
+ the default value is listed.</p>
<taglist>
<tag><c>{warn_format, Verbosity}</c></tag>
<item>
<p>Causes warnings to be emitted for malformed format
strings as arguments to <c>io:format</c> and similar
- functions. <c>Verbosity</c> selects the amount of
- warnings: 0 = no warnings; 1 = warnings for invalid
- format strings and incorrect number of arguments; 2 =
- warnings also when the validity could not be checked
- (for example, when the format string argument is a
- variable). The default verbosity is 1. Verbosity 0 can
- also be selected by the option <c>nowarn_format</c>.</p>
+ functions.</p>
+ <p><c>Verbosity</c> selects the number of warnings:</p>
+ <list type="bulleted">
+ <item><c>0</c> = No warnings</item>
+ <item><c>1</c> = Warnings for invalid format strings and incorrect
+ number of arguments</item>
+ <item><c>2</c> = Warnings also when the validity cannot
+ be checked, for example, when the format string argument is a
+ variable.</item>
+ </list>
+ <p>The default verbosity is <c>1</c>. Verbosity <c>0</c> can
+ also be selected by option <c>nowarn_format</c>.</p>
</item>
<tag><c>nowarn_bif_clash</c></tag>
<item>
- <p>This option is removed, it will generate a fatal error if used.</p>
+ <p>This option is removed; it generates a fatal error if used.</p>
<warning>
- <p>Beginning with R14A, the compiler no longer calls the
+ <p>As from R14A, the compiler no longer calls the
auto-imported BIF if the name clashes with a local or
- explicitly imported function and a call without explicit
- module name is issued. Instead the local or imported
- function is called. Still accepting <c>nowarn_bif_clash</c> would makes a
- module calling functions clashing with autoimported BIFs
+ explicitly imported function, and a call without explicit
+ module name is issued. Instead, the local or imported
+ function is called. Still accepting <c>nowarn_bif_clash</c> would
+ make a module calling functions clashing with auto-imported BIFs
compile with both the old and new compilers, but with
- completely different semantics, why the option was removed.</p>
+ completely different semantics. This is why the option is removed.</p>
- <p>The use of this option has always been strongly discouraged.
- From OTP R14A and forward it's an error to use it.</p>
+ <p>The use of this option has always been discouraged.
+ As from R14A, it is an error to use it.</p>
<p>To resolve BIF clashes, use explicit module names or the
<c>{no_auto_import,[F/A]}</c> compiler directive.</p>
</warning>
@@ -470,11 +474,11 @@ module.beam: module.erl \
<tag><c>{nowarn_bif_clash, FAs}</c></tag>
<item>
- <p>This option is removed, it will generate a fatal error if used.</p>
+	  <p>This option is removed; it generates a fatal error if used.</p>
<warning>
- <p>The use of this option has always been strongly discouraged.
- From OTP R14A and forward it's an error to use it.</p>
+ <p>The use of this option has always been discouraged.
+ As from R14A, it is an error to use it.</p>
<p>To resolve BIF clashes, use explicit module names or the
<c>{no_auto_import,[F/A]}</c> compiler directive.</p>
</warning>
@@ -482,35 +486,29 @@ module.beam: module.erl \
<tag><c>warn_export_all</c></tag>
<item>
- <p>Causes a warning to be emitted if the <c>export_all</c>
- option has also been given.</p>
+ <p>Emits a warning if option <c>export_all</c> is also given.</p>
</item>
<tag><c>warn_export_vars</c></tag>
<item>
- <p>Causes warnings to be emitted for all implicitly
- exported variables referred to after the primitives
- where they were first defined. No warnings for exported
- variables unless they are referred to in some pattern,
- which is the default, can be selected by the option
- <c>nowarn_export_vars</c>.</p>
+ <p>Emits warnings for all implicitly exported variables
+ referred to after the primitives where they were first defined.
+ By default, the compiler only emits warnings for exported
+ variables referred to in a pattern.</p>
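+          <p>A hypothetical example that is reported only when this option is
+          given, as <c>Y</c> is exported from the <c>case</c> but is not
+          used in a pattern afterwards:</p>
+          <code>
+f(X) ->
+    case X of
+        true  -> Y = 1;
+        false -> Y = 2
+    end,
+    Y + 1.</code>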
</item>
- <tag><c>warn_shadow_vars</c></tag>
+ <tag><c>nowarn_shadow_vars</c></tag>
<item>
- <p>Causes warnings to be emitted for "fresh" variables
- in functional objects or list comprehensions with the same
- name as some already defined variable. The default is to
- warn for such variables. No warnings for shadowed
- variables can be selected by the option
- <c>nowarn_shadow_vars</c>.</p>
+ <p>Turns off warnings for "fresh" variables
+ in functional objects or list comprehensions with the same
+ name as some already defined variable. Default is to
+ emit warnings for such variables.</p>
</item>
<tag><c>nowarn_unused_function</c></tag>
<item>
- <p>Turns off warnings for unused local functions.
- By default (<c>warn_unused_function</c>), warnings are
- emitted for all local functions that are not called
+ <p>Turns off warnings for unused local functions. Default
+ is to emit warnings for all local functions that are not called
directly or indirectly by an exported function.
The compiler does not include unused local functions in
the generated beam file, but the warning is still useful
@@ -519,148 +517,142 @@ module.beam: module.erl \
<tag><c>{nowarn_unused_function, FAs}</c></tag>
<item>
- <p>Turns off warnings for unused local functions as
- <c>nowarn_unused_function</c> but only for the mentioned
+ <p>Turns off warnings for unused local functions like
+ <c>nowarn_unused_function</c> does, but only for the mentioned
local functions. <c>FAs</c> is a tuple <c>{Name,Arity}</c>
or a list of such tuples.</p>
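        <p>For example (the function name is hypothetical), to keep the
          warning for all other unused functions but silence it for one
          helper:</p>
        <code>-compile({nowarn_unused_function,[{unused_helper,2}]}).</code>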
</item>
<tag><c>nowarn_deprecated_function</c></tag>
<item>
- <p>Turns off warnings for calls to deprecated functions. By
- default (<c>warn_deprecated_function</c>), warnings are
- emitted for every call to a function known by the compiler
- to be deprecated. Note that the compiler does not know
- about the <c>-deprecated()</c> attribute but uses an
+ <p>Turns off warnings for calls to deprecated functions. Default
+ is to emit warnings for every call to a function known by the
+ compiler to be deprecated. Notice that the compiler does not know
+ about attribute <c>-deprecated()</c>, but uses an
assembled list of deprecated functions in Erlang/OTP. To
- do a more general check the <c>Xref</c> tool can be used.
+ do a more general check, the <c>Xref</c> tool can be used.
See also
<seealso marker="tools:xref#deprecated_function">xref(3)</seealso>
and the function
- <seealso marker="tools:xref#m/1">xref:m/1</seealso> also
- accessible through
- the <seealso marker="stdlib:c#xm/1">c:xm/1</seealso>
- function.</p>
+ <seealso marker="tools:xref#m/1">xref:m/1</seealso>, also
+ accessible through the function
+ <seealso marker="stdlib:c#xm/1">c:xm/1</seealso>.</p>
</item>
<tag><c>{nowarn_deprecated_function, MFAs}</c></tag>
<item>
- <p>Turns off warnings for calls to deprecated functions as
- <c>nowarn_deprecated_function</c> but only for
+ <p>Turns off warnings for calls to deprecated functions like
+ <c>nowarn_deprecated_function</c> does, but only for
the mentioned functions. <c>MFAs</c> is a tuple
<c>{Module,Name,Arity}</c> or a list of such tuples.</p>
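        <p>For example (the module and function names are hypothetical), to
          suppress the warning for one specific deprecated call:</p>
        <code>-compile({nowarn_deprecated_function,[{old_lib,old_call,1}]}).</code>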
</item>
<tag><c>nowarn_deprecated_type</c></tag>
<item>
- <p>Turns off warnings for uses of deprecated types. By
- default (<c>warn_deprecated_type</c>), warnings are
- emitted for every use of a type known by the compiler
- to be deprecated.</p>
+ <p>Turns off warnings for use of deprecated types. Default
+ is to emit warnings for every use of a type known by the compiler
+ to be deprecated.</p>
</item>
<tag><c>warn_obsolete_guard</c></tag>
<item>
- <p>Causes warnings to be emitted for calls to old type
- testing BIFs such as <c>pid/1</c> and <c>list/1</c>. See
- the
- <seealso marker="doc/reference_manual:expressions#guards">Erlang Reference Manual</seealso>
+ <p>Emits warnings for calls to old type testing BIFs,
+ such as <c>pid/1</c> and <c>list/1</c>. See the
+ <seealso marker="doc/reference_manual:expressions#guards">Erlang Reference Manual</seealso>
for a complete list of type testing BIFs and their old
- equivalents. No warnings for calls to old type testing
- BIFs, which is the default, can be selected by the option
- <c>nowarn_obsolete_guard</c>.</p>
+ equivalents. Default is to emit no warnings for calls to
+ old type testing BIFs.</p>
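+          <p>A hypothetical clause that is reported when this option is
+          given; <c>is_pid/1</c> is the modern equivalent:</p>
+          <code>
+f(P) when pid(P) -> {ok,P}.</code>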
</item>
<tag><c>warn_unused_import</c></tag>
<item>
- <p>Causes warnings to be emitted for unused imported
- functions. No warnings for unused imported functions,
- which is the default, can be selected by the option
- <c>nowarn_unused_import</c>. </p>
+ <p>Emits warnings for unused imported functions.
+ Default is to emit no warnings for unused imported functions.</p>
</item>
<tag><c>nowarn_unused_vars</c></tag>
<item>
- <p>By default, warnings are emitted for variables which
- are not used, with the exception of variables beginning
- with an underscore ("Prolog style warnings").
+ <p>By default, warnings are emitted for unused variables,
+ except for variables beginning with an underscore
+ ("Prolog style warnings").
Use this option to turn off this kind of warnings.</p>
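        <p>For example (a hypothetical function), <c>Y</c> is reported as
          unused by default, whereas <c>_Ignored</c> is not:</p>
        <code>
f(X, Y, _Ignored) -> X.</code>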
</item>
<tag><c>nowarn_unused_record</c></tag>
<item>
- <p>Turns off warnings for unused record types. By
- default (<c>warn_unused_records</c>), warnings are
- emitted for unused locally defined record types.</p>
+ <p>Turns off warnings for unused record types. Default is to
+ emit warnings for unused locally defined record types.</p>
</item>
</taglist>
<p>Another class of warnings is generated by the compiler
during optimization and code generation. They warn about
patterns that will never match (such as <c>a=b</c>), guards
- that will always evaluate to false, and expressions that will
+ that always evaluate to false, and expressions that
always fail (such as <c>atom+42</c>).</p>
-
- <p>Note that the compiler does not warn for expressions that it
- does not attempt to optimize. For instance, the compiler tries
- to evaluate <c>1/0</c>, notices that it will cause an
- exception and emits a warning. On the other hand,
- the compiler is silent about the similar expression
- <c>X/0</c>; because of the variable in it, the compiler does
- not even try to evaluate and therefore it emits no warnings.
- </p>
-
- <p>Currently, those warnings cannot be disabled (except by
+ <p>Those warnings cannot be disabled (except by
disabling all warnings).</p>
+ <note>
+ <p>The compiler does not warn for expressions that it
+ does not attempt to optimize. For example, the compiler tries
+ to evaluate <c>1/0</c>, detects that it will cause an
+ exception, and emits a warning. However,
+            the compiler is silent about the similar expression
+            <c>X/0</c>; because of the variable in it, the compiler does
+            not even try to evaluate the expression and therefore emits no warning.</p>
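+            <p>A minimal sketch of the difference (hypothetical functions):</p>
+            <code>
+t() -> 1/0.    %% Reported: this expression always fails.
+u(X) -> X/0.   %% Not reported: not evaluated at compile time.</code>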
+ </note>
+
<warning>
- <p>Obviously, the absence of warnings does not mean that
+ <p>The absence of warnings does not mean that
there are no remaining errors in the code.</p>
</warning>
-
- <p>Note that all the options except the include path
- (<c>{i,Dir}</c>) can also be given in the file with a
- <c>-compile([Option,...])</c>. attribute.
- The <c>-compile()</c> attribute is allowed after function
+
+ <note>
+ <p>All options, except the include path
+ (<c>{i,Dir}</c>), can also be given in the file with attribute
+ <c>-compile([Option,...])</c>.
+ Attribute <c>-compile()</c> is allowed after the function
definitions.</p>
-
- <p>Note also that the <c>{nowarn_unused_function, FAs}</c>,
+ </note>
+
+ <note>
+ <p>The options <c>{nowarn_unused_function, FAs}</c>,
<c>{nowarn_bif_clash, FAs}</c>, and
- <c>{nowarn_deprecated_function, MFAs}</c> options are only
+ <c>{nowarn_deprecated_function, MFAs}</c> are only
recognized when given in files. They are not affected by
- the <c>warn_unused_function</c>, <c>warn_bif_clash</c>, or
- <c>warn_deprecated_function</c> options.</p>
+ options <c>warn_unused_function</c>, <c>warn_bif_clash</c>, or
+ <c>warn_deprecated_function</c>.</p>
+ </note>
<p>For debugging of the compiler, or for pure curiosity,
the intermediate code generated by each compiler pass can be
inspected.
- A complete list of the options to produce list files can be
- printed by typing <c>compile:options()</c> at the Erlang
- shell prompt.
- The options will be printed in order that the passes are
+ To print a complete list of the options to produce list files,
+ type <c>compile:options()</c> at the Erlang shell prompt.
+ The options are printed in the order that the passes are
executed. If more than one listing option is used, the one
representing the earliest pass takes effect.</p>
- <p><em>Unrecognized options are ignored.</em></p>
+ <p>Unrecognized options are ignored.</p>
<p>Both <c>WarningList</c> and <c>ErrorList</c> have
the following format:</p>
<code>
-[{FileName,[ErrorInfo]}].
- </code>
-
- <p><c>ErrorInfo</c> is described below. The file name has been
- included here as the compiler uses the Erlang pre-processor
- <c>epp</c>, which allows the code to be included in other
- files. For this reason, it is important to know to
- <em>which</em> file an error or warning line number refers.
+[{FileName,[ErrorInfo]}].</code>
+
+ <p><c>ErrorInfo</c> is described later in this section.
+ The filename is included here, as the compiler uses the
+ Erlang pre-processor <c>epp</c>, which allows the code to be
+ included in other files. It is therefore important to know to
+ <em>which</em> file the line number of an error or a warning refers.
</p>
</desc>
</func>
<func>
<name>forms(Forms)</name>
- <fsummary>Compile a list of forms</fsummary>
+ <fsummary>Compiles a list of forms.</fsummary>
<desc>
<p>Is the same as
<c>forms(File, [verbose,report_errors,report_warnings])</c>.
@@ -670,7 +662,7 @@ module.beam: module.erl \
<func>
<name>forms(Forms, Options) -> CompRet</name>
- <fsummary>Compile a list of forms</fsummary>
+ <fsummary>Compiles a list of forms.</fsummary>
<type>
<v>Forms = [Form]</v>
<v>CompRet = BinRet | ErrRet</v>
@@ -681,48 +673,49 @@ module.beam: module.erl \
<desc>
<p>Analogous to <c>file/1</c>, but takes a list of forms (in
the Erlang abstract format representation) as first argument.
- The option <c>binary</c> is implicit; i.e., no object code
- file is produced. Options that would ordinarily produce a
- listing file, such as 'E', will instead cause the internal
- format for that compiler pass (an Erlang term; usually not a
- binary) to be returned instead of a binary.</p>
+ Option <c>binary</c> is implicit, that is, no object code
+ file is produced. For options that normally produce a listing
+ file, such as 'E', the internal format for that compiler pass
+ (an Erlang term, usually not a binary) is returned instead of
+ a binary.</p>
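+        <p>A minimal sketch (the forms are written out by hand here; in
+        practice they usually come from a parse transform or from parsing
+        a source file):</p>
+        <code>
+Forms = [{attribute,1,module,tiny},
+         {attribute,2,export,[{f,0}]},
+         {function,3,f,0,[{clause,3,[],[],[{atom,3,ok}]}]}],
+{ok,tiny,Beam} = compile:forms(Forms),
+{module,tiny} = code:load_binary(tiny, "tiny.erl", Beam),
+ok = tiny:f().</code>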
</desc>
</func>
<func>
<name>format_error(ErrorDescriptor) -> chars()</name>
- <fsummary>Format an error descriptor</fsummary>
+ <fsummary>Formats an error descriptor.</fsummary>
<type>
<v>ErrorDescriptor = errordesc()</v>
</type>
<desc>
<p>Uses an <c>ErrorDescriptor</c> and returns a deep list of
- characters which describes the error. This function is
- usually called implicitly when an <c>ErrorInfo</c> structure
- is processed. See below.</p>
+ characters that describes the error. This function is
+ usually called implicitly when an <c>ErrorInfo</c> structure
+ (described in section
+ <seealso marker="#error_information">Error Information</seealso>) is processed.</p>
</desc>
</func>
<func>
<name>output_generated(Options) -> true | false</name>
- <fsummary>Determine whether the compile will generate an output file</fsummary>
+ <fsummary>Determines whether the compiler generates an output file.</fsummary>
<type>
<v>Options = [term()]</v>
</type>
<desc>
- <p>Determines whether the compiler would generate a <c>beam</c>
+ <p>Determines whether the compiler generates a <c>beam</c>
file with the given options. <c>true</c> means that a <c>beam</c>
- file would be generated; <c>false</c> means that the compiler
- would generate some listing file, return a binary, or merely
- check the syntax of the source code.</p>
+ file is generated. <c>false</c> means that the compiler
+ generates some listing file, returns a binary, or merely
+ checks the syntax of the source code.</p>
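+        <p>For example (assuming no conflicting options are given):</p>
+        <code>
+true  = compile:output_generated([]),
+false = compile:output_generated([binary]).</code>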
</desc>
</func>
<func>
<name>noenv_file(File, Options) -> CompRet</name>
- <fsummary>Compile a file (ignoring ERL_COMPILER_OPTIONS)</fsummary>
+    <fsummary>Compiles a file (ignoring <c>ERL_COMPILER_OPTIONS</c>).</fsummary>
<desc>
- <p>Works exactly like <seealso marker="#file/2">file/2</seealso>,
+ <p>Works like <seealso marker="#file/2">file/2</seealso>,
except that the environment variable <c>ERL_COMPILER_OPTIONS</c>
is not consulted.</p>
</desc>
@@ -730,9 +723,9 @@ module.beam: module.erl \
<func>
<name>noenv_forms(Forms, Options) -> CompRet</name>
- <fsummary>Compile a list of forms (ignoring ERL_COMPILER_OPTIONS)</fsummary>
+    <fsummary>Compiles a list of forms (ignoring <c>ERL_COMPILER_OPTIONS</c>).</fsummary>
<desc>
- <p>Works exactly like <seealso marker="#forms/2">forms/2</seealso>,
+ <p>Works like <seealso marker="#forms/2">forms/2</seealso>,
except that the environment variable <c>ERL_COMPILER_OPTIONS</c>
is not consulted.</p>
</desc>
@@ -740,12 +733,13 @@ module.beam: module.erl \
<func>
<name>noenv_output_generated(Options) -> true | false</name>
- <fsummary>Determine whether the compile will generate an output file (ignoring ERL_COMPILER_OPTIONS)</fsummary>
+ <fsummary>Determines whether the compiler generates an output file
+      (ignoring <c>ERL_COMPILER_OPTIONS</c>).</fsummary>
<type>
<v>Options = [term()]</v>
</type>
<desc>
- <p>Works exactly like
+ <p>Works like
<seealso marker="#output_generated/1">output_generated/1</seealso>,
except that the environment variable <c>ERL_COMPILER_OPTIONS</c>
is not consulted.</p>
@@ -755,14 +749,14 @@ module.beam: module.erl \
</funcs>
<section>
- <title>Default compiler options</title>
+ <title>Default Compiler Options</title>
<p>The (host operating system) environment variable
<c>ERL_COMPILER_OPTIONS</c> can be used to give default compiler
options. Its value must be a valid Erlang term. If the value is a
- list, it will be used as is. If it is not a list, it will be put
+ list, it is used as is. If it is not a list, it is put
into a list.</p>
- <p>The list will be appended to any options given to
+ <p>The list is appended to any options given to
<seealso marker="#file/2">file/2</seealso>,
<seealso marker="#forms/2">forms/2</seealso>, and
<seealso marker="#output_generated/1">output_generated/2</seealso>.
@@ -770,9 +764,9 @@ module.beam: module.erl \
<seealso marker="#noenv_file/2">noenv_file/2</seealso>,
<seealso marker="#noenv_forms/2">noenv_forms/2</seealso>, or
<seealso marker="#noenv_output_generated/1">noenv_output_generated/2</seealso>
- if you don't want the environment variable to be consulted
- (for instance, if you are calling the compiler recursively from
- inside a parse transform).</p>
+ if you do not want the environment variable to be consulted,
+ for example, if you are calling the compiler recursively from
+ inside a parse transform.</p>
</section>
<section>
@@ -781,31 +775,31 @@ module.beam: module.erl \
module. Inlining means that a call to a function is replaced with
the function body with the arguments replaced with the actual
values. The semantics are preserved, except if exceptions are
- generated in the inlined code. Exceptions will be reported as
+ generated in the inlined code. Exceptions are reported as
occurring in the function the body was inlined into. Also,
- <c>function_clause</c> exceptions will be converted to similar
+ <c>function_clause</c> exceptions are converted to similar
<c>case_clause</c> exceptions.</p>
- <p>When a function is inlined, the original function will be
+ <p>When a function is inlined, the original function is
kept if it is exported (either by an explicit export or if the
- <c>export_all</c> option was given) or if not all calls to the
- function were inlined.</p>
+ option <c>export_all</c> was given) or if not all calls to the
+ function are inlined.</p>
<p>Inlining does not necessarily improve running time.
- For instance, inlining may increase Beam stack usage which will
- probably be detrimental to performance for recursive functions.
+ For example, inlining can increase Beam stack use, which
+ probably is detrimental to performance for recursive functions.
</p>
- <p>Inlining is never default; it must be explicitly enabled with a
+ <p>Inlining is never default. It must be explicitly enabled with a
compiler option or a <c>-compile()</c> attribute in the source
module.</p>
- <p>To enable inlining, either use the <c>inline</c> option to
- let the compiler decide which functions to inline or
+ <p>To enable inlining, either use the option <c>inline</c> to
+ let the compiler decide which functions to inline, or
<c>{inline,[{Name,Arity},...]}</c> to have the compiler inline
all calls to the given functions. If the option is given inside
a <c>compile</c> directive in an Erlang module, <c>{Name,Arity}</c>
- may be written as <c>Name/Arity</c>.</p>
+ can be written as <c>Name/Arity</c>.</p>
<p>Example of explicit inlining:</p>
@@ -817,33 +811,30 @@ pi() -> 3.1416.
<p>Example of implicit inlining:</p>
<pre>
--compile(inline).
- </pre>
+-compile(inline).</pre>
- <p>The <c>{inline_size,Size}</c> option controls how large functions
- that are allowed to be inlined. Default is <c>24</c>, which will
- keep the size of the inlined code roughly the same as
- the un-inlined version (only relatively small functions will be
+ <p>The option <c>{inline_size,Size}</c> controls how large functions
+      are allowed to be inlined. Default is <c>24</c>, which
+ keeps the size of the inlined code roughly the same as
+ the un-inlined version (only relatively small functions are
inlined).</p>
<p>Example:</p>
<pre>
%% Aggressive inlining - will increase code size.
-compile(inline).
--compile({inline_size,100}).
- </pre>
+-compile({inline_size,100}).</pre>
</section>
<section>
- <title>Inlining of list functions</title>
- <p>The compiler can also inline a variety of list manipulation functions
- from the stdlib's lists module.</p>
+ <title>Inlining of List Functions</title>
+ <p>The compiler can also inline various list manipulation functions
+    from the module <c>lists</c> in <c>STDLIB</c>.</p>
<p>This feature must be explicitly enabled with a compiler option or a
<c>-compile()</c> attribute in the source module.</p>
- <p>To enable inlining of list functions, use the <c>inline_list_funcs</c>
- option.</p>
+ <p>To enable inlining of list functions, use option <c>inline_list_funcs</c>.</p>
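+    <p>A hypothetical module that enables the option so that the call to
+    <c>lists:map/2</c> can be inlined:</p>
+    <code>
+-module(squares).
+-export([squares/1]).
+-compile(inline_list_funcs).
+
+squares(List) ->
+    lists:map(fun(X) -> X * X end, List).</code>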
<p>The following functions are inlined:</p>
<list type="bulleted">
@@ -869,24 +860,23 @@ pi() -> 3.1416.
</section>
<section>
+ <marker id="error_information"></marker>
<title>Error Information</title>
- <p>The <c>ErrorInfo</c> mentioned above is the standard
- <c>ErrorInfo</c> structure which is returned from all IO modules.
+ <p>The <c>ErrorInfo</c> mentioned earlier is the standard
+ <c>ErrorInfo</c> structure, which is returned from all I/O modules.
It has the following format:</p>
<code>
-{ErrorLine, Module, ErrorDescriptor}
- </code>
+{ErrorLine, Module, ErrorDescriptor}</code>
- <p><c>ErrorLine</c> will be the atom <c>none</c> if the error does
- not correspond to a specific line (e.g. if the source file does
- not exist).</p>
+ <p><c>ErrorLine</c> is the atom <c>none</c> if the error does
+ not correspond to a specific line, for example, if the source file does
+ not exist.</p>
<p>A string describing the error is obtained with the following
call:</p>
<code>
-Module:format_error(ErrorDescriptor)
- </code>
+Module:format_error(ErrorDescriptor)</code>
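+    <p>A sketch of how the structures fit together (assuming the compiler
+    was called with option <c>return_errors</c>):</p>
+    <code>
+report({error,Errors,_Warnings}) ->
+    lists:foreach(
+      fun({File,Infos}) ->
+              lists:foreach(
+                fun({Line,Mod,Desc}) ->
+                        io:format("~ts:~w: ~ts~n",
+                                  [File,Line,Mod:format_error(Desc)])
+                end, Infos)
+      end, Errors).</code>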
</section>
<section>
diff --git a/lib/compiler/doc/src/notes.xml b/lib/compiler/doc/src/notes.xml
index 84ebd2f210..9b5b44f3e1 100644
--- a/lib/compiler/doc/src/notes.xml
+++ b/lib/compiler/doc/src/notes.xml
@@ -31,6 +31,58 @@
<p>This document describes the changes made to the Compiler
application.</p>
+<section><title>Compiler 5.0.4</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Matching out a map from a record and then updating the
+ record could cause a 'badarg' exception at run-time.
+ (Thanks to Dmitry Aleksandrov for reporting this bug.)</p>
+ <p>
+ Own Id: OTP-12402</p>
+ </item>
+ <item>
+ <p>The compiler would crash when compiling some complex,
+ nonsensical guards such as:</p>
+ <p> ... <c>when {{X}}, -X</c>...</p>
+ <p>
+ Own Id: OTP-12410</p>
+ </item>
+ <item>
+ <p>
	    In rare circumstances, using a binary pattern in the value
+ part of a map pattern would cause the compiler to crash.</p>
+ <p>
+ Own Id: OTP-12414</p>
+ </item>
+ <item>
+ <p>Case expressions where a map was wrapped in a tuple or
+ list such as:</p>
+ <p><c>case {a,Map} of</c><br/> <c>{a,#{k:=_}}=Tuple -&gt;
+ Tuple</c><br/> <c>end.</c></p>
+ <p>would be unsafely "optimized" to either cause an
+ exception at run-time or would return an empty map.</p>
+ <p>
+ Own Id: OTP-12451</p>
+ </item>
+ <item>
+ <p>When a variable was compared to a literal map using
+ the '<c>==</c>' operator, the compiler would change the
+ operator to '<c>=:=</c>' since it is more efficient.
+ However, this optimization is not safe if the map literal
+ has numeric keys or values. The compiler will now only do
+ the optimization if all keys and values are
+ non-numeric.</p>
+ <p>
+ Own Id: OTP-12456</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Compiler 5.0.3</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/compiler/doc/src/ref_man.xml b/lib/compiler/doc/src/ref_man.xml
index 6478ad4b11..6584e79c4e 100644
--- a/lib/compiler/doc/src/ref_man.xml
+++ b/lib/compiler/doc/src/ref_man.xml
@@ -29,7 +29,7 @@
<file>application.sgml</file>
</header>
<description>
- <p>The <em>Compiler</em> application compiles Erlang
+ <p>The <c>Compiler</c> application compiles Erlang
code to byte-code. The highly compact byte-code is executed by
the Erlang emulator.</p>
</description>
diff --git a/lib/compiler/src/Makefile b/lib/compiler/src/Makefile
index c6d09d85eb..78efc8dff0 100644
--- a/lib/compiler/src/Makefile
+++ b/lib/compiler/src/Makefile
@@ -70,6 +70,7 @@ MODULES = \
cerl \
cerl_clauses \
cerl_inline \
+ cerl_sets \
cerl_trees \
compile \
core_lib \
@@ -81,6 +82,7 @@ MODULES = \
rec_env \
sys_core_dsetel \
sys_core_fold \
+ sys_core_fold_lists \
sys_core_inline \
sys_pre_attributes \
sys_pre_expand \
@@ -158,6 +160,10 @@ $(EBIN)/beam_asm.beam: $(ESRC)/beam_asm.erl $(EGEN)/beam_opcodes.hrl
$(EBIN)/cerl_inline.beam: $(ESRC)/cerl_inline.erl
$(V_ERLC) $(ERL_COMPILE_FLAGS) +nowarn_shadow_vars -o$(EBIN) $<
+# Inlining core_parse is slow and has no benefit.
+$(EBIN)/core_parse.beam: $(EGEN)/core_parse.erl
+ $(V_ERLC) $(subst +inline,,$(ERL_COMPILE_FLAGS)) -o$(EBIN) $<
+
# ----------------------------------------------------
# Release Target
# ----------------------------------------------------
@@ -187,6 +193,7 @@ $(EBIN)/core_parse.beam: core_parse.hrl $(EGEN)/core_parse.erl
$(EBIN)/core_pp.beam: core_parse.hrl
$(EBIN)/sys_core_dsetel.beam: core_parse.hrl
$(EBIN)/sys_core_fold.beam: core_parse.hrl
+$(EBIN)/sys_core_fold_lists.beam: core_parse.hrl
$(EBIN)/sys_core_inline.beam: core_parse.hrl
$(EBIN)/sys_pre_expand.beam: ../../stdlib/include/erl_bits.hrl
$(EBIN)/v3_codegen.beam: v3_life.hrl
diff --git a/lib/compiler/src/beam_a.erl b/lib/compiler/src/beam_a.erl
index fe4f473846..410f598665 100644
--- a/lib/compiler/src/beam_a.erl
+++ b/lib/compiler/src/beam_a.erl
@@ -54,6 +54,9 @@ rename_instrs([{call_only,A,F}|Is]) ->
[{call,A,F},return|rename_instrs(Is)];
rename_instrs([{call_ext_only,A,F}|Is]) ->
[{call_ext,A,F},return|rename_instrs(Is)];
+rename_instrs([{'%live',_}|Is]) ->
+ %% When compiling from old .S files.
+ rename_instrs(Is);
rename_instrs([I|Is]) ->
[rename_instr(I)|rename_instrs(Is)];
rename_instrs([]) -> [].
@@ -88,6 +91,10 @@ rename_instr({bs_private_append=I,F,Sz,U,Src,Flags,Dst}) ->
{bs_init,F,{I,U,Flags},none,[Sz,Src],Dst};
rename_instr(bs_init_writable=I) ->
{bs_init,{f,0},I,1,[{x,0}],{x,0}};
+rename_instr({test,Op,F,[Ctx,Bits,{string,Str}]}) ->
+ %% When compiling from a .S file.
+ <<Bs:Bits/bits,_/bits>> = list_to_binary(Str),
+ {test,Op,F,[Ctx,Bs]};
rename_instr({put_map_assoc,Fail,S,D,R,L}) ->
{put_map,Fail,assoc,S,D,R,L};
rename_instr({put_map_exact,Fail,S,D,R,L}) ->
diff --git a/lib/compiler/src/beam_asm.erl b/lib/compiler/src/beam_asm.erl
index f8cf178d2e..73694b96ce 100644
--- a/lib/compiler/src/beam_asm.erl
+++ b/lib/compiler/src/beam_asm.erl
@@ -132,10 +132,10 @@ build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) ->
LiteralChunk = case beam_dict:literal_table(Dict) of
{0,[]} -> [];
{NumLiterals,LitTab0} ->
- LitTab1 = iolist_to_binary(LitTab0),
- LitTab2 = <<NumLiterals:32,LitTab1/binary>>,
- LitTab = iolist_to_binary(zlib:compress(LitTab2)),
- chunk(<<"LitT">>, <<(byte_size(LitTab2)):32>>, LitTab)
+ LitTab1 = [<<NumLiterals:32>>,LitTab0],
+ LitTab = zlib:compress(LitTab1),
+ chunk(<<"LitT">>, <<(iolist_size(LitTab1)):32>>,
+ LitTab)
end,
%% Create the line chunk.
@@ -431,45 +431,35 @@ encode_alloc_list_1([], Dict, Acc) ->
{iolist_to_binary(Acc),Dict}.
encode(Tag, N) when N < 0 ->
- encode1(Tag, negative_to_bytes(N, []));
+ encode1(Tag, negative_to_bytes(N));
encode(Tag, N) when N < 16 ->
(N bsl 4) bor Tag;
encode(Tag, N) when N < 16#800 ->
[((N bsr 3) band 2#11100000) bor Tag bor 2#00001000, N band 16#ff];
encode(Tag, N) ->
- encode1(Tag, to_bytes(N, [])).
+ encode1(Tag, to_bytes(N)).
encode1(Tag, Bytes) ->
- case length(Bytes) of
+ case iolist_size(Bytes) of
Num when 2 =< Num, Num =< 8 ->
[((Num-2) bsl 5) bor 2#00011000 bor Tag| Bytes];
Num when 8 < Num ->
[2#11111000 bor Tag, encode(?tag_u, Num-9)| Bytes]
end.
-
-to_bytes(N0, Acc) ->
- Bits = 3*128,
- case N0 bsr Bits of
- 0 ->
- to_bytes_1(N0, Acc);
- N ->
- to_bytes(N, binary_to_list(<<N0:Bits>>) ++ Acc)
- end.
-
-to_bytes_1(0, [B|_]=Done) when B < 128 -> Done;
-to_bytes_1(N, Acc) -> to_bytes(N bsr 8, [N band 16#ff|Acc]).
-
-negative_to_bytes(N0, Acc) ->
- Bits = 3*128,
- case N0 bsr Bits of
- -1 ->
- negative_to_bytes_1(N0, Acc);
- N ->
- negative_to_bytes_1(N, binary_to_list(<<N0:Bits>>) ++ Acc)
+to_bytes(N) ->
+ Bin = binary:encode_unsigned(N),
+ case Bin of
+ <<0:1,_/bits>> -> Bin;
+ <<1:1,_/bits>> -> [0,Bin]
end.
-negative_to_bytes_1(-1, [B1,_B2|_]=Done) when B1 > 127 ->
- Done;
-negative_to_bytes_1(N, Acc) ->
- negative_to_bytes_1(N bsr 8, [N band 16#ff|Acc]).
+negative_to_bytes(N) when N >= -16#8000 ->
+ <<N:16>>;
+negative_to_bytes(N) ->
+ Bytes = byte_size(binary:encode_unsigned(-N)),
+ Bin = <<N:Bytes/unit:8>>,
+ case Bin of
+ <<0:1,_/bits>> -> [16#ff,Bin];
+ <<1:1,_/bits>> -> Bin
+ end.
diff --git a/lib/compiler/src/beam_block.erl b/lib/compiler/src/beam_block.erl
index 5626aa34ab..e2639e9cac 100644
--- a/lib/compiler/src/beam_block.erl
+++ b/lib/compiler/src/beam_block.erl
@@ -61,15 +61,6 @@ blockify(Is) ->
blockify([{loop_rec,{f,Fail},{x,0}},{loop_rec_end,_Lbl},{label,Fail}|Is], Acc) ->
%% Useless instruction sequence.
blockify(Is, Acc);
-
-%% New bit syntax matching.
-blockify([{bs_save2,R,Point}=I,{bs_restore2,R,Point}|Is], Acc) ->
- blockify([I|Is], Acc);
-blockify([{bs_save2,R,Point}=I,{test,is_eq_exact,_,_}=Test,
- {bs_restore2,R,Point}|Is], Acc) ->
- blockify([I,Test|Is], Acc);
-
-%% Do other peep-hole optimizations.
blockify([{test,is_atom,{f,Fail},[Reg]}=I|
[{select,select_val,Reg,{f,Fail},
[{atom,false},{f,_}=BrFalse,
@@ -155,7 +146,7 @@ collect(remove_message) -> {set,[],[],remove_message};
collect({put_map,F,Op,S,D,R,{list,Puts}}) ->
{set,[D],[S|Puts],{alloc,R,{put_map,Op,F}}};
collect({get_map_elements,F,S,{list,Gets}}) ->
- {Ss,Ds} = beam_utils:spliteven(Gets),
+ {Ss,Ds} = beam_utils:split_even(Gets),
{set,Ds,[S|Ss],{get_map_elements,F}};
collect({'catch',R,L}) -> {set,[R],[],{'catch',L}};
collect(fclearerror) -> {set,[],[],fclearerror};
@@ -184,7 +175,7 @@ embed_lines([], Acc) -> Acc.
opt_blocks([{block,Bl0}|Is]) ->
%% The live annotation at the beginning is not useful.
- [{'%live',_}|Bl] = Bl0,
+ [{'%live',_,_}|Bl] = Bl0,
[{block,opt_block(Bl)}|opt_blocks(Is)];
opt_blocks([I|Is]) ->
[I|opt_blocks(Is)];
@@ -252,13 +243,6 @@ combine_alloc({_,Ns,Nh1,Init}, {_,nostack,Nh2,[]}) ->
%% opt([Instruction]) -> [Instruction]
%% Optimize the instruction stream inside a basic block.
-opt([{set,[Dst],As,{bif,Bif,Fail}}=I1,
- {set,[Dst],[Dst],{bif,'not',Fail}}=I2|Is]) ->
- %% Get rid of the 'not' if the operation can be inverted.
- case inverse_comp_op(Bif) of
- none -> [I1,I2|opt(Is)];
- RevBif -> [{set,[Dst],As,{bif,RevBif,Fail}}|opt(Is)]
- end;
opt([{set,[X],[X],move}|Is]) -> opt(Is);
opt([{set,_,_,{line,_}}=Line1,
{set,[D1],[{integer,Idx1},Reg],{bif,element,{f,0}}}=I1,
@@ -269,7 +253,7 @@ opt([{set,_,_,{line,_}}=Line1,
opt([{set,Ds0,Ss,Op}|Is0]) ->
{Ds,Is} = opt_moves(Ds0, Is0),
[{set,Ds,Ss,Op}|opt(Is)];
-opt([{'%live',_}=I|Is]) ->
+opt([{'%live',_,_}=I|Is]) ->
[I|opt(Is)];
opt([]) -> [].
@@ -428,18 +412,6 @@ x_live([{x,N}|Rs], Regs) -> x_live(Rs, Regs bor (1 bsl N));
x_live([_|Rs], Regs) -> x_live(Rs, Regs);
x_live([], Regs) -> Regs.
-%% inverse_comp_op(Op) -> none|RevOp
-
-inverse_comp_op('=:=') -> '=/=';
-inverse_comp_op('=/=') -> '=:=';
-inverse_comp_op('==') -> '/=';
-inverse_comp_op('/=') -> '==';
-inverse_comp_op('>') -> '=<';
-inverse_comp_op('<') -> '>=';
-inverse_comp_op('>=') -> '<';
-inverse_comp_op('=<') -> '>';
-inverse_comp_op(_) -> none.
-
%%%
%%% Evaluation of constant bit fields.
%%%
diff --git a/lib/compiler/src/beam_bool.erl b/lib/compiler/src/beam_bool.erl
index 5a4621dc37..5ed9c16d61 100644
--- a/lib/compiler/src/beam_bool.erl
+++ b/lib/compiler/src/beam_bool.erl
@@ -126,44 +126,53 @@ bopt_block(Reg, Fail, OldIs, [{block,Bl0}|Acc0], St0) ->
%% There was a reference to a boolean expression
%% from inside a protected block (try/catch), to
%% a boolean expression outside.
- throw:protected_barrier ->
+ throw:protected_barrier ->
failed;
- %% The 'xor' operator was used. We currently don't
- %% find it worthwile to translate 'xor' operators
- %% (the code would be clumsy).
- throw:'xor' ->
+ %% The 'xor' operator was used. We currently don't
+	      %% find it worthwhile to translate 'xor' operators
+ %% (the code would be clumsy).
+ throw:'xor' ->
failed;
- %% The block does not contain a boolean expression,
- %% but only a call to a guard BIF.
- %% For instance: ... when element(1, T) ->
- throw:not_boolean_expr ->
+ %% The block does not contain a boolean expression,
+ %% but only a call to a guard BIF.
+ %% For instance: ... when element(1, T) ->
+ throw:not_boolean_expr ->
failed;
- %% The block contains a 'move' instruction that could
- %% not be handled.
- throw:move ->
+ %% The block contains a 'move' instruction that could
+ %% not be handled.
+ throw:move ->
failed;
- %% The optimization is not safe. (A register
- %% used by the instructions following the
- %% optimized code is either not assigned a
- %% value at all or assigned a different value.)
- throw:all_registers_not_killed ->
+ %% The optimization is not safe. (A register
+ %% used by the instructions following the
+ %% optimized code is either not assigned a
+ %% value at all or assigned a different value.)
+ throw:all_registers_not_killed ->
failed;
- throw:registers_used ->
+ throw:registers_used ->
failed;
- %% A protected block refered to the value
- %% returned by another protected block,
- %% probably because the Core Erlang code
- %% used nested try/catches in the guard.
- %% (v3_core never produces nested try/catches
- %% in guards, so it must have been another
- %% Core Erlang translator.)
- throw:protected_violation ->
+	      %% A protected block referred to the value
+ %% returned by another protected block,
+ %% probably because the Core Erlang code
+ %% used nested try/catches in the guard.
+ %% (v3_core never produces nested try/catches
+ %% in guards, so it must have been another
+ %% Core Erlang translator.)
+ throw:protected_violation ->
+ failed;
+
+ %% Failed to work out the live registers for a GC
+ %% BIF. For example, if the number of live registers
+ %% needed to be 4 because {x,3} was a source register,
+ %% but {x,2} was not known to be initialized, this
+ %% exception would be thrown.
+ throw:gc_bif_alloc_failure ->
failed
+
end
end.
@@ -665,10 +674,16 @@ put_reg_1(V, [], I) -> [{I,V}].
fetch_reg(V, [{I,V}|_]) -> {x,I};
fetch_reg(V, [_|SRs]) -> fetch_reg(V, SRs).
-live_regs(Regs) ->
- foldl(fun ({I,_}, _) ->
- I
- end, -1, Regs)+1.
+live_regs([{_,reserved}|_]) ->
+ %% We are not sure that this register is initialized, so we must
+ %% abort the optimization.
+ throw(gc_bif_alloc_failure);
+live_regs([{I,_}]) ->
+ I+1;
+live_regs([{_,_}|Regs]) ->
+ live_regs(Regs);
+live_regs([]) ->
+ 0.
%%%
@@ -772,6 +787,9 @@ is_not_used(R, Is, Label, #st{ll=Ll}) ->
initialized_regs(Is) ->
initialized_regs(Is, ordsets:new()).
+initialized_regs([{set,Dst,_Src,{alloc,Live,_}}|_], Regs0) ->
+ Regs = add_init_regs(free_vars_regs(Live), Regs0),
+ add_init_regs(Dst, Regs);
initialized_regs([{set,Dst,Src,_}|Is], Regs) ->
initialized_regs(Is, add_init_regs(Dst, add_init_regs(Src, Regs)));
initialized_regs([{test,_,_,Src}|Is], Regs) ->
diff --git a/lib/compiler/src/beam_bsm.erl b/lib/compiler/src/beam_bsm.erl
index d54c2a9fde..ee3e88959d 100644
--- a/lib/compiler/src/beam_bsm.erl
+++ b/lib/compiler/src/beam_bsm.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -20,7 +20,7 @@
-module(beam_bsm).
-export([module/2,format_error/1]).
--import(lists, [member/2,foldl/3,reverse/1,sort/1,all/2,dropwhile/2]).
+-import(lists, [member/2,foldl/3,reverse/1,sort/1,all/2]).
%%%
%%% We optimize bit syntax matching where the tail end of a binary is
@@ -242,6 +242,12 @@ btb_reaches_match_2([{bif,_,{f,F},Ss,Dst}=I|Is], Regs0, D0) ->
Regs = btb_kill([Dst], Regs0),
D = btb_follow_branch(F, Regs, D0),
btb_reaches_match_1(Is, Regs, D);
+btb_reaches_match_2([{get_map_elements,{f,F},Src,{list,Ls}}=I|Is], Regs0, D0) ->
+ {Ss,Ds} = beam_utils:split_even(Ls),
+ btb_ensure_not_used([Src|Ss], I, Regs0),
+ Regs = btb_kill(Ds, Regs0),
+ D = btb_follow_branch(F, Regs, D0),
+ btb_reaches_match_1(Is, Regs, D);
btb_reaches_match_2([{test,bs_start_match2,{f,F},Live,[Ctx,_],Ctx}=I|Is],
Regs0, D0) ->
CtxRegs = btb_context_regs(Regs0),
@@ -542,16 +548,13 @@ btb_context_regs_1(Regs, N, Tag, Acc) ->
%% a binary. MustSave is true if the function may pass the match
%% context to the bs_context_to_binary instruction (in which case
%% the current position in the binary must have saved into the
-%% start position using "bs_save_2 Ctx start".
+%% start position using "bs_save_2 Ctx start").
btb_index(Fs) ->
btb_index_1(Fs, []).
btb_index_1([{function,_,_,Entry,Is0}|Fs], Acc0) ->
- [{label,Entry}|Is] =
- dropwhile(fun({label,L}) when L =:= Entry -> false;
- (_) -> true
- end, Is0),
+ Is = drop_to_label(Is0, Entry),
Acc = btb_index_2(Is, Entry, false, Acc0),
btb_index_1(Fs, Acc);
btb_index_1([], Acc) -> gb_trees:from_orddict(sort(Acc)).
@@ -566,6 +569,9 @@ btb_index_2(Is0, Entry, _, Acc) ->
throw:none -> Acc
end.
+drop_to_label([{label,L}|Is], L) -> Is;
+drop_to_label([_|Is], L) -> drop_to_label(Is, L).
+
btb_index_find_start_match([{test,_,{f,F},_},{bs_context_to_binary,_}|Is]) ->
btb_index_find_label(Is, F);
btb_index_find_start_match(_) ->
@@ -615,7 +621,7 @@ collect_warnings_instr([_|Is], D, Acc) ->
collect_warnings_instr([], _, Acc) -> Acc.
add_warning(Term, Anno, Ws) ->
- Line = abs(get_line(Anno)),
+ Line = get_line(Anno),
File = get_file(Anno),
[{File,[{Line,?MODULE,Term}]}|Ws].
diff --git a/lib/compiler/src/beam_clean.erl b/lib/compiler/src/beam_clean.erl
index b653998252..1d26993103 100644
--- a/lib/compiler/src/beam_clean.erl
+++ b/lib/compiler/src/beam_clean.erl
@@ -184,14 +184,6 @@ function_replace([{function,Name,Arity,Entry,Asm0}|Fs], Dict, Acc) ->
function_replace(Fs, Dict, [{function,Name,Arity,Entry,Asm}|Acc]);
function_replace([], _, Acc) -> Acc.
-replace([{test,bs_match_string=Op,{f,Lbl},[Ctx,Bin0]}|Is], Acc, D) ->
- Bits = bit_size(Bin0),
- Bin = case Bits rem 8 of
- 0 -> Bin0;
- Rem -> <<Bin0/bitstring,0:(8-Rem)>>
- end,
- I = {test,Op,{f,label(Lbl, D)},[Ctx,Bits,{string,binary_to_list(Bin)}]},
- replace(Is, [I|Acc], D);
replace([{test,Test,{f,Lbl},Ops}|Is], Acc, D) ->
replace(Is, [{test,Test,{f,label(Lbl, D)},Ops}|Acc], D);
replace([{test,Test,{f,Lbl},Live,Ops,Dst}|Is], Acc, D) ->
@@ -234,31 +226,6 @@ replace([{bs_init,{f,Lbl},Info,Live,Ss,Dst}|Is], Acc, D) when Lbl =/= 0 ->
replace(Is, [{bs_init,{f,label(Lbl, D)},Info,Live,Ss,Dst}|Acc], D);
replace([{bs_put,{f,Lbl},Info,Ss}|Is], Acc, D) when Lbl =/= 0 ->
replace(Is, [{bs_put,{f,label(Lbl, D)},Info,Ss}|Acc], D);
-replace([{bs_init2,{f,Lbl},Sz,Words,R,F,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_init2,{f,label(Lbl, D)},Sz,Words,R,F,Dst}|Acc], D);
-replace([{bs_init_bits,{f,Lbl},Sz,Words,R,F,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_init_bits,{f,label(Lbl, D)},Sz,Words,R,F,Dst}|Acc], D);
-replace([{bs_put_integer,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_put_integer,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
-replace([{bs_put_utf8=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D);
-replace([{bs_put_utf16=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D);
-replace([{bs_put_utf32=I,{f,Lbl},Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Fl,Val}|Acc], D);
-replace([{bs_put_binary,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_put_binary,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
-replace([{bs_put_float,{f,Lbl},Bits,Unit,Fl,Val}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_put_float,{f,label(Lbl, D)},Bits,Unit,Fl,Val}|Acc], D);
-replace([{bs_add,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{bs_add,{f,label(Lbl, D)},Src,Dst}|Acc], D);
-replace([{bs_append,{f,Lbl},_,_,_,_,_,_,_}=I0|Is], Acc, D) when Lbl =/= 0 ->
- I = setelement(2, I0, {f,label(Lbl, D)}),
- replace(Is, [I|Acc], D);
-replace([{bs_utf8_size=I,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Src,Dst}|Acc], D);
-replace([{bs_utf16_size=I,{f,Lbl},Src,Dst}|Is], Acc, D) when Lbl =/= 0 ->
- replace(Is, [{I,{f,label(Lbl, D)},Src,Dst}|Acc], D);
replace([{put_map=I,{f,Lbl},Op,Src,Dst,Live,List}|Is], Acc, D)
when Lbl =/= 0 ->
replace(Is, [{I,{f,label(Lbl, D)},Op,Src,Dst,Live,List}|Acc], D);
diff --git a/lib/compiler/src/beam_dead.erl b/lib/compiler/src/beam_dead.erl
index b15adfa889..bbe607cf19 100644
--- a/lib/compiler/src/beam_dead.erl
+++ b/lib/compiler/src/beam_dead.erl
@@ -21,112 +21,10 @@
-export([module/2]).
-%%% The following optimisations are done:
-%%%
-%%% (1) In this code
-%%%
-%%% move DeadValue {x,0}
-%%% jump L2
-%%% .
-%%% .
-%%% .
-%%% L2: move Anything {x,0}
-%%% .
-%%% .
-%%% .
-%%%
-%%% the first assignment to {x,0} has no effect (is dead),
-%%% so it can be removed. Besides removing a move instruction,
-%%% if the move was preceeded by a label, the resulting code
-%%% will look this
-%%%
-%%% L1: jump L2
-%%% .
-%%% .
-%%% .
-%%% L2: move Anything {x,0}
-%%% .
-%%% .
-%%% .
-%%%
-%%% which can be further optimized by the jump optimizer (beam_jump).
-%%%
-%%% (2) In this code
-%%%
-%%% L1: move AtomLiteral {x,0}
-%%% jump L2
-%%% .
-%%% .
-%%% .
-%%% L2: test is_atom FailLabel {x,0}
-%%% select_val {x,0}, FailLabel [... AtomLiteral => L3...]
-%%% .
-%%% .
-%%% .
-%%% L3: ...
-%%%
-%%% FailLabel: ...
-%%%
-%%% the first code fragment can be changed to
-%%%
-%%% L1: move AtomLiteral {x,0}
-%%% jump L3
-%%%
-%%% If the literal is not included in the table of literals in the
-%%% select_val instruction, the first code fragment will instead be
-%%% rewritten as:
-%%%
-%%% L1: move AtomLiteral {x,0}
-%%% jump FailLabel
-%%%
-%%% The move instruction will be removed by optimization (1) above,
-%%% if the code following the L3 label overwrites {x,0}.
-%%%
-%%% The code following the L2 label will be kept, but it will be removed later
-%%% by the jump optimizer.
-%%%
-%%% (3) In this code
-%%%
-%%% test is_eq_exact ALabel Src Dst
-%%% move Src Dst
-%%%
-%%% the move instruction can be removed.
-%%% Same thing for
-%%%
-%%% test is_nil ALabel Dst
-%%% move [] Dst
-%%%
-%%%
-%%% (4) In this code
-%%%
-%%% select_val {x,Reg}, ALabel [... Literal => L1...]
-%%% .
-%%% .
-%%% .
-%%% L1: move Literal {x,Reg}
-%%%
-%%% we can remove the move instruction.
-%%%
-%%% (5) In the following code
-%%%
-%%% bif '=:=' Fail Src1 Src2 {x,0}
-%%% jump L1
-%%% .
-%%% .
-%%% .
-%%% L1: select_val {x,0}, ALabel [... true => L2..., ...false => L3...]
-%%% .
-%%% .
-%%% .
-%%% L2: .... L3: ....
-%%%
-%%% the first two instructions can be replaced with
-%%%
-%%% test is_eq_exact L3 Src1 Src2
-%%% jump L2
-%%%
-%%% provided that {x,0} is killed at both L2 and L3.
-%%%
+%%% Dead code is code that is executed but has no effect. This
+%%% optimization pass either removes dead code or jumps around it,
+%%% potentially making it unreachable and a target for the
+%%% beam_jump pass.
-import(lists, [mapfoldl/3,reverse/1]).
@@ -173,12 +71,39 @@ move_move_into_block([I|Is], Acc) ->
move_move_into_block([], Acc) -> reverse(Acc).
%%%
-%%% Scan instructions in execution order and remove dead code.
+%%% Scan instructions in execution order and remove redundant 'move'
+%%% instructions. 'move' instructions are redundant if we know that
+%%% the register already contains the value being assigned, as in the
+%%% following code:
+%%%
+%%% test is_eq_exact SomeLabel Src Dst
+%%% move Src Dst
+%%%
+%%% or in:
+%%%
+%%% test is_nil SomeLabel Dst
+%%% move nil Dst
+%%%
+%%% or in:
+%%%
+%%% select_val Register FailLabel [... Literal => L1...]
+%%% .
+%%% .
+%%% .
+%%% L1: move Literal Register
+%%%
+%%% Also add extra labels to help the second backward pass.
%%%
forward(Is, Lc) ->
- forward(Is, gb_trees:empty(), Lc, []).
-
+ forward(Is, #{}, Lc, []).
+
+forward([{move,_,_}=Move|[{label,L}|_]=Is], D, Lc, Acc) ->
+ %% move/2 followed by jump/1 is optimized by backward/3.
+ forward([Move,{jump,{f,L}}|Is], D, Lc, Acc);
+forward([{bif,_,_,_,_}=Bif|[{label,L}|_]=Is], D, Lc, Acc) ->
+ %% bif/4 followed by jump/1 is optimized by backward/3.
+ forward([Bif,{jump,{f,L}}|Is], D, Lc, Acc);
forward([{block,[]}|Is], D, Lc, Acc) ->
%% Empty blocks can prevent optimizations.
forward(Is, D, Lc, Acc);
@@ -190,21 +115,24 @@ forward([{label,Lbl}=LblI,{block,[{set,[Dst],[Lit],move}|BlkIs]}=Blk|Is], D, Lc,
%% cannot be reached in any other way than through the select_val/3
%% instruction (i.e. there can be no fallthrough to such label and
%% it cannot be referenced by, for example, a jump/1 instruction).
- Block = case gb_trees:lookup({Lbl,Dst}, D) of
- {value,Lit} -> {block,BlkIs}; %Safe to remove move instruction.
- _ -> Blk %Must keep move instruction.
- end,
+ Key = {Lbl,Dst},
+ Block = case D of
+ #{Key := Lit} -> {block,BlkIs}; %Safe to remove move instruction.
+ _ -> Blk %Must keep move instruction.
+ end,
forward([Block|Is], D, Lc, [LblI|Acc]);
forward([{label,Lbl}=LblI|[{move,Lit,Dst}|Is1]=Is0], D, Lc, Acc) ->
%% Assumption: The target labels in a select_val/3 instruction
%% cannot be reached in any other way than through the select_val/3
%% instruction (i.e. there can be no fallthrough to such label and
%% it cannot be referenced by, for example, a jump/1 instruction).
- Is = case gb_trees:lookup({Lbl,Dst}, D) of
- {value,Lit} -> Is1; %Safe to remove move instruction.
- _ -> Is0 %Keep move instruction.
+ Is = case maps:find({Lbl,Dst}, D) of
+ {ok,Lit} -> Is1; %Safe to remove move instruction.
+ _ -> Is0 %Keep move instruction.
end,
forward(Is, D, Lc, [LblI|Acc]);
+forward([{test,is_eq_exact,_,[Same,Same]}|Is], D, Lc, Acc) ->
+ forward(Is, D, Lc, Acc);
forward([{test,is_eq_exact,_,[Dst,Src]}=I,
{block,[{set,[Dst],[Src],move}|Bl]}|Is], D, Lc, Acc) ->
forward([I,{block,Bl}|Is], D, Lc, Acc);
@@ -215,15 +143,13 @@ forward([{test,is_eq_exact,_,[Dst,Src]}=I,{move,Src,Dst}|Is], D, Lc, Acc) ->
forward([I|Is], D, Lc, Acc);
forward([{test,is_nil,_,[Dst]}=I,{move,nil,Dst}|Is], D, Lc, Acc) ->
forward([I|Is], D, Lc, Acc);
-forward([{test,is_eq_exact,_,_}=I|Is], D, Lc, Acc) ->
- case Is of
- [{label,_}|_] -> forward(Is, D, Lc, [I|Acc]);
- _ -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
- end;
-forward([{test,is_ne_exact,_,_}=I|Is], D, Lc, Acc) ->
- case Is of
- [{label,_}|_] -> forward(Is, D, Lc, [I|Acc]);
- _ -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
+forward([{test,_,_,_}=I|Is]=Is0, D, Lc, Acc) ->
+    %% Help the second, backward pass by inserting labels after
+ %% relational operators so that they can be skipped if they are
+ %% known to be true.
+ case useful_to_insert_label(Is0) of
+ false -> forward(Is, D, Lc, [I|Acc]);
+ true -> forward(Is, D, Lc+1, [{label,Lc},I|Acc])
end;
forward([I|Is], D, Lc, Acc) ->
forward(Is, D, Lc, [I|Acc]);
@@ -231,17 +157,57 @@ forward([], _, Lc, Acc) -> {Acc,Lc}.
update_value_dict([Lit,{f,Lbl}|T], Reg, D0) ->
Key = {Lbl,Reg},
- D = case gb_trees:lookup(Key, D0) of
- none -> gb_trees:insert(Key, Lit, D0); %New.
- {value,inconsistent} -> D0; %Inconsistent.
- {value,_} -> gb_trees:update(Key, inconsistent, D0)
- end,
+ D = case D0 of
+ #{Key := inconsistent} -> D0;
+ #{Key := _} -> D0#{Key := inconsistent};
+ _ -> D0#{Key => Lit}
+ end,
update_value_dict(T, Reg, D);
update_value_dict([], _, D) -> D.
+useful_to_insert_label([_,{label,_}|_]) ->
+ false;
+useful_to_insert_label([{test,Op,_,_}|_]) ->
+ case Op of
+ is_lt -> true;
+ is_ge -> true;
+ is_eq_exact -> true;
+ is_ne_exact -> true;
+ _ -> false
+ end.
+
+%%%
+%%% Scan instructions in reverse execution order and try to
+%%% shortcut branch instructions.
+%%%
+%%% For example, in this code:
%%%
-%%% Scan instructions in reverse execution order and remove dead code.
+%%% move Literal Register
+%%% jump L1
+%%% .
+%%% .
+%%% .
+%%% L1: test is_{integer,atom} FailLabel Register
+%%% select_val {x,0} FailLabel [... Literal => L2...]
+%%% .
+%%% .
+%%% .
+%%% L2: ...
%%%
+%%% the 'select_val' instruction will always transfer control to L2,
+%%% so we can just as well jump to L2 directly by rewriting the
+%%% first part of the sequence like this:
+%%%
+%%% move Literal Register
+%%% jump L2
+%%%
+%%% If register Register is killed at label L2, we can remove the
+%%% 'move' instruction, leaving just the 'jump' instruction:
+%%%
+%%% jump L2
+%%%
+%%% These transformations may leave parts of the code unreachable.
+%%% The beam_jump pass will remove the unreachable code.
backward(Is, D) ->
backward(Is, D, []).
@@ -277,15 +243,8 @@ backward([{select,select_val,Reg,{f,Fail0},List0}|Is], D, Acc) ->
Fail = shortcut_bs_test(Fail1, Is, D),
Sel = {select,select_val,Reg,{f,Fail},List},
backward(Is, D, [Sel|Acc]);
-backward([{jump,{f,To0}},{move,Src,Reg}=Move0|Is], D, Acc) ->
- {To,Move} = case Src of
- {atom,Val0} ->
- To1 = shortcut_select_label(To0, Reg, Val0, D),
- {To2,Val} = shortcut_boolean_label(To1, Reg, Val0, D),
- {To2,{move,{atom,Val},Reg}};
- _ ->
- {shortcut_label(To0, D),Move0}
- end,
+backward([{jump,{f,To0}},{move,Src,Reg}=Move|Is], D, Acc) ->
+ To = shortcut_select_label(To0, Reg, Src, D),
Jump = {jump,{f,To}},
case beam_utils:is_killed_at(Reg, To, D) of
false -> backward([Move|Is], D, [Jump|Acc]);
@@ -297,32 +256,39 @@ backward([{jump,{f,To}}=J|[{bif,Op,_,Ops,Reg}|Is]=Is0], D, Acc) ->
catch
throw:not_possible -> backward(Is0, D, [J|Acc])
end;
+backward([{test,bs_start_match2,F,_,[R,_],Ctxt}=I|Is], D,
+ [{test,bs_match_string,F,[Ctxt,Bs]},
+ {test,bs_test_tail2,F,[Ctxt,0]}|Acc0]=Acc) ->
+ case beam_utils:is_killed(Ctxt, Acc0, D) of
+ true ->
+ Eq = {test,is_eq_exact,F,[R,{literal,Bs}]},
+ backward(Is, D, [Eq|Acc0]);
+ false ->
+ backward(Is, D, [I|Acc])
+ end;
backward([{test,bs_start_match2,{f,To0},Live,[Src|_]=Info,Dst}|Is], D, Acc) ->
To = shortcut_bs_start_match(To0, Src, D),
I = {test,bs_start_match2,{f,To},Live,Info,Dst},
backward(Is, D, [I|Acc]);
-backward([{test,is_eq_exact,{f,To0},[Reg,{atom,Val}]=Ops}|Is], D, Acc) ->
- To1 = shortcut_bs_test(To0, Is, D),
- To = shortcut_fail_label(To1, Reg, Val, D),
- I = combine_eqs(To, Ops, D, Acc),
- backward(Is, D, [I|Acc]);
backward([{test,Op,{f,To0},Ops0}|Is], D, Acc) ->
To1 = shortcut_bs_test(To0, Is, D),
To2 = shortcut_label(To1, D),
+ To3 = shortcut_rel_op(To2, Op, Ops0, D),
+
%% Try to shortcut a repeated test:
%%
%% test Op {f,Fail1} Operands test Op {f,Fail2} Operands
%% . . . ==> ...
%% Fail1: test Op {f,Fail2} Operands Fail1: test Op {f,Fail2} Operands
%%
- To = case beam_utils:code_at(To2, D) of
- [{test,Op,{f,To3},Ops}|_] ->
+ To = case beam_utils:code_at(To3, D) of
+ [{test,Op,{f,To4},Ops}|_] ->
case equal_ops(Ops0, Ops) of
- true -> To3;
- false -> To2
+ true -> To4;
+ false -> To3
end;
_Code ->
- To2
+ To3
end,
I = case Op of
is_eq_exact -> combine_eqs(To, Ops0, D, Acc);
@@ -367,8 +333,8 @@ equal_ops([Op|T0], [Op|T1]) ->
equal_ops([], []) -> true;
equal_ops(_, _) -> false.
-shortcut_select_list([{_,Val}=Lit,{f,To0}|T], Reg, D, Acc) ->
- To = shortcut_select_label(To0, Reg, Val, D),
+shortcut_select_list([Lit,{f,To0}|T], Reg, D, Acc) ->
+ To = shortcut_select_label(To0, Reg, Lit, D),
shortcut_select_list(T, Reg, D, [{f,To},Lit|Acc]);
shortcut_select_list([], _, _, Acc) -> reverse(Acc).
@@ -378,58 +344,29 @@ shortcut_label(To0, D) ->
_ -> To0
end.
-shortcut_select_label(To0, Reg, Val, D) ->
- case beam_utils:code_at(To0, D) of
- [{jump,{f,To}}|_] ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_atom,_,[Reg]},{select,select_val,Reg,{f,Fail},Map}|_] ->
- To = find_select_val(Map, Val, Fail),
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,_},[Reg,{atom,Val}]},{label,To}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,_},[Reg,{atom,Val}]},{jump,{f,To}}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,To},[Reg,{atom,AnotherVal}]}|_]
- when is_atom(Val), Val =/= AnotherVal ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_ne_exact,{f,To},[Reg,{atom,Val}]}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_ne_exact,{f,_},[Reg,{atom,_}]},{label,To}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- [{test,is_tuple,{f,To},[Reg]}|_] when is_atom(Val) ->
- shortcut_select_label(To, Reg, Val, D);
- _ ->
- To0
- end.
-
-shortcut_fail_label(To0, Reg, Val, D) ->
- case beam_utils:code_at(To0, D) of
- [{jump,{f,To}}|_] ->
- shortcut_fail_label(To, Reg, Val, D);
- [{test,is_eq_exact,{f,To},[Reg,{atom,Val}]}|_] when is_atom(Val) ->
- shortcut_fail_label(To, Reg, Val, D);
- _ ->
- To0
- end.
+shortcut_select_label(To, Reg, Lit, D) ->
+ shortcut_rel_op(To, is_ne_exact, [Reg,Lit], D).
-shortcut_boolean_label(To0, Reg, Bool0, D) when is_boolean(Bool0) ->
- case beam_utils:code_at(To0, D) of
- [{line,_},{bif,'not',_,[Reg],Reg},{jump,{f,To}}|_] ->
- Bool = not Bool0,
- {shortcut_select_label(To, Reg, Bool, D),Bool};
- _ ->
- {To0,Bool0}
- end;
-shortcut_boolean_label(To, _, Bool, _) -> {To,Bool}.
-
-find_select_val([{_,Val},{f,To}|_], Val, _) -> To;
-find_select_val([{_,_}, {f,_}|T], Val, Fail) ->
- find_select_val(T, Val, Fail);
-find_select_val([], _, Fail) -> Fail.
+%% Replace a comparison operator with a test instruction and a jump.
+%% For example, if we have this code:
+%%
+%% bif '=:=' Fail Src1 Src2 {x,0}
+%% jump L1
+%% .
+%% .
+%% .
+%% L1: select_val {x,0} FailLabel [... true => L2..., ...false => L3...]
+%%
+%% the first two instructions can be replaced with
+%%
+%% test is_eq_exact L3 Src1 Src2
+%% jump L2
+%%
+%% provided that {x,0} is killed at both L2 and L3.
replace_comp_op(To, Reg, Op, Ops, D) ->
- False = comp_op_find_shortcut(To, Reg, false, D),
- True = comp_op_find_shortcut(To, Reg, true, D),
+ False = comp_op_find_shortcut(To, Reg, {atom,false}, D),
+ True = comp_op_find_shortcut(To, Reg, {atom,true}, D),
[bif_to_test(Op, Ops, False),{jump,{f,True}}].
comp_op_find_shortcut(To0, Reg, Val, D) ->
@@ -461,9 +398,9 @@ not_possible() -> throw(not_possible).
%%
%% is_eq_exact F1 Reg Lit1 select_val Reg F2 [ Lit1 L1
%% L1: . Lit2 L2 ]
-%% .
-%% . ==>
-%% .
+%% .
+%% . ==>
+%% .
%% F1: is_eq_exact F2 Reg Lit2 F1: is_eq_exact F2 Reg Lit2
%% L2: .... L2:
%%
@@ -488,31 +425,26 @@ remove_from_list(Lit, [Val,{f,_}=Fail|T]) ->
[Val,Fail|remove_from_list(Lit, T)];
remove_from_list(_, []) -> [].
-%% shortcut_bs_test(TargetLabel, [Instruction], D) -> TargetLabel'
-%% Try to shortcut the failure label for a bit syntax matching.
-%% We know that the binary contains at least Bits bits after
-%% the latest save point.
+%% shortcut_bs_test(TargetLabel, ReversedInstructions, D) -> TargetLabel'
+%% Try to shortcut the failure label for bit syntax matching.
shortcut_bs_test(To, Is, D) ->
shortcut_bs_test_1(beam_utils:code_at(To, D), Is, To, D).
-shortcut_bs_test_1([{bs_restore2,Reg,SavePoint}|Is], PrevIs, To, D) ->
- shortcut_bs_test_2(Is, {Reg,SavePoint}, PrevIs, To, D);
-shortcut_bs_test_1([_|_], _, To, _) -> To.
-
-shortcut_bs_test_2([{label,_}|Is], Save, PrevIs, To, D) ->
- shortcut_bs_test_2(Is, Save, PrevIs, To, D);
-shortcut_bs_test_2([{test,bs_test_tail2,{f,To},[_,TailBits]}|_],
- {Reg,_Point} = RP, PrevIs, To0, D) ->
- case count_bits_matched(PrevIs, RP, 0) of
+shortcut_bs_test_1([{bs_restore2,Reg,SavePoint},
+ {label,_},
+ {test,bs_test_tail2,{f,To},[_,TailBits]}|_],
+ PrevIs, To0, D) ->
+ case count_bits_matched(PrevIs, {Reg,SavePoint}, 0) of
Bits when Bits > TailBits ->
%% This instruction will fail. We know because a restore has been
- %% done from the previous point SavePoint in the binary, and we also know
- %% that the binary contains at least Bits bits from SavePoint.
+ %% done from the previous point SavePoint in the binary, and we
+ %% also know that the binary contains at least Bits bits from
+ %% SavePoint.
%%
%% Since we will skip a bs_restore2 if we shortcut to label To,
- %% we must now make sure that code at To does not depend on the position
- %% in the context in any way.
+ %% we must now make sure that code at To does not depend on
+ %% the position in the context in any way.
case shortcut_bs_pos_used(To, Reg, D) of
false -> To;
true -> To0
@@ -520,15 +452,26 @@ shortcut_bs_test_2([{test,bs_test_tail2,{f,To},[_,TailBits]}|_],
_Bits ->
To0
end;
-shortcut_bs_test_2([_|_], _, _, To, _) -> To.
+shortcut_bs_test_1([_|_], _, To, _) -> To.
+%% count_bits_matched(ReversedInstructions, SavePoint, Bits) -> Bits'
+%% Given a reversed instruction stream, determine the minimum number
+%% of bits that will be matched by bit syntax instructions up to the
+%% given save point.
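+%%
+%% For example (illustrative), given the reversed stream
+%%
+%%   [{test,bs_get_integer2,{f,99},2,
+%%     [Ctx,{integer,16},1,{field_flags,[]}],{x,1}},
+%%    {bs_save2,Ctx,SavePoint}]
+%%
+%% and the save point {Ctx,SavePoint}, at least 16 bits (16*1) must
+%% have been matched since the save point.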
+
+count_bits_matched([{test,bs_get_utf8,{f,_},_,_,_}|Is], SavePoint, Bits) ->
+ count_bits_matched(Is, SavePoint, Bits+8);
+count_bits_matched([{test,bs_get_utf16,{f,_},_,_,_}|Is], SavePoint, Bits) ->
+ count_bits_matched(Is, SavePoint, Bits+16);
+count_bits_matched([{test,bs_get_utf32,{f,_},_,_,_}|Is], SavePoint, Bits) ->
+ count_bits_matched(Is, SavePoint, Bits+32);
count_bits_matched([{test,_,_,_,[_,Sz,U,{field_flags,_}],_}|Is], SavePoint, Bits) ->
case Sz of
{integer,N} -> count_bits_matched(Is, SavePoint, Bits+N*U);
_ -> count_bits_matched(Is, SavePoint, Bits)
end;
-count_bits_matched([{test,bs_match_string,_,[_,Bits,_]}|Is], SavePoint, Bits0) ->
- count_bits_matched(Is, SavePoint, Bits0+Bits);
+count_bits_matched([{test,bs_match_string,_,[_,Bs]}|Is], SavePoint, Bits) ->
+ count_bits_matched(Is, SavePoint, Bits+bit_size(Bs));
count_bits_matched([{test,_,_,_}|Is], SavePoint, Bits) ->
count_bits_matched(Is, SavePoint, Bits);
count_bits_matched([{bs_save2,Reg,SavePoint}|_], {Reg,SavePoint}, Bits) ->
@@ -545,20 +488,332 @@ shortcut_bs_pos_used_1(Is, Reg, D) ->
not beam_utils:is_killed(Reg, Is, D).
%% shortcut_bs_start_match(TargetLabel, Reg) -> TargetLabel
-%% A failing bs_start_match2 instruction means that the source
-%% cannot be a binary, so there is no need to jump bs_context_to_binary/1
-%% or another bs_start_match2 instruction.
+%% A failing bs_start_match2 instruction means that the source (Reg)
+%% cannot be a binary. That means that it is safe to skip
+%% bs_context_to_binary instructions operating on Reg, and
+%% bs_start_match2 instructions operating on Reg.
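+%%
+%% For example (illustrative), if the code at the failure label is
+%%
+%%   {bs_context_to_binary,{x,0}}
+%%   {test,bs_start_match2,{f,200},2,[{x,0},0],{x,0}}
+%%
+%% the branch can be redirected to label 200 (or further, if the code
+%% at label 200 can be skipped in the same way): bs_context_to_binary
+%% has no effect on a non-binary term, and the bs_start_match2 test
+%% is bound to fail again.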
shortcut_bs_start_match(To, Reg, D) ->
- shortcut_bs_start_match_1(beam_utils:code_at(To, D), Reg, To).
+ shortcut_bs_start_match_1(beam_utils:code_at(To, D), Reg, To, D).
+
+shortcut_bs_start_match_1([{bs_context_to_binary,Reg}|Is], Reg, To, D) ->
+ shortcut_bs_start_match_1(Is, Reg, To, D);
+shortcut_bs_start_match_1([{jump,{f,To}}|_], Reg, _, D) ->
+ Code = beam_utils:code_at(To, D),
+ shortcut_bs_start_match_1(Code, Reg, To, D);
+shortcut_bs_start_match_1([{test,bs_start_match2,{f,To},_,[Reg|_],_}|_],
+ Reg, _, D) ->
+ Code = beam_utils:code_at(To, D),
+ shortcut_bs_start_match_1(Code, Reg, To, D);
+shortcut_bs_start_match_1(_, _, To, _) ->
+ To.
+
+%% shortcut_rel_op(FailLabel, Operator, [Operand], D) -> FailLabel'
+%% Try to shortcut the given test instruction. Example:
+%%
+%% is_ge L1 {x,0} 48
+%% .
+%% .
+%% .
+%% L1: is_ge L2 {x,0} 65
+%%
+%% The first test instruction can be rewritten to "is_ge L2 {x,0} 48"
+%% since the instruction at L1 will also fail.
+%%
+%% If there are instructions between L1 and the other test instruction
+%% it may still be possible to do the shortcut. For example:
+%%
+%% L1: is_eq_exact L3 {x,0} 92
+%% is_ge L2 {x,0} 65
+%%
+%% Since the first test instruction failed, we know that {x,0} must
+%% be less than 48; therefore, we know that {x,0} cannot be equal to
+%% 92 and the jump to L3 cannot happen.
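+%%
+%% Note that the condition we carry along is the negation of the
+%% original test: the code at the failure label is only reached when
+%% the test has failed, so it is the negated condition that is known
+%% to be true there.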
+
+shortcut_rel_op(To, Op, Ops, D) ->
+ case normalize_op({test,Op,{f,To},Ops}) of
+ {{NormOp,A,B},_} ->
+ Normalized = {negate_op(NormOp),A,B},
+ shortcut_rel_op_fp(To, Normalized, D);
+ {_,_} ->
+ To;
+ error ->
+ To
+ end.
-shortcut_bs_start_match_1([{bs_context_to_binary,Reg}|Is], Reg, To) ->
- shortcut_bs_start_match_2(Is, Reg, To);
-shortcut_bs_start_match_1(_, _, To) -> To.
+shortcut_rel_op_fp(To0, Normalized, D) ->
+ Code = beam_utils:code_at(To0, D),
+ case shortcut_any_label(Code, Normalized) of
+ error ->
+ To0;
+ To ->
+ shortcut_rel_op_fp(To, Normalized, D)
+ end.
-shortcut_bs_start_match_2([{jump,{f,To}}|_], _, _) ->
- To;
-shortcut_bs_start_match_2([{test,bs_start_match2,{f,To},_,[Reg|_],_}|_], Reg, _) ->
- To;
-shortcut_bs_start_match_2(_Is, _Reg, To) ->
- To.
+%% shortcut_any_label([Instruction], PrevCondition) -> FailLabel | error
+%% Using PrevCondition (a previous condition known to be true),
+%% try to shortcut to another failure label.
+
+shortcut_any_label([{jump,{f,Lbl}}|_], _Prev) ->
+ Lbl;
+shortcut_any_label([{label,Lbl}|_], _Prev) ->
+ Lbl;
+shortcut_any_label([{select,select_val,R,{f,Fail},L}|_], Prev) ->
+ shortcut_selectval(L, R, Fail, Prev);
+shortcut_any_label([I|Is], Prev) ->
+ case normalize_op(I) of
+ error ->
+ error;
+ {Normalized,Fail} ->
+ %% We have a relational operator.
+ case will_succeed(Prev, Normalized) of
+ no ->
+ %% This test instruction will always branch
+ %% to Fail.
+ Fail;
+ yes ->
+ %% This test instruction will never branch,
+ %% so we will look at the next instruction.
+ shortcut_any_label(Is, Prev);
+ maybe ->
+ %% May or may not branch. From now on, we can only
+ %% shortcut to this specific failure label
+ %% Fail.
+ shortcut_specific_label(Is, Fail, Prev)
+ end
+ end.
+
+%% shortcut_specific_label([Instruction], FailLabel, PrevCondition) ->
+%% FailLabel | error
+%% We have previously encountered a test instruction that may or
+%% may not branch to FailLabel. Therefore we are only allowed
+%% to do the shortcut to the same fail label (FailLabel).
+
+shortcut_specific_label([{label,_}|Is], Fail, Prev) ->
+ shortcut_specific_label(Is, Fail, Prev);
+shortcut_specific_label([{select,select_val,R,{f,F},L}|_], Fail, Prev) ->
+ case shortcut_selectval(L, R, F, Prev) of
+ Fail -> Fail;
+ _ -> error
+ end;
+shortcut_specific_label([I|Is], Fail, Prev) ->
+ case normalize_op(I) of
+ error ->
+ error;
+ {Normalized,Fail} ->
+ case will_succeed(Prev, Normalized) of
+ no ->
+ %% Will branch to FailLabel.
+ Fail;
+ yes ->
+ %% Will definitely never branch.
+ shortcut_specific_label(Is, Fail, Prev);
+ maybe ->
+ %% May branch, but still OK since it will branch
+ %% to FailLabel.
+ shortcut_specific_label(Is, Fail, Prev)
+ end;
+ {Normalized,_} ->
+ %% This test instruction will branch to a different
+ %% fail label, if it branches at all.
+ case will_succeed(Prev, Normalized) of
+ yes ->
+ %% Still OK, since the branch will never be
+ %% taken.
+ shortcut_specific_label(Is, Fail, Prev);
+ no ->
+ %% Give up. The branch will definitely be taken
+ %% to a different fail label.
+ error;
+ maybe ->
+ %% Give up. If the branch is taken, it will be
+ %% to a different fail label.
+ error
+ end
+ end.
+
+
+%% shortcut_selectval(List, Reg, Fail, PrevCond) -> FailLabel | error
+%% Try to shortcut a selectval instruction. A selectval instruction
+%% is equivalent to the following instruction sequence:
+%%
+%% is_ne_exact L1 Reg Value1
+%% .
+%% .
+%% .
+%% is_ne_exact LN Reg ValueN
+%% jump DefaultFailLabel
+%%
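+%% For example (illustrative), if PrevCond is {'=:=',{x,0},{literal,1}}
+%% and the list is [{integer,1},{f,100},{integer,2},{f,200}], the
+%% implied check {'=/=',{x,0},{literal,1}} can never succeed, so the
+%% shortcut target is label 100.
+%%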
+shortcut_selectval([Val,{f,Lbl}|T], R, Fail, Prev) ->
+ case will_succeed(Prev, {'=/=',R,get_literal(Val)}) of
+ yes -> shortcut_selectval(T, R, Fail, Prev);
+ no -> Lbl;
+ maybe -> error
+ end;
+shortcut_selectval([], _, Fail, _) -> Fail.
+
+%% will_succeed(PrevCondition, Condition) -> yes | no | maybe
+%% PrevCondition is a condition known to be true. This function
+%% will tell whether Condition will succeed.
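+%%
+%% For example (illustrative):
+%%
+%%   will_succeed({'=:=',{x,0},{literal,a}}, {is_atom,{x,0}}) -> yes
+%%   will_succeed({'<',{x,0},{literal,10}}, {'>',{x,0},{literal,20}}) -> no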
+
+will_succeed({Op1,Reg,A}, {Op2,Reg,B}) ->
+ will_succeed_1(Op1, A, Op2, B);
+will_succeed({'=:=',Reg,{literal,A}}, {TypeTest,Reg}) ->
+ case erlang:TypeTest(A) of
+ false -> no;
+ true -> yes
+ end;
+will_succeed({_,_,_}, maybe) ->
+ maybe;
+will_succeed({_,_,_}, Test) when is_tuple(Test) ->
+ maybe.
+
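+%% will_succeed_1(KnownOp, KnownLit, TestOp, TestLit) -> yes | no | maybe
+%%  Reading the first clause as an example: if Reg =:= A is known to
+%%  hold and the test is Reg < B, the test must fail when B =< A and
+%%  must succeed otherwise.
+%%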
+will_succeed_1('=:=', A, '<', B) ->
+ if
+ B =< A -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '=<', B) ->
+ if
+ B < A -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '=:=', B) ->
+ if
+ A =:= B -> yes;
+ true -> no
+ end;
+will_succeed_1('=:=', A, '=/=', B) ->
+ if
+ A =:= B -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '>=', B) ->
+ if
+ B > A -> no;
+ true -> yes
+ end;
+will_succeed_1('=:=', A, '>', B) ->
+ if
+ B >= A -> no;
+ true -> yes
+ end;
+
+will_succeed_1('=/=', A, '=/=', B) when A =:= B -> yes;
+will_succeed_1('=/=', A, '=:=', B) when A =:= B -> no;
+
+will_succeed_1('<', A, '=:=', B) when B >= A -> no;
+will_succeed_1('<', A, '=/=', B) when B >= A -> yes;
+will_succeed_1('<', A, '<', B) when B >= A -> yes;
+will_succeed_1('<', A, '=<', B) when B > A -> yes;
+will_succeed_1('<', A, '>=', B) when B > A -> no;
+will_succeed_1('<', A, '>', B) when B >= A -> no;
+
+will_succeed_1('=<', A, '=:=', B) when B > A -> no;
+will_succeed_1('=<', A, '=/=', B) when B > A -> yes;
+will_succeed_1('=<', A, '<', B) when B > A -> yes;
+will_succeed_1('=<', A, '=<', B) when B >= A -> yes;
+will_succeed_1('=<', A, '>=', B) when B > A -> no;
+will_succeed_1('=<', A, '>', B) when B >= A -> no;
+
+will_succeed_1('>=', A, '=:=', B) when B < A -> no;
+will_succeed_1('>=', A, '=/=', B) when B < A -> yes;
+will_succeed_1('>=', A, '<', B) when B =< A -> no;
+will_succeed_1('>=', A, '=<', B) when B < A -> no;
+will_succeed_1('>=', A, '>=', B) when B =< A -> yes;
+will_succeed_1('>=', A, '>', B) when B < A -> yes;
+
+will_succeed_1('>', A, '=:=', B) when B =< A -> no;
+will_succeed_1('>', A, '=/=', B) when B =< A -> yes;
+will_succeed_1('>', A, '<', B) when B =< A -> no;
+will_succeed_1('>', A, '=<', B) when B < A -> no;
+will_succeed_1('>', A, '>=', B) when B =< A -> yes;
+will_succeed_1('>', A, '>', B) when B < A -> yes;
+
+will_succeed_1(_, _, _, _) -> maybe.
+
+%% normalize_op(Instruction) -> {Normalized,FailLabel} | error
+%% Normalized = {Operator,Register,Literal} |
+%% {TypeTest,Register} |
+%% maybe
+%% Operator = '<' | '=<' | '=:=' | '=/=' | '>=' | '>'
+%% TypeTest = is_atom | is_integer ...
+%% Literal = {literal,Term}
+%%
+%% Normalize a relational operator to facilitate further
+%% comparisons between operators. Always make the register
+%% operand the first operand. Thus the following instruction:
+%%
+%% {test,is_ge,{f,99},{integer,13},{x,0}}
+%%
+%% will be normalized to:
+%%
+%% {'=<',{x,0},{literal,13}}
+%%
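+%% Similarly (illustrative), {test,is_nil,{f,42},[{x,1}]} is
+%% normalized to {'=:=',{x,1},{literal,[]}}, with failure label 42.
+%%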
+%% NOTE: Bit syntax test instructions are scary. They may change the
+%% state of match contexts and update registers, so we don't dare
+%% mess with them.
+
+normalize_op({test,is_ge,{f,Fail},Ops}) ->
+ normalize_op_1('>=', Ops, Fail);
+normalize_op({test,is_lt,{f,Fail},Ops}) ->
+ normalize_op_1('<', Ops, Fail);
+normalize_op({test,is_eq_exact,{f,Fail},Ops}) ->
+ normalize_op_1('=:=', Ops, Fail);
+normalize_op({test,is_ne_exact,{f,Fail},Ops}) ->
+ normalize_op_1('=/=', Ops, Fail);
+normalize_op({test,is_nil,{f,Fail},[R]}) ->
+ normalize_op_1('=:=', [R,nil], Fail);
+normalize_op({test,Op,{f,Fail},[R]}) ->
+ case erl_internal:new_type_test(Op, 1) of
+ true -> {{Op,R},Fail};
+ false -> {maybe,Fail}
+ end;
+normalize_op({test,_,{f,Fail},_}=I) ->
+ case beam_utils:is_pure_test(I) of
+ true -> {maybe,Fail};
+ false -> error
+ end;
+normalize_op(_) ->
+ error.
+
+normalize_op_1(Op, [Op1,Op2], Fail) ->
+ case {get_literal(Op1),get_literal(Op2)} of
+ {error,error} ->
+ %% Both operands are registers.
+ {maybe,Fail};
+ {error,Lit} ->
+ {{Op,Op1,Lit},Fail};
+ {Lit,error} ->
+ {{turn_op(Op),Op2,Lit},Fail};
+ {_,_} ->
+ %% Both operands are literals. Can probably only
+ %% happen if the Core Erlang optimization passes were
+ %% turned off, so don't bother trying to do something
+ %% smart here.
+ {maybe,Fail}
+ end.
+
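+%% turn_op(Op) -> Op'
+%%  Return the operator that expresses the same relation with the
+%%  operands swapped (used when the literal was the first operand,
+%%  so that the register can always be made the first operand).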
+turn_op('<') -> '>';
+turn_op('>=') -> '=<';
+turn_op('=:='=Op) -> Op;
+turn_op('=/='=Op) -> Op.
+
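+%% negate_op(Op) -> NegOp
+%%  Return the logical negation of a relational operator, i.e. the
+%%  operator that holds exactly when Op does not.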
+negate_op('>=') -> '<';
+negate_op('<') -> '>=';
+negate_op('=<') -> '>';
+negate_op('>') -> '=<';
+negate_op('=:=') -> '=/=';
+negate_op('=/=') -> '=:='.
+
+get_literal({atom,Val}) ->
+ {literal,Val};
+get_literal({integer,Val}) ->
+ {literal,Val};
+get_literal({float,Val}) ->
+ {literal,Val};
+get_literal(nil) ->
+ {literal,[]};
+get_literal({literal,_}=Lit) ->
+ Lit;
+get_literal({_,_}) -> error.
diff --git a/lib/compiler/src/beam_dict.erl b/lib/compiler/src/beam_dict.erl
index ea51673fa3..b1aa98278e 100644
--- a/lib/compiler/src/beam_dict.erl
+++ b/lib/compiler/src/beam_dict.erl
@@ -31,22 +31,22 @@
-type index() :: non_neg_integer().
--type atom_tab() :: gb_trees:tree(atom(), index()).
+-type atom_tab() :: #{atom() => index()}.
-type import_tab() :: gb_trees:tree(mfa(), index()).
--type fname_tab() :: gb_trees:tree(Name :: term(), index()).
--type line_tab() :: gb_trees:tree({Fname :: index(), Line :: term()}, index()).
+-type fname_tab() :: #{Name :: term() => index()}.
+-type line_tab() :: #{{Fname :: index(), Line :: term()} => index()}.
-type literal_tab() :: dict:dict(Literal :: term(), index()).
-record(asm,
- {atoms = gb_trees:empty() :: atom_tab(),
+ {atoms = #{} :: atom_tab(),
exports = [] :: [{label(), arity(), label()}],
locals = [] :: [{label(), arity(), label()}],
imports = gb_trees:empty() :: import_tab(),
strings = <<>> :: binary(), %String pool
lambdas = [], %[{...}]
literals = dict:new() :: literal_tab(),
- fnames = gb_trees:empty() :: fname_tab(),
- lines = gb_trees:empty() :: line_tab(),
+ fnames = #{} :: fname_tab(),
+ lines = #{} :: line_tab(),
num_lines = 0 :: non_neg_integer(), %Number of line instructions
next_import = 0 :: non_neg_integer(),
string_offset = 0 :: non_neg_integer(),
@@ -65,7 +65,7 @@ new() ->
%% Remember the highest opcode.
-spec opcode(non_neg_integer(), bdict()) -> bdict().
-opcode(Op, Dict) when Dict#asm.highest_opcode > Op -> Dict;
+opcode(Op, Dict) when Dict#asm.highest_opcode >= Op -> Dict;
opcode(Op, Dict) -> Dict#asm{highest_opcode=Op}.
%% Returns the highest opcode encountered.
@@ -77,14 +77,12 @@ highest_opcode(#asm{highest_opcode=Op}) -> Op.
%% atom(Atom, Dict) -> {Index,Dict'}
-spec atom(atom(), bdict()) -> {pos_integer(), bdict()}.
-atom(Atom, #asm{atoms=Atoms0}=Dict) when is_atom(Atom) ->
- case gb_trees:lookup(Atom, Atoms0) of
- {value,Index} ->
- {Index,Dict};
- none ->
- NextIndex = gb_trees:size(Atoms0) + 1,
- Atoms = gb_trees:insert(Atom, NextIndex, Atoms0),
- {NextIndex,Dict#asm{atoms=Atoms}}
+atom(Atom, #asm{atoms=Atoms}=Dict) when is_atom(Atom) ->
+ case Atoms of
+ #{ Atom := Index} -> {Index,Dict};
+ _ ->
+ NextIndex = maps:size(Atoms) + 1,
+ {NextIndex,Dict#asm{atoms=Atoms#{Atom=>NextIndex}}}
end.
%% Remembers an exported function.
@@ -177,26 +175,22 @@ line([], #asm{num_lines=N}=Dict) ->
%% No location available. Return the special pre-defined
%% index 0.
{0,Dict#asm{num_lines=N+1}};
-line([{location,Name,Line}], #asm{lines=Lines0,num_lines=N}=Dict0) ->
+line([{location,Name,Line}], #asm{lines=Lines,num_lines=N}=Dict0) ->
{FnameIndex,Dict1} = fname(Name, Dict0),
- case gb_trees:lookup({FnameIndex,Line}, Lines0) of
- {value,Index} ->
- {Index,Dict1#asm{num_lines=N+1}};
- none ->
- Index = gb_trees:size(Lines0) + 1,
- Lines = gb_trees:insert({FnameIndex,Line}, Index, Lines0),
- Dict = Dict1#asm{lines=Lines,num_lines=N+1},
- {Index,Dict}
+ Key = {FnameIndex,Line},
+ case Lines of
+ #{Key := Index} -> {Index,Dict1#asm{num_lines=N+1}};
+ _ ->
+ Index = maps:size(Lines) + 1,
+ {Index, Dict1#asm{lines=Lines#{Key=>Index},num_lines=N+1}}
end.
-fname(Name, #asm{fnames=Fnames0}=Dict) ->
- case gb_trees:lookup(Name, Fnames0) of
- {value,Index} ->
- {Index,Dict};
- none ->
- Index = gb_trees:size(Fnames0),
- Fnames = gb_trees:insert(Name, Index, Fnames0),
- {Index,Dict#asm{fnames=Fnames}}
+fname(Name, #asm{fnames=Fnames}=Dict) ->
+ case Fnames of
+ #{Name := Index} -> {Index,Dict};
+ _ ->
+ Index = maps:size(Fnames),
+ {Index,Dict#asm{fnames=Fnames#{Name=>Index}}}
end.
%% Returns the atom table.
@@ -204,14 +198,12 @@ fname(Name, #asm{fnames=Fnames0}=Dict) ->
-spec atom_table(bdict()) -> {non_neg_integer(), [[non_neg_integer(),...]]}.
atom_table(#asm{atoms=Atoms}) ->
- NumAtoms = gb_trees:size(Atoms),
- Sorted = lists:keysort(2, gb_trees:to_list(Atoms)),
- Fun = fun({A,_}) ->
- L = atom_to_list(A),
- [length(L)|L]
- end,
- AtomTab = lists:map(Fun, Sorted),
- {NumAtoms,AtomTab}.
+ NumAtoms = maps:size(Atoms),
+ Sorted = lists:keysort(2, maps:to_list(Atoms)),
+ {NumAtoms,[begin
+ L = atom_to_list(A),
+ [length(L)|L]
+ end || {A,_} <- Sorted]}.
%% Returns the table of local functions.
%% local_table(Dict) -> {NumLocals, [{Function, Arity, Label}...]}
@@ -273,11 +265,11 @@ my_term_to_binary(Term) ->
non_neg_integer(),[{non_neg_integer(),non_neg_integer()}]}.
line_table(#asm{fnames=Fnames0,lines=Lines0,num_lines=NumLineInstrs}) ->
- NumFnames = gb_trees:size(Fnames0),
- Fnames1 = lists:keysort(2, gb_trees:to_list(Fnames0)),
+ NumFnames = maps:size(Fnames0),
+ Fnames1 = lists:keysort(2, maps:to_list(Fnames0)),
Fnames = [Name || {Name,_} <- Fnames1],
- NumLines = gb_trees:size(Lines0),
- Lines1 = lists:keysort(2, gb_trees:to_list(Lines0)),
+ NumLines = maps:size(Lines0),
+ Lines1 = lists:keysort(2, maps:to_list(Lines0)),
Lines = [L || {L,_} <- Lines1],
{NumLineInstrs,NumFnames,Fnames,NumLines,Lines}.
diff --git a/lib/compiler/src/beam_flatten.erl b/lib/compiler/src/beam_flatten.erl
index 05d067dc48..54e06df995 100644
--- a/lib/compiler/src/beam_flatten.erl
+++ b/lib/compiler/src/beam_flatten.erl
@@ -63,9 +63,7 @@ norm({set,[],[S,D],{set_tuple_element,I}}) -> {set_tuple_element,S,D,I};
norm({set,[D1,D2],[S],get_list}) -> {get_list,S,D1,D2};
norm({set,[D],[S|Puts],{alloc,R,{put_map,Op,F}}}) ->
{put_map,F,Op,S,D,R,{list,Puts}};
-norm({set,Ds,[S|Ss],{get_map_elements,F}}) ->
- Gets = beam_utils:joineven(Ss,Ds),
- {get_map_elements,F,S,{list,Gets}};
+%% get_map_elements is always handled in beam_split (moved out of block)
norm({set,[],[],remove_message}) -> remove_message;
norm({set,[],[],fclearerror}) -> fclearerror;
norm({set,[],[],fcheckerror}) -> {fcheckerror,{f,0}}.
diff --git a/lib/compiler/src/beam_jump.erl b/lib/compiler/src/beam_jump.erl
index b952139f2c..80b2998ddc 100644
--- a/lib/compiler/src/beam_jump.erl
+++ b/lib/compiler/src/beam_jump.erl
@@ -127,7 +127,7 @@
%%% on the program state.
%%%
--import(lists, [reverse/1,reverse/2,foldl/3,dropwhile/2]).
+-import(lists, [reverse/1,reverse/2,foldl/3]).
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
@@ -152,20 +152,26 @@ function({function,Name,Arity,CLabel,Asm0}) ->
share(Is0) ->
%% We will get more sharing if we never fall through to a label.
Is = eliminate_fallthroughs(Is0, []),
- share_1(Is, dict:new(), [], []).
+ share_1(Is, #{}, [], []).
share_1([{label,_}=Lbl|Is], Dict, [], Acc) ->
share_1(Is, Dict, [], [Lbl|Acc]);
share_1([{label,L}=Lbl|Is], Dict0, Seq, Acc) ->
- case dict:find(Seq, Dict0) of
+ case maps:find(Seq, Dict0) of
error ->
- Dict = dict:store(Seq, L, Dict0),
+ Dict = maps:put(Seq, L, Dict0),
share_1(Is, Dict, [], [Lbl|Seq ++ Acc]);
{ok,Label} ->
share_1(Is, Dict0, [], [Lbl,{jump,{f,Label}}|Acc])
end;
share_1([{func_info,_,_,_}=I|Is], _, [], Acc) ->
reverse(Is, [I|Acc]);
+share_1([{'try',_,_}=I|Is], Dict0, Seq, Acc) ->
+ Dict = clean_non_sharable(Dict0),
+ share_1(Is, Dict, [I|Seq], Acc);
+share_1([{try_case,_}=I|Is], Dict0, Seq, Acc) ->
+ Dict = clean_non_sharable(Dict0),
+ share_1(Is, Dict, [I|Seq], Acc);
share_1([I|Is], Dict, Seq, Acc) ->
case is_unreachable_after(I) of
false ->
@@ -174,6 +180,24 @@ share_1([I|Is], Dict, Seq, Acc) ->
share_1(Is, Dict, [I], Acc)
end.
+clean_non_sharable(Dict) ->
+ %% We are passing in or out of a 'try' block. Remove
+ %% sequences that should not be shared over the boundaries
+ %% of a 'try' block. Since the end of the sequence must match,
+ %% the only possible match between a sequence outside and
+ %% a sequence inside the 'try' block is a sequence that ends
+ %% with an instruction that causes an exception. Any sequence
+ %% that causes an exception must contain a line/1 instruction.
+ maps:filter(fun(K, _V) -> sharable_with_try(K) end, Dict).
+
+sharable_with_try([{line,_}|_]) ->
+ %% This sequence may cause an exception and may potentially
+ %% match a sequence on the other side of the 'try' block
+ %% boundary.
+ false;
+sharable_with_try([_|Is]) ->
+ sharable_with_try(Is);
+sharable_with_try([]) -> true.
%% Eliminate all fallthroughs. Return the result reversed.
@@ -244,13 +268,13 @@ extract_seq_1(_, _) -> no.
-record(st, {fc, %Label for function class errors.
entry, %Entry label (must not be moved).
mlbl, %Moved labels.
- labels %Set of referenced labels.
+ labels :: cerl_sets:set() %Set of referenced labels.
}).
opt([{label,Fc}|_]=Is0, CLabel) ->
Lbls = initial_labels(Is0),
find_fixpoint(fun(Is) ->
- St = #st{fc=Fc,entry=CLabel,mlbl=dict:new(),
+ St = #st{fc=Fc,entry=CLabel,mlbl=#{},
labels=Lbls},
opt(Is, [], St)
end, Is0).
@@ -295,24 +319,29 @@ opt([{test,_,{f,_}=Lbl,_,_,_}=I|Is], Acc, St) ->
opt(Is, [I|Acc], label_used(Lbl, St));
opt([{select,_,_R,Fail,Vls}=I|Is], Acc, St) ->
skip_unreachable(Is, [I|Acc], label_used([Fail|Vls], St));
-opt([{label,L}=I|Is], Acc, #st{entry=L}=St) ->
- %% NEVER move the entry label.
- opt(Is, [I|Acc], St);
-opt([{label,L1},{jump,{f,L2}}=I|Is], [Prev|Acc], St0) ->
- St = St0#st{mlbl=dict:append(L2, L1, St0#st.mlbl)},
- opt([Prev,I|Is], Acc, label_used({f,L2}, St));
opt([{label,Lbl}=I|Is], Acc, #st{mlbl=Mlbl}=St0) ->
- case dict:find(Lbl, Mlbl) of
+ case maps:find(Lbl, Mlbl) of
{ok,Lbls} ->
%% Essential to remove the list of labels from the dictionary,
%% since we will rescan the inserted labels. We MUST rescan.
- St = St0#st{mlbl=dict:erase(Lbl, Mlbl)},
+ St = St0#st{mlbl=maps:remove(Lbl, Mlbl)},
insert_labels([Lbl|Lbls], Is, Acc, St);
error -> opt(Is, [I|Acc], St0)
end;
-opt([{jump,{f,Lbl}},{label,Lbl}=I|Is], Acc, St) ->
- opt([I|Is], Acc, St);
-opt([{jump,Lbl}=I|Is], Acc, St) ->
+opt([{jump,{f,_}=X}|[{label,_},{jump,X}|_]=Is], Acc, St) ->
+ opt(Is, Acc, St);
+opt([{jump,{f,Lbl}}|[{label,Lbl}|_]=Is], Acc, St) ->
+ opt(Is, Acc, St);
+opt([{jump,{f,L}=Lbl}=I|Is], Acc0, #st{mlbl=Mlbl0}=St0) ->
+ %% All labels before this jump instruction should now be
+ %% moved to the location of the jump's target.
+ {Lbls,Acc} = collect_labels(Acc0, St0),
+ St = case Lbls of
+ [] -> St0;
+ [_|_] ->
+ Mlbl = maps_append_list(L, Lbls, Mlbl0),
+ St0#st{mlbl=Mlbl}
+ end,
skip_unreachable(Is, [I|Acc], label_used(Lbl, St));
%% Optimization: quickly handle some common instructions that don't
%% have any failure labels and where is_unreachable_after(I) =:= false.
@@ -334,14 +363,20 @@ opt([I|Is], Acc, #st{labels=Used0}=St0) ->
end;
opt([], Acc, #st{fc=Fc,mlbl=Mlbl}) ->
Code = reverse(Acc),
- case dict:find(Fc, Mlbl) of
+ case maps:find(Fc, Mlbl) of
{ok,Lbls} -> insert_fc_labels(Lbls, Mlbl, Code);
error -> Code
end.
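+
+%% maps_append_list(Key, List, Map) -> Map'
+%%  Append List to the list stored under Key (creating the entry if
+%%  there is none). The elements end up in the same order as with
+%%  the dict-based code this replaces.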
+maps_append_list(K,Vs,M) ->
+ case M of
+ #{K:=Vs0} -> M#{K:=Vs0++Vs}; % same order as dict
+ _ -> M#{K => Vs}
+ end.
+
insert_fc_labels([L|Ls], Mlbl, Acc0) ->
Acc = [{label,L}|Acc0],
- case dict:find(L, Mlbl) of
+ case maps:find(L, Mlbl) of
error ->
insert_fc_labels(Ls, Mlbl, Acc);
{ok,Lbls} ->
@@ -349,6 +384,17 @@ insert_fc_labels([L|Ls], Mlbl, Acc0) ->
end;
insert_fc_labels([], _, Acc) -> Acc.
+collect_labels(Is, #st{entry=Entry}) ->
+ collect_labels_1(Is, Entry, []).
+
+collect_labels_1([{label,Entry}|_]=Is, Entry, Acc) ->
+ %% Never move the entry label.
+ {Acc,Is};
+collect_labels_1([{label,L}|Is], Entry, Acc) ->
+ collect_labels_1(Is, Entry, [L|Acc]);
+collect_labels_1(Is, _Entry, Acc) ->
+ {Acc,Is}.
+
%% label_defined(Is, Label) -> true | false.
%% Test whether the label Label is defined at the start of the instruction
%% sequence, possibly preceded by other label definitions.
@@ -394,7 +440,7 @@ skip_unreachable([], Acc, St) ->
%% Add one or more label to the set of used labels.
-label_used({f,L}, St) -> St#st{labels=gb_sets:add(L, St#st.labels)};
+label_used({f,L}, St) -> St#st{labels=cerl_sets:add_element(L,St#st.labels)};
label_used([H|T], St0) -> label_used(T, label_used(H, St0));
label_used([], St) -> St;
label_used(_Other, St) -> St.
@@ -402,7 +448,7 @@ label_used(_Other, St) -> St.
%% Test if label is used.
is_label_used(L, St) ->
- gb_sets:is_member(L, St#st.labels).
+ cerl_sets:is_element(L, St#st.labels).
%% is_unreachable_after(Instruction) -> boolean()
%% Test whether the code after Instruction is unreachable.
@@ -432,17 +478,17 @@ is_exit_instruction(_) -> false.
%% (including inside blocks).
is_label_used_in(Lbl, Is) ->
- is_label_used_in_1(Is, Lbl, gb_sets:empty()).
+ is_label_used_in_1(Is, Lbl, cerl_sets:new()).
is_label_used_in_1([{block,Block}|Is], Lbl, Empty) ->
- lists:any(fun(I) -> is_label_used_in_2(I, Lbl) end, Block)
+ lists:any(fun(I) -> is_label_used_in_block(I, Lbl) end, Block)
orelse is_label_used_in_1(Is, Lbl, Empty);
is_label_used_in_1([I|Is], Lbl, Empty) ->
Used = ulbl(I, Empty),
- gb_sets:is_member(Lbl, Used) orelse is_label_used_in_1(Is, Lbl, Empty);
+ cerl_sets:is_element(Lbl, Used) orelse is_label_used_in_1(Is, Lbl, Empty);
is_label_used_in_1([], _, _) -> false.
-is_label_used_in_2({set,_,_,Info}, Lbl) ->
+is_label_used_in_block({set,_,_,Info}, Lbl) ->
case Info of
{bif,_,{f,F}} -> F =:= Lbl;
{alloc,_,{gc_bif,_,{f,F}}} -> F =:= Lbl;
@@ -452,7 +498,6 @@ is_label_used_in_2({set,_,_,Info}, Lbl) ->
{put_tuple,_} -> false;
{get_tuple_element,_} -> false;
{set_tuple_element,_} -> false;
- {get_map_elements,{f,F}} -> F =:= Lbl;
{line,_} -> false;
_ when is_atom(Info) -> false
end.
@@ -467,13 +512,10 @@ remove_unused_labels(Is) ->
rem_unused(Is, Used, []).
rem_unused([{label,Lbl}=I|Is0], Used, [Prev|_]=Acc) ->
- case gb_sets:is_member(Lbl, Used) of
+ case cerl_sets:is_element(Lbl, Used) of
false ->
Is = case is_unreachable_after(Prev) of
- true ->
- dropwhile(fun({label,_}) -> false;
- (_) -> true
- end, Is0);
+ true -> drop_upto_label(Is0);
false -> Is0
end,
rem_unused(Is, Used, Acc);
@@ -492,7 +534,11 @@ initial_labels([{line,_}|Is], Acc) ->
initial_labels([{label,Lbl}|Is], Acc) ->
initial_labels(Is, [Lbl|Acc]);
initial_labels([{func_info,_,_,_},{label,Lbl}|_], Acc) ->
- gb_sets:from_list([Lbl|Acc]).
+ cerl_sets:from_list([Lbl|Acc]).
+
+drop_upto_label([{label,_}|_]=Is) -> Is;
+drop_upto_label([_|Is]) -> drop_upto_label(Is);
+drop_upto_label([]) -> [].
%% ulbl(Instruction, UsedGbSet) -> UsedGbSet'
%% Update the gb_set UsedGbSet with any function-local labels
@@ -536,10 +582,10 @@ ulbl({get_map_elements,Lbl,_Src,_List}, Used) ->
ulbl(_, Used) -> Used.
mark_used({f,0}, Used) -> Used;
-mark_used({f,L}, Used) -> gb_sets:add(L, Used).
+mark_used({f,L}, Used) -> cerl_sets:add_element(L, Used).
mark_used_list([{f,L}|T], Used) ->
- mark_used_list(T, gb_sets:add(L, Used));
+ mark_used_list(T, cerl_sets:add_element(L, Used));
mark_used_list([_|T], Used) ->
mark_used_list(T, Used);
mark_used_list([], Used) -> Used.
diff --git a/lib/compiler/src/beam_listing.erl b/lib/compiler/src/beam_listing.erl
index 50d1f3cdb1..726bb7f5eb 100644
--- a/lib/compiler/src/beam_listing.erl
+++ b/lib/compiler/src/beam_listing.erl
@@ -46,8 +46,8 @@ module(Stream, {Mod,Exp,Attr,Code,NumLabels}) ->
fun ({function,Name,Arity,Entry,Asm}) ->
io:format(Stream, "\n\n{function, ~w, ~w, ~w}.\n",
[Name, Arity, Entry]),
- foreach(fun(Op) -> print_op(Stream, Op) end, Asm) end,
- Code);
+ io:put_chars(Stream, format_asm(Asm))
+ end, Code);
module(Stream, {Mod,Exp,Inter}) ->
%% Other kinds of intermediate formats.
io:fwrite(Stream, "~w.~n~p.~n", [Mod,Exp]),
@@ -56,10 +56,11 @@ module(Stream, [_|_]=Fs) ->
%% Form-based abstract format.
foreach(fun (F) -> io:format(Stream, "~p.\n", [F]) end, Fs).
-print_op(Stream, Label) when element(1, Label) == label ->
- io:format(Stream, " ~p.\n", [Label]);
-print_op(Stream, Op) ->
- io:format(Stream, " ~p.\n", [Op]).
+format_asm([{label,L}|Is]) ->
+ [" {label,",integer_to_list(L),"}.\n"|format_asm(Is)];
+format_asm([I|Is]) ->
+ [io_lib:format(" ~p", [I]),".\n"|format_asm(Is)];
+format_asm([]) -> [].
function(File, {function,Name,Arity,Args,Body,Vdb,_Anno}) ->
io:nl(File),
diff --git a/lib/compiler/src/beam_peep.erl b/lib/compiler/src/beam_peep.erl
index 97a8c7ba70..5abacc8d5d 100644
--- a/lib/compiler/src/beam_peep.erl
+++ b/lib/compiler/src/beam_peep.erl
@@ -108,14 +108,14 @@ peep([{test,Op,_,Ops}=I|Is], SeenTests0, Acc) ->
%% has succeeded.
peep(Is, gb_sets:empty(), [I|Acc]);
true ->
- Test = {Op,Ops},
- case gb_sets:is_element(Test, SeenTests0) of
+ case is_test_redundant(Op, Ops, SeenTests0) of
true ->
- %% This test has already succeeded and
+ %% This test or a similar test has already succeeded and
%% is therefore redundant.
peep(Is, SeenTests0, Acc);
false ->
%% Remember that we have seen this test.
+ Test = {Op,Ops},
SeenTests = gb_sets:insert(Test, SeenTests0),
peep(Is, SeenTests, [I|Acc])
end
@@ -136,6 +136,15 @@ peep([I|Is], _, Acc) ->
peep(Is, gb_sets:empty(), [I|Acc]);
peep([], _, Acc) -> reverse(Acc).
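+
+%% is_test_redundant(Op, Ops, Seen) -> boolean()
+%%  A test is redundant if exactly the same test has already been
+%%  seen and succeeded, or (for is_boolean) if an is_eq_exact test of
+%%  the same register against 'true' or 'false' has already succeeded.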
+is_test_redundant(Op, Ops, Seen) ->
+ gb_sets:is_element({Op,Ops}, Seen) orelse
+ is_test_redundant_1(Op, Ops, Seen).
+
+is_test_redundant_1(is_boolean, [R], Seen) ->
+ gb_sets:is_element({is_eq_exact,[R,{atom,false}]}, Seen) orelse
+ gb_sets:is_element({is_eq_exact,[R,{atom,true}]}, Seen);
+is_test_redundant_1(_, _, _) -> false.
+
kill_seen(Dst, Seen0) ->
gb_sets:from_ordset(kill_seen_1(gb_sets:to_list(Seen0), Dst)).
diff --git a/lib/compiler/src/beam_split.erl b/lib/compiler/src/beam_split.erl
index f5dba314ae..0c62b0bf3d 100644
--- a/lib/compiler/src/beam_split.erl
+++ b/lib/compiler/src/beam_split.erl
@@ -53,9 +53,8 @@ split_block([{set,[D],[S|Puts],{alloc,R,{put_map,Op,{f,Lbl}=Fail}}}|Is],
Bl, Acc) when Lbl =/= 0 ->
split_block(Is, [], [{put_map,Fail,Op,S,D,R,{list,Puts}}|
make_block(Bl, Acc)]);
-split_block([{set,Ds,[S|Ss],{get_map_elements,{f,Lbl}=Fail}}|Is], Bl, Acc)
- when Lbl =/= 0 ->
- Gets = beam_utils:joineven(Ss,Ds),
+split_block([{set,Ds,[S|Ss],{get_map_elements,Fail}}|Is], Bl, Acc) ->
+ Gets = beam_utils:join_even(Ss,Ds),
split_block(Is, [], [{get_map_elements,Fail,S,{list,Gets}}|make_block(Bl, Acc)]);
split_block([{set,[R],[],{'catch',L}}|Is], Bl, Acc) ->
split_block(Is, [], [{'catch',R,L}|make_block(Bl, Acc)]);
diff --git a/lib/compiler/src/beam_trim.erl b/lib/compiler/src/beam_trim.erl
index fad9c42584..8181e555a1 100644
--- a/lib/compiler/src/beam_trim.erl
+++ b/lib/compiler/src/beam_trim.erl
@@ -172,6 +172,10 @@ remap([{bif,Name,Fail,Ss,D}|Is], Map, Acc) ->
remap([{gc_bif,Name,Fail,Live,Ss,D}|Is], Map, Acc) ->
I = {gc_bif,Name,Fail,Live,[Map(S) || S <- Ss],Map(D)},
remap(Is, Map, [I|Acc]);
+remap([{get_map_elements,Fail,M,{list,L0}}|Is], Map, Acc) ->
+ L = [Map(E) || E <- L0],
+ I = {get_map_elements,Fail,Map(M),{list,L}},
+ remap(Is, Map, [I|Acc]);
remap([{bs_init,Fail,Info,Live,Ss0,Dst0}|Is], Map, Acc) ->
Ss = [Map(Src) || Src <- Ss0],
Dst = Map(Dst0),
@@ -275,6 +279,8 @@ frame_size([{kill,_}|Is], Safe) ->
frame_size(Is, Safe);
frame_size([{make_fun2,_,_,_,_}|Is], Safe) ->
frame_size(Is, Safe);
+frame_size([{get_map_elements,{f,L},_,_}|Is], Safe) ->
+ frame_size_branch(L, Is, Safe);
frame_size([{deallocate,N}|_], _) -> N;
frame_size([{line,_}|Is], Safe) ->
frame_size(Is, Safe);
diff --git a/lib/compiler/src/beam_type.erl b/lib/compiler/src/beam_type.erl
index cdddad4153..7ab548152e 100644
--- a/lib/compiler/src/beam_type.erl
+++ b/lib/compiler/src/beam_type.erl
@@ -149,9 +149,10 @@ simplify_basic_1([], Ts, Acc) ->
%%
simplify_float(Is0, Ts0) ->
{Is1,Ts} = simplify_float_1(Is0, Ts0, [], []),
- Is2 = flt_need_heap(Is1),
+ Is2 = opt_fmoves(Is1, []),
+ Is3 = flt_need_heap(Is2),
try
- {flt_liveness(Is2),Ts}
+ {flt_liveness(Is3),Ts}
catch
throw:not_possible -> not_possible
end.
@@ -202,14 +203,15 @@ simplify_float_1([{set,_,_,{'catch',_}}=I|Is]=Is0, _Ts, Rs0, Acc0) ->
simplify_float_1(Is, tdb_new(), Rs0, [I|Acc]);
simplify_float_1([{set,_,_,{line,_}}=I|Is], Ts, Rs, Acc) ->
simplify_float_1(Is, Ts, Rs, [I|Acc]);
+simplify_float_1([I|Is], Ts0, [], Acc) ->
+ Ts = update(I, Ts0),
+ simplify_float_1(Is, Ts, [], [I|Acc]);
simplify_float_1([I|Is]=Is0, Ts0, Rs0, Acc0) ->
Ts = update(I, Ts0),
{Rs,Acc} = flush(Rs0, Is0, Acc0),
simplify_float_1(Is, Ts, Rs, [I|checkerror(Acc)]);
-simplify_float_1([], Ts, Rs, Acc0) ->
- Acc = checkerror(Acc0),
- Is0 = reverse(flush_all(Rs, [], Acc)),
- Is = opt_fmoves(Is0, []),
+simplify_float_1([], Ts, [], Acc) ->
+ Is = reverse(Acc),
{Is,Ts}.
coerce_to_float({integer,I}=Int) ->
@@ -244,7 +246,7 @@ clearerror([], OrigIs) -> [{set,[],[],fclearerror}|OrigIs].
%% Combine two blocks and eliminate any move instructions that assign
%% to registers that are killed later in the block.
%%
-merge_blocks(B1, [{'%live',_}|B2]) ->
+merge_blocks(B1, [{'%live',_,_}|B2]) ->
merge_blocks_1(B1++[{set,[],[],stop_here}|B2]).
merge_blocks_1([{set,[],_,stop_here}|Is]) -> Is;
@@ -329,27 +331,27 @@ build_alloc(Words, Floats) -> {alloc,[{words,Words},{floats,Floats}]}.
%% flt_liveness([Instruction]) -> [Instruction]
%% (Re)calculate the number of live registers for each heap allocation
-%% function. We base liveness of the number of live registers at
-%% entry to the instruction sequence.
+%% function. We base liveness on the register map at the
+%% beginning of the instruction sequence.
%%
%% A 'not_possible' term will be thrown if the set of live registers
%% is not continuous at an allocation function (e.g. if {x,0} and {x,2}
%% are live, but not {x,1}).
-flt_liveness([{'%live',Live}=LiveInstr|Is]) ->
- flt_liveness_1(Is, init_regs(Live), [LiveInstr]).
+flt_liveness([{'%live',_Live,Regs}=LiveInstr|Is]) ->
+ flt_liveness_1(Is, Regs, [LiveInstr]).
-flt_liveness_1([{set,Ds,Ss,{alloc,_,Alloc}}|Is], Regs0, Acc) ->
- Live = live_regs(Regs0),
+flt_liveness_1([{set,Ds,Ss,{alloc,Live0,Alloc}}|Is], Regs0, Acc) ->
+ Live = min(Live0, live_regs(Regs0)),
I = {set,Ds,Ss,{alloc,Live,Alloc}},
- Regs = foldl(fun(R, A) -> set_live(R, A) end, Regs0, Ds),
+ Regs1 = init_regs(Live),
+ Regs = x_live(Ds, Regs1),
flt_liveness_1(Is, Regs, [I|Acc]);
flt_liveness_1([{set,Ds,_,_}=I|Is], Regs0, Acc) ->
- Regs = foldl(fun(R, A) -> set_live(R, A) end, Regs0, Ds),
- flt_liveness_1(Is, Regs, [I|Acc]);
-flt_liveness_1([{'%live',_}=I|Is], Regs, Acc) ->
+ Regs = x_live(Ds, Regs0),
flt_liveness_1(Is, Regs, [I|Acc]);
-flt_liveness_1([], _Regs, Acc) -> reverse(Acc).
+flt_liveness_1([{'%live',_,_}], _Regs, Acc) ->
+ reverse(Acc).
init_regs(Live) ->
(1 bsl Live) - 1.
@@ -364,14 +366,15 @@ live_regs_1(R, N) ->
1 -> live_regs_1(R bsr 1, N+1)
end.
-set_live({x,X}, Regs) -> Regs bor (1 bsl X);
-set_live(_, Regs) -> Regs.
+x_live([{x,N}|Rs], Regs) -> x_live(Rs, Regs bor (1 bsl N));
+x_live([_|Rs], Regs) -> x_live(Rs, Regs);
+x_live([], Regs) -> Regs.
%% update(Instruction, TypeDb) -> NewTypeDb
%% Update the type database to account for executing an instruction.
%%
%% First the cases for instructions inside basic blocks.
-update({'%live',_}, Ts) -> Ts;
+update({'%live',_,_}, Ts) -> Ts;
update({set,[D],[S],move}, Ts) ->
tdb_copy(S, D, Ts);
update({set,[D],[{integer,I},Reg],{bif,element,_}}, Ts0) ->
@@ -469,6 +472,7 @@ is_math_bif(erf, 1) -> true;
is_math_bif(erfc, 1) -> true;
is_math_bif(exp, 1) -> true;
is_math_bif(log, 1) -> true;
+is_math_bif(log2, 1) -> true;
is_math_bif(log10, 1) -> true;
is_math_bif(sqrt, 1) -> true;
is_math_bif(atan2, 2) -> true;
@@ -550,10 +554,10 @@ flush(Rs, [{set,[_],[],{put_tuple,_}}|_]=Is0, Acc0) ->
Acc = flush_all(Rs, Is0, Acc0),
{[],Acc};
flush(Rs0, [{set,Ds,Ss,_Op}|_], Acc0) ->
- Save = gb_sets:from_list(Ss),
+ Save = cerl_sets:from_list(Ss),
Acc = save_regs(Rs0, Save, Acc0),
Rs1 = foldl(fun(S, A) -> mark(S, A, clean) end, Rs0, Ss),
- Kill = gb_sets:from_list(Ds),
+ Kill = cerl_sets:from_list(Ds),
Rs = kill_regs(Rs1, Kill),
{Rs,Acc};
flush(Rs0, Is, Acc0) ->
@@ -576,7 +580,7 @@ save_regs(Rs, Save, Acc) ->
foldl(fun(R, A) -> save_reg(R, Save, A) end, Acc, Rs).
save_reg({I,V,dirty}, Save, Acc) ->
- case gb_sets:is_member(V, Save) of
+ case cerl_sets:is_element(V, Save) of
true -> [{set,[V],[{fr,I}],fmove}|checkerror(Acc)];
false -> Acc
end;
@@ -586,7 +590,7 @@ kill_regs(Rs, Kill) ->
[kill_reg(R, Kill) || R <- Rs].
kill_reg({_,V,_}=R, Kill) ->
- case gb_sets:is_member(V, Kill) of
+ case cerl_sets:is_element(V, Kill) of
true -> free;
false -> R
end;
diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl
index e82ba82d38..b82bcd0e95 100644
--- a/lib/compiler/src/beam_utils.erl
+++ b/lib/compiler/src/beam_utils.erl
@@ -26,7 +26,7 @@
code_at/2,bif_to_test/3,is_pure_test/1,
live_opt/1,delete_live_annos/1,combine_heap_needs/2]).
--export([joineven/2,spliteven/1]).
+-export([join_even/2,split_even/1]).
-import(lists, [member/2,sort/1,reverse/1,splitwith/2]).
@@ -128,8 +128,7 @@ empty_label_index() ->
%% Add an index for a label.
index_label(Lbl, Is0, Acc) ->
- Is = lists:dropwhile(fun({label,_}) -> true;
- (_) -> false end, Is0),
+ Is = drop_labels(Is0),
gb_trees:enter(Lbl, Is, Acc).
@@ -187,7 +186,7 @@ is_pure_test({test,is_lt,_,[_,_]}) -> true;
is_pure_test({test,is_nil,_,[_]}) -> true;
is_pure_test({test,is_nonempty_list,_,[_]}) -> true;
is_pure_test({test,test_arity,_,[_,_]}) -> true;
-is_pure_test({test,has_map_fields,_,[_,{list,_}]}) -> true;
+is_pure_test({test,has_map_fields,_,[_|_]}) -> true;
is_pure_test({test,Op,_,Ops}) ->
erl_internal:new_type_test(Op, length(Ops)).
@@ -196,7 +195,7 @@ is_pure_test({test,Op,_,Ops}) ->
%% Go through the instruction sequence in reverse execution
%% order, keep track of liveness and remove 'move' instructions
%% whose destination is a register that will not be used.
-%% Also insert {'%live',Live} annotations at the beginning
+%% Also insert {'%live',Live,Regs} annotations at the beginning
%% and end of each block.
%%
live_opt(Is0) ->
@@ -217,7 +216,7 @@ delete_live_annos([{block,Bl0}|Is]) ->
[] -> delete_live_annos(Is);
[_|_]=Bl -> [{block,Bl}|delete_live_annos(Is)]
end;
-delete_live_annos([{'%live',_}|Is]) ->
+delete_live_annos([{'%live',_,_}|Is]) ->
delete_live_annos(Is);
delete_live_annos([I|Is]) ->
[I|delete_live_annos(Is)];
@@ -344,14 +343,10 @@ check_liveness(R, [{call_ext,Live,_}=I|Is], St) ->
false ->
check_liveness(R, Is, St);
true ->
- %% We must make sure we don't check beyond this instruction
- %% or we will fall through into random unrelated code and
- %% get stuck in a loop.
- %%
- %% We don't want to overwrite a 'catch', so consider this
- %% register in use.
- %%
- {used,St}
+ %% We must make sure we don't check beyond this
+ %% instruction or we will fall through into random
+ %% unrelated code and get stuck in a loop.
+ {killed,St}
end
end;
check_liveness(R, [{call_fun,Live}|Is], St) ->
@@ -366,11 +361,6 @@ check_liveness(R, [{apply,Args}|Is], St) ->
{x,_} -> {killed,St};
{y,_} -> check_liveness(R, Is, St)
end;
-check_liveness({x,R}, [{'%live',Live}|Is], St) ->
- if
- R < Live -> check_liveness(R, Is, St);
- true -> {killed,St}
- end;
check_liveness(R, [{bif,Op,{f,Fail},Ss,D}|Is], St0) ->
case check_liveness_fail(R, Op, Ss, Fail, St0) of
{killed,St} = Killed ->
@@ -477,6 +467,22 @@ check_liveness(R, [{loop_rec_end,{f,Fail}}|_], St) ->
check_liveness_at(R, Fail, St);
check_liveness(R, [{line,_}|Is], St) ->
check_liveness(R, Is, St);
+check_liveness(R, [{get_map_elements,{f,Fail},S,{list,L}}|Is], St0) ->
+ {Ss,Ds} = split_even(L),
+ case member(R, [S|Ss]) of
+ true ->
+ {used,St0};
+ false ->
+ case check_liveness_at(R, Fail, St0) of
+ {killed,St}=Killed ->
+ case member(R, Ds) of
+ true -> Killed;
+ false -> check_liveness(R, Is, St)
+ end;
+ Other ->
+ Other
+ end
+ end;
check_liveness(_R, Is, St) when is_list(Is) ->
%% case Is of
%% [I|_] ->
@@ -554,7 +560,7 @@ check_killed_block(R, [{set,Ds,Ss,_Op}|Is]) ->
false -> check_killed_block(R, Is)
end
end;
-check_killed_block(R, [{'%live',Live}|Is]) ->
+check_killed_block(R, [{'%live',Live,_}|Is]) ->
case R of
{x,X} when X >= Live -> killed;
_ -> check_killed_block(R, Is)
@@ -577,7 +583,7 @@ check_used_block({x,X}=R, [{set,Ds,Ss,{alloc,Live,Op}}|Is], St) ->
end;
check_used_block(R, [{set,Ds,Ss,Op}|Is], St) ->
check_used_block_1(R, Ss, Ds, Op, Is, St);
-check_used_block(R, [{'%live',Live}|Is], St) ->
+check_used_block(R, [{'%live',Live,_}|Is], St) ->
case R of
{x,X} when X >= Live -> {killed,St};
_ -> check_used_block(R, Is, St)
@@ -617,13 +623,15 @@ is_reg_used_at_1(R, Lbl, St0) ->
end.
index_labels_1([{label,Lbl}|Is0], Acc) ->
- Is = lists:dropwhile(fun({label,_}) -> true;
- (_) -> false end, Is0),
+ Is = drop_labels(Is0),
index_labels_1(Is0, [{Lbl,Is}|Acc]);
index_labels_1([_|Is], Acc) ->
index_labels_1(Is, Acc);
index_labels_1([], Acc) -> gb_trees:from_orddict(sort(Acc)).
+drop_labels([{label,_}|Is]) -> drop_labels(Is);
+drop_labels(Is) -> Is.
+
%% Help functions for combine_heap_needs.
combine_alloc_lists(Al1, Al2) ->
@@ -678,9 +686,9 @@ live_opt([{test,bs_start_match2,Fail,Live,[Src,_],_}=I|Is], _, D, Acc) ->
%% Other instructions.
live_opt([{block,Bl0}|Is], Regs0, D, Acc) ->
- Live0 = {'%live',live_regs(Regs0)},
+ Live0 = {'%live',live_regs(Regs0),Regs0},
{Bl,Regs} = live_opt_block(reverse(Bl0), Regs0, D, [Live0]),
- Live = {'%live',live_regs(Regs)},
+ Live = {'%live',live_regs(Regs),Regs},
live_opt(Is, Regs, D, [{block,[Live|Bl]}|Acc]);
live_opt([{label,L}=I|Is], Regs, D0, Acc) ->
D = gb_trees:insert(L, Regs, D0),
@@ -758,13 +766,9 @@ live_opt([{line,_}=I|Is], Regs, D, Acc) ->
live_opt(Is, Regs, D, [I|Acc]);
%% The following instructions can occur if the "compilation" has been
-%% started from a .S file using the 'asm' option.
+%% started from a .S file using the 'from_asm' option.
live_opt([{trim,_,_}=I|Is], Regs, D, Acc) ->
live_opt(Is, Regs, D, [I|Acc]);
-live_opt([{allocate,_,Live}=I|Is], _, D, Acc) ->
- live_opt(Is, live_call(Live), D, [I|Acc]);
-live_opt([{allocate_heap,_,_,Live}=I|Is], _, D, Acc) ->
- live_opt(Is, live_call(Live), D, [I|Acc]);
live_opt([{'%',_}=I|Is], Regs, D, Acc) ->
live_opt(Is, Regs, D, [I|Acc]);
live_opt([{recv_set,_}=I|Is], Regs, D, Acc) ->
@@ -835,14 +839,14 @@ x_live([], Regs) -> Regs.
is_live(X, Regs) -> ((Regs bsr X) band 1) =:= 1.
-%% spliteven/1
+%% split_even/1
%% [1,2,3,4,5,6] -> {[1,3,5],[2,4,6]}
-spliteven(Rs) -> spliteven(Rs,[],[]).
-spliteven([],Ss,Ds) -> {reverse(Ss),reverse(Ds)};
-spliteven([S,D|Rs],Ss,Ds) ->
- spliteven(Rs,[S|Ss],[D|Ds]).
+split_even(Rs) -> split_even(Rs,[],[]).
+split_even([],Ss,Ds) -> {reverse(Ss),reverse(Ds)};
+split_even([S,D|Rs],Ss,Ds) ->
+ split_even(Rs,[S|Ss],[D|Ds]).
-%% joineven/1
+%% join_even/2
%% {[1,3,5],[2,4,6]} -> [1,2,3,4,5,6]
-joineven([],[]) -> [];
-joineven([S|Ss],[D|Ds]) -> [S,D|joineven(Ss,Ds)].
+join_even([],[]) -> [];
+join_even([S|Ss],[D|Ds]) -> [S,D|join_even(Ss,Ds)].
diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl
index 0acc7a227f..780826b126 100644
--- a/lib/compiler/src/beam_validator.erl
+++ b/lib/compiler/src/beam_validator.erl
@@ -22,14 +22,13 @@
%% Avoid warning for local function error/1 clashing with autoimported BIF.
-compile({no_auto_import,[error/1]}).
--export([file/1, files/1]).
%% Interface for compiler.
-export([module/2, format_error/1]).
-include("beam_disasm.hrl").
--import(lists, [reverse/1,foldl/3,foreach/2,member/2,dropwhile/2]).
+-import(lists, [reverse/1,foldl/3,foreach/2,dropwhile/2]).
-define(MAXREG, 1024).
@@ -40,38 +39,12 @@
-define(DBG_FORMAT(F, D), ok).
-endif.
-%%%
-%%% API functions.
-%%%
-
--spec file(file:filename()) -> 'ok' | {'error', term()}.
-
-file(Name) when is_list(Name) ->
- case case filename:extension(Name) of
- ".S" -> s_file(Name);
- ".beam" -> beam_file(Name)
- end of
- [] -> ok;
- Es -> {error,Es}
- end.
-
--spec files([file:filename()]) -> 'ok'.
-
-files([F|Fs]) ->
- ?DBG_FORMAT("# Verifying: ~p~n", [F]),
- case file(F) of
- ok -> ok;
- {error,Es} ->
- io:format("~tp:~n~ts~n", [F,format_error(Es)])
- end,
- files(Fs);
-files([]) -> ok.
-
%% To be called by the compiler.
module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts)
when is_atom(Mod), is_list(Exp), is_list(Attr), is_integer(Lc) ->
case validate(Mod, Fs) of
- [] -> {ok,Code};
+ [] ->
+ {ok,Code};
Es0 ->
Es = [{?MODULE,E} || E <- Es0],
{error,[{atom_to_list(Mod),Es}]}
@@ -79,12 +52,6 @@ module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts)
-spec format_error(term()) -> iolist().
-format_error([]) -> [];
-format_error([{{M,F,A},{I,Off,Desc}}|Es]) ->
- [io_lib:format(" ~p:~p/~p+~p:~n ~p - ~p~n",
- [M,F,A,Off,I,Desc])|format_error(Es)];
-format_error([Error|Es]) ->
- [format_error(Error)|format_error(Es)];
format_error({{_M,F,A},{I,Off,limit}}) ->
io_lib:format(
"function ~p/~p+~p:~n"
@@ -103,8 +70,6 @@ format_error({{_M,F,A},{I,Off,Desc}}) ->
" Internal consistency check failed - please report this bug.~n"
" Instruction: ~p~n"
" Error: ~p:~n", [F,A,Off,I,Desc]);
-format_error({Module,Error}) ->
- [Module:format_error(Error)];
format_error(Error) ->
io_lib:format("~p~n", [Error]).
@@ -112,36 +77,6 @@ format_error(Error) ->
%%% Local functions follow.
%%%
-s_file(Name) ->
- {ok,Is} = file:consult(Name),
- {module,Module} = lists:keyfind(module, 1, Is),
- Fs = find_functions(Is),
- validate(Module, Fs).
-
-find_functions(Fs) ->
- find_functions_1(Fs, none, [], []).
-
-find_functions_1([{function,Name,Arity,Entry}|Is], Func, FuncAcc, Acc0) ->
- Acc = add_func(Func, FuncAcc, Acc0),
- find_functions_1(Is, {Name,Arity,Entry}, [], Acc);
-find_functions_1([I|Is], Func, FuncAcc, Acc) ->
- find_functions_1(Is, Func, [I|FuncAcc], Acc);
-find_functions_1([], Func, FuncAcc, Acc) ->
- reverse(add_func(Func, FuncAcc, Acc)).
-
-add_func(none, _, Acc) -> Acc;
-add_func({Name,Arity,Entry}, Is, Acc) ->
- [{function,Name,Arity,Entry,reverse(Is)}|Acc].
-
-beam_file(Name) ->
- try beam_disasm:file(Name) of
- {error,beam_lib,Reason} -> [{beam_lib,Reason}];
- #beam_file{module=Module, code=Code0} ->
- Code = normalize_disassembled_code(Code0),
- validate(Module, Code)
- catch _:_ -> [disassembly_failed]
- end.
-
%%%
%%% The validator follows.
%%%
@@ -196,23 +131,16 @@ validate_0(Module, [{function,Name,Ar,Entry,Code}|Fs], Ft) ->
try validate_1(Code, Name, Ar, Entry, Ft) of
_ -> validate_0(Module, Fs, Ft)
catch
- Error ->
+ throw:Error ->
+ %% Controlled error.
[Error|validate_0(Module, Fs, Ft)];
- error:Error ->
- [validate_error(Error, Module, Name, Ar)|validate_0(Module, Fs, Ft)]
+ Class:Error ->
+ %% Crash.
+ Stack = erlang:get_stacktrace(),
+ io:fwrite("Function: ~w/~w\n", [Name,Ar]),
+ erlang:raise(Class, Error, Stack)
end.
--ifdef(DEBUG).
-validate_error(Error, Module, Name, Ar) ->
- exit(validate_error_1(Error, Module, Name, Ar)).
--else.
-validate_error(Error, Module, Name, Ar) ->
- validate_error_1(Error, Module, Name, Ar).
--endif.
-validate_error_1(Error, Module, Name, Ar) ->
- {{Module,Name,Ar},
- {internal_error,'_',{Error,erlang:get_stacktrace()}}}.
-
-type index() :: non_neg_integer().
-type reg_tab() :: gb_trees:tree(index(), 'none' | {'value', _}).
@@ -225,8 +153,6 @@ validate_error_1(Error, Module, Name, Ar) ->
hf=0, %Available heap size for floats.
fls=undefined, %Floating point state.
ct=[], %List of hot catch/try labels
- bsm=undefined, %Bit syntax matching state.
- bits=undefined, %Number of bits in bit syntax binary.
setelem=false %Previous instruction was setelement/3.
}).
@@ -308,7 +234,7 @@ labels_1([{label,L}|Is], R) ->
labels_1([{line,_}|Is], R) ->
labels_1(Is, R);
labels_1(Is, R) ->
- {lists:reverse(R),Is}.
+ {reverse(R),Is}.
init_state(Arity) ->
Xs = init_regs(Arity, term),
@@ -403,10 +329,6 @@ valfun_1({init,{y,_}=Reg}, Vst) ->
set_type_y(initialized, Reg, Vst);
valfun_1({test_heap,Heap,Live}, Vst) ->
test_heap(Heap, Live, Vst);
-valfun_1({bif,_Op,nofail,Src,Dst}, Vst) ->
- %% The 'nofail' atom only occurs in disassembled code.
- validate_src(Src, Vst),
- set_type_reg(term, Dst, Vst);
valfun_1({bif,Op,{f,_},Src,Dst}=I, Vst) ->
case is_bif_safe(Op, length(Src)) of
false ->
@@ -432,18 +354,12 @@ valfun_1({put_tuple,Sz,Dst}, Vst0) when is_integer(Sz) ->
valfun_1({put,Src}, Vst) ->
assert_term(Src, Vst),
eat_heap(1, Vst);
-valfun_1({put_string,Sz,_,Dst}, Vst0) when is_integer(Sz) ->
- Vst = eat_heap(2*Sz, Vst0),
- set_type_reg(cons, Dst, Vst);
%% Instructions for optimization of selective receives.
valfun_1({recv_mark,{f,Fail}}, Vst) when is_integer(Fail) ->
Vst;
valfun_1({recv_set,{f,Fail}}, Vst) when is_integer(Fail) ->
Vst;
%% Misc.
-valfun_1({'%live',Live}, Vst) ->
- verify_live(Live, Vst),
- Vst;
valfun_1(remove_message, Vst) ->
Vst;
valfun_1({'%',_}, Vst) ->
@@ -494,37 +410,33 @@ valfun_1({'try',Dst,{f,Fail}}, Vst0) ->
Vst = #vst{current=#st{ct=Fails}=St} =
set_type_y({trytag,[Fail]}, Dst, Vst0),
Vst#vst{current=St#st{ct=[[Fail]|Fails]}};
-valfun_1({catch_end,Reg}, #vst{current=#st{ct=[Fail|Fails]}=St0}=Vst0) ->
+valfun_1({catch_end,Reg}, #vst{current=#st{ct=[Fail|Fails]}}=Vst0) ->
case get_special_y_type(Reg, Vst0) of
{catchtag,Fail} ->
- Vst = #vst{current=St} =
- set_type_y(initialized_ct, Reg,
- Vst0#vst{current=St0#st{ct=Fails}}),
+ Vst = #vst{current=St} = set_catch_end(Reg, Vst0),
Xs = gb_trees_from_list([{0,term}]),
- Vst#vst{current=St#st{x=Xs,fls=undefined}};
+ Vst#vst{current=St#st{x=Xs,ct=Fails,fls=undefined}};
Type ->
error({bad_type,Type})
end;
-valfun_1({try_end,Reg}, #vst{current=#st{ct=[Fail|Fails]}=St}=Vst0) ->
+valfun_1({try_end,Reg}, #vst{current=#st{ct=[Fail|Fails]}=St0}=Vst0) ->
case get_special_y_type(Reg, Vst0) of
{trytag,Fail} ->
Vst = case Fail of
[FailLabel] -> branch_state(FailLabel, Vst0);
_ -> Vst0
end,
- set_type_reg(initialized_ct, Reg,
- Vst#vst{current=St#st{ct=Fails,fls=undefined}});
+ St = St0#st{ct=Fails,fls=undefined},
+ set_catch_end(Reg, Vst#vst{current=St});
Type ->
error({bad_type,Type})
end;
-valfun_1({try_case,Reg}, #vst{current=#st{ct=[Fail|Fails]}=St0}=Vst0) ->
+valfun_1({try_case,Reg}, #vst{current=#st{ct=[Fail|Fails]}}=Vst0) ->
case get_special_y_type(Reg, Vst0) of
{trytag,Fail} ->
- Vst = #vst{current=St} =
- set_type_y(initialized_ct, Reg,
- Vst0#vst{current=St0#st{ct=Fails}}),
- Xs = gb_trees_from_list([{0,{atom,[]}},{1,term},{2,term}]), %XXX
- Vst#vst{current=St#st{x=Xs,fls=undefined}};
+ Vst = #vst{current=St} = set_catch_end(Reg, Vst0),
+ Xs = gb_trees_from_list([{0,{atom,[]}},{1,term},{2,term}]),
+ Vst#vst{current=St#st{x=Xs,ct=Fails,fls=undefined}};
Type ->
error({bad_type,Type})
end;
@@ -602,8 +514,6 @@ valfun_4({call_ext_last,Live,Func,StkSize},
tail_call(Func, Live, Vst);
valfun_4({call_ext_last,_,_,_}, #vst{current=#st{numy=NumY}}) ->
error({allocated,NumY});
-valfun_4({make_fun,_,_,Live}, Vst) ->
- call('fun', Live, Vst);
valfun_4({make_fun2,_,_,_,Live}, Vst) ->
call(make_fun, Live, Vst);
%% Other BIFs
@@ -620,8 +530,6 @@ valfun_4({bif,element,{f,Fail},[Pos,Tuple],Dst}, Vst0) ->
TupleType = upgrade_tuple_type({tuple,[get_tuple_size(PosType)]}, TupleType0),
Vst = set_type(TupleType, Tuple, Vst1),
set_type_reg(term, Dst, Vst);
-valfun_4({raise,{f,_}=Fail,Src,Dst}, Vst) ->
- valfun_4({bif,raise,Fail,Src,Dst}, Vst);
valfun_4({bif,Op,{f,Fail},Src,Dst}, Vst0) ->
validate_src(Src, Vst0),
Vst = branch_state(Fail, Vst0),
@@ -738,32 +646,6 @@ valfun_4({bs_save2,Ctx,SavePoint}, Vst) ->
valfun_4({bs_restore2,Ctx,SavePoint}, Vst) ->
bsm_restore(Ctx, SavePoint, Vst);
-%% Bit syntax instructions.
-valfun_4({bs_start_match,{f,_Fail}=F,Src}, Vst) ->
- valfun_4({test,bs_start_match,F,[Src]}, Vst);
-valfun_4({test,bs_start_match,{f,Fail},[Src]}, Vst) ->
- assert_term(Src, Vst),
- bs_start_match(branch_state(Fail, Vst));
-
-valfun_4({bs_save,SavePoint}, Vst) ->
- bs_assert_state(Vst),
- bs_save(SavePoint, Vst);
-valfun_4({bs_restore,SavePoint}, Vst) ->
- bs_assert_state(Vst),
- bs_assert_savepoint(SavePoint, Vst),
- Vst;
-valfun_4({test,bs_skip_bits,{f,Fail},[Src,_,_]}, Vst) ->
- bs_assert_state(Vst),
- assert_term(Src, Vst),
- branch_state(Fail, Vst);
-valfun_4({test,bs_test_tail,{f,Fail},_}, Vst) ->
- bs_assert_state(Vst),
- branch_state(Fail, Vst);
-valfun_4({test,_,{f,Fail},[_,_,_,Dst]}, Vst0) ->
- bs_assert_state(Vst0),
- Vst = branch_state(Fail, Vst0),
- set_type_reg({integer,[]}, Dst, Vst);
-
%% Other test instructions.
valfun_4({test,is_float,{f,Lbl},[Float]}, Vst) ->
assert_term(Float, Vst),
@@ -779,9 +661,17 @@ valfun_4({test,test_arity,{f,Lbl},[Tuple,Sz]}, Vst) when is_integer(Sz) ->
assert_type(tuple, Tuple, Vst),
set_type_reg({tuple,Sz}, Tuple, branch_state(Lbl, Vst));
valfun_4({test,has_map_fields,{f,Lbl},Src,{list,List}}, Vst) ->
- validate_src([Src], Vst),
- assert_strict_literal_termorder(List),
+ assert_type(map, Src, Vst),
+ assert_unique_map_keys(List),
branch_state(Lbl, Vst);
+valfun_4({test,is_map,{f,Lbl},[Src]}, Vst0) ->
+ Vst = branch_state(Lbl, Vst0),
+ case Src of
+ {Tag,_} when Tag =:= x; Tag =:= y ->
+ set_type_reg(map, Src, Vst);
+ _ ->
+ Vst
+ end;
valfun_4({test,_Op,{f,Lbl},Src}, Vst) ->
validate_src(Src, Vst),
branch_state(Lbl, Vst);
@@ -795,9 +685,6 @@ valfun_4({bs_utf8_size,{f,Fail},A,Dst}, Vst) ->
valfun_4({bs_utf16_size,{f,Fail},A,Dst}, Vst) ->
assert_term(A, Vst),
set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst));
-valfun_4({bs_bits_to_bytes,{f,Fail},Src,Dst}, Vst) ->
- assert_term(Src, Vst),
- set_type_reg({integer,[]}, Dst, branch_state(Fail, Vst));
valfun_4({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) ->
verify_live(Live, Vst0),
if
@@ -808,8 +695,7 @@ valfun_4({bs_init2,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) ->
end,
Vst1 = heap_alloc(Heap, Vst0),
Vst2 = branch_state(Fail, Vst1),
- Vst3 = prune_x_regs(Live, Vst2),
- Vst = bs_zero_bits(Vst3),
+ Vst = prune_x_regs(Live, Vst2),
set_type_reg(binary, Dst, Vst);
valfun_4({bs_init_bits,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) ->
verify_live(Live, Vst0),
@@ -821,8 +707,7 @@ valfun_4({bs_init_bits,{f,Fail},Sz,Heap,Live,_,Dst}, Vst0) ->
end,
Vst1 = heap_alloc(Heap, Vst0),
Vst2 = branch_state(Fail, Vst1),
- Vst3 = prune_x_regs(Live, Vst2),
- Vst = bs_zero_bits(Vst3),
+ Vst = prune_x_regs(Live, Vst2),
set_type_reg(binary, Dst, Vst);
valfun_4({bs_append,{f,Fail},Bits,Heap,Live,_Unit,Bin,_Flags,Dst}, Vst0) ->
verify_live(Live, Vst0),
@@ -830,54 +715,36 @@ valfun_4({bs_append,{f,Fail},Bits,Heap,Live,_Unit,Bin,_Flags,Dst}, Vst0) ->
assert_term(Bin, Vst0),
Vst1 = heap_alloc(Heap, Vst0),
Vst2 = branch_state(Fail, Vst1),
- Vst3 = prune_x_regs(Live, Vst2),
- Vst = bs_zero_bits(Vst3),
+ Vst = prune_x_regs(Live, Vst2),
set_type_reg(binary, Dst, Vst);
valfun_4({bs_private_append,{f,Fail},Bits,_Unit,Bin,_Flags,Dst}, Vst0) ->
assert_term(Bits, Vst0),
assert_term(Bin, Vst0),
- Vst1 = branch_state(Fail, Vst0),
- Vst = bs_zero_bits(Vst1),
+ Vst = branch_state(Fail, Vst0),
set_type_reg(binary, Dst, Vst);
valfun_4({bs_put_string,Sz,_}, Vst) when is_integer(Sz) ->
Vst;
-valfun_4({bs_put_binary,{f,Fail},Sz,_,_,Src}=I, Vst0) ->
- assert_term(Sz, Vst0),
- assert_term(Src, Vst0),
- Vst = bs_align_check(I, Vst0),
+valfun_4({bs_put_binary,{f,Fail},Sz,_,_,Src}, Vst) ->
+ assert_term(Sz, Vst),
+ assert_term(Src, Vst),
branch_state(Fail, Vst);
-valfun_4({bs_put_float,{f,Fail},Sz,_,_,Src}=I, Vst0) ->
- assert_term(Sz, Vst0),
- assert_term(Src, Vst0),
- Vst = bs_align_check(I, Vst0),
+valfun_4({bs_put_float,{f,Fail},Sz,_,_,Src}, Vst) ->
+ assert_term(Sz, Vst),
+ assert_term(Src, Vst),
branch_state(Fail, Vst);
-valfun_4({bs_put_integer,{f,Fail},Sz,_,_,Src}=I, Vst0) ->
- assert_term(Sz, Vst0),
- assert_term(Src, Vst0),
- Vst = bs_align_check(I, Vst0),
+valfun_4({bs_put_integer,{f,Fail},Sz,_,_,Src}, Vst) ->
+ assert_term(Sz, Vst),
+ assert_term(Src, Vst),
branch_state(Fail, Vst);
-valfun_4({bs_put_utf8,{f,Fail},_,Src}=I, Vst0) ->
- assert_term(Src, Vst0),
- Vst = bs_align_check(I, Vst0),
+valfun_4({bs_put_utf8,{f,Fail},_,Src}, Vst) ->
+ assert_term(Src, Vst),
branch_state(Fail, Vst);
-valfun_4({bs_put_utf16,{f,Fail},_,Src}=I, Vst0) ->
- assert_term(Src, Vst0),
- Vst = bs_align_check(I, Vst0),
+valfun_4({bs_put_utf16,{f,Fail},_,Src}, Vst) ->
+ assert_term(Src, Vst),
branch_state(Fail, Vst);
-valfun_4({bs_put_utf32,{f,Fail},_,Src}=I, Vst0) ->
- assert_term(Src, Vst0),
- Vst = bs_align_check(I, Vst0),
+valfun_4({bs_put_utf32,{f,Fail},_,Src}, Vst) ->
+ assert_term(Src, Vst),
branch_state(Fail, Vst);
-%% Old bit syntax construction (before R10B).
-valfun_4({bs_init,_,_}, Vst) ->
- bs_zero_bits(Vst);
-valfun_4({bs_need_buf,_}, Vst) -> Vst;
-valfun_4({bs_final,{f,Fail},Dst}, Vst0) ->
- Vst = branch_state(Fail, Vst0),
- set_type_reg(binary, Dst, Vst);
-valfun_4({bs_final2,Src,Dst}, Vst0) ->
- assert_term(Src, Vst0),
- set_type_reg(binary, Dst, Vst0);
%% Map instructions.
valfun_4({put_map_assoc,{f,Fail},Src,Dst,Live,{list,List}}, Vst) ->
verify_put_map(Fail, Src, Dst, Live, List, Vst);
@@ -889,26 +756,32 @@ valfun_4(_, _) ->
error(unknown_instruction).
verify_get_map(Fail, Src, List, Vst0) ->
- assert_term(Src, Vst0),
+ assert_type(map, Src, Vst0),
Vst1 = branch_state(Fail, Vst0),
- Lits = mmap(fun(L,_R) -> [L] end, List),
- assert_strict_literal_termorder(Lits),
+ Keys = extract_map_keys(List),
+ assert_unique_map_keys(Keys),
verify_get_map_pair(List,Vst0,Vst1).
+extract_map_keys([Key,_Val|T]) ->
+ [Key|extract_map_keys(T)];
+extract_map_keys([]) -> [].
+
verify_get_map_pair([],_,Vst) -> Vst;
verify_get_map_pair([Src,Dst|Vs],Vst0,Vsti) ->
assert_term(Src, Vst0),
verify_get_map_pair(Vs,Vst0,set_type_reg(term,Dst,Vsti)).
verify_put_map(Fail, Src, Dst, Live, List, Vst0) ->
+ assert_type(map, Src, Vst0),
verify_live(Live, Vst0),
verify_y_init(Vst0),
foreach(fun (Term) -> assert_term(Term, Vst0) end, List),
- assert_term(Src, Vst0),
Vst1 = heap_alloc(0, Vst0),
Vst2 = branch_state(Fail, Vst1),
Vst = prune_x_regs(Live, Vst2),
- set_type_reg(term, Dst, Vst).
+ Keys = extract_map_keys(List),
+ assert_unique_map_keys(Keys),
+ set_type_reg(map, Dst, Vst).
%%
%% Common code for validating bs_get* instructions.
@@ -936,9 +809,6 @@ validate_bs_skip_utf(Fail, Ctx, Live, Vst0) ->
%%
val_dsetel({move,_,_}, Vst) ->
Vst;
-val_dsetel({put_string,0,{string,""},_}, Vst) ->
- %% An empty string is OK since it doesn't build anything.
- Vst;
val_dsetel({call_ext,3,{extfunc,erlang,setelement,3}}, #vst{current=St}=Vst) ->
Vst#vst{current=St#st{setelem=true}};
val_dsetel({set_tuple_element,_,_,_}, #vst{current=#st{setelem=false}}) ->
@@ -972,7 +842,7 @@ call(Name, Live, #vst{current=St}=Vst) ->
Type when Type =/= exception ->
%% Type is never 'exception' because it has been handled earlier.
Xs = gb_trees_from_list([{0,Type}]),
- Vst#vst{current=St#st{x=Xs,f=init_fregs(),bsm=undefined}}
+ Vst#vst{current=St#st{x=Xs,f=init_fregs()}}
end.
%% Tail call.
@@ -1030,7 +900,7 @@ allocate(_, _, _, _, #vst{current=#st{numy=Numy}}) ->
error({existing_stack_frame,{size,Numy}}).
deallocate(#vst{current=St}=Vst) ->
- Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none,bsm=undefined}}.
+ Vst#vst{current=St#st{y=init_regs(0, initialized),numy=none}}.
test_heap(Heap, Live, Vst0) ->
verify_live(Live, Vst0),
@@ -1038,7 +908,7 @@ test_heap(Heap, Live, Vst0) ->
heap_alloc(Heap, Vst).
heap_alloc(Heap, #vst{current=St0}=Vst) ->
- St1 = kill_heap_allocation(St0#st{bsm=undefined}),
+ St1 = kill_heap_allocation(St0),
St = heap_alloc_1(Heap, St1),
Vst#vst{current=St}.
@@ -1122,75 +992,25 @@ assert_freg_set(Fr, _) -> error({bad_source,Fr}).
%%% Maps
-%% ensure that a list of literals has a strict
-%% ascending term order (also meaning unique literals).
-%% Single item lists may have registers.
-assert_strict_literal_termorder([_]) -> ok;
-assert_strict_literal_termorder(Ls) ->
- Vs = lists:map(fun (L) -> get_literal(L) end, Ls),
- case check_strict_value_termorder(Vs) of
- true -> ok;
- false -> error({not_strict_order, Ls})
- end.
-
-%% usage:
-%% mmap(fun(A,B) -> [{A,B}] end, [1,2,3,4]),
-%% [{1,2},{3,4}]
-
-mmap(F,List) ->
- {arity,Ar} = erlang:fun_info(F,arity),
- mmap(F,Ar,List).
-mmap(_F,_,[]) -> [];
-mmap(F,Ar,List) ->
- {Hd,Tl} = lists:split(Ar,List),
- apply(F,Hd) ++ mmap(F,Ar,Tl).
-
-check_strict_value_termorder([]) -> true;
-check_strict_value_termorder([_]) -> true;
-check_strict_value_termorder([V1,V2]) ->
- erts_internal:cmp_term(V1,V2) < 0;
-check_strict_value_termorder([V1,V2|Vs]) ->
- case erts_internal:cmp_term(V1,V2) < 0 of
- true -> check_strict_value_termorder([V2|Vs]);
- false -> false
- end.
+%% A single-item list may contain either a literal or a register.
+%%
+%% A list with more than one item must contain unique literals.
+%%
+%% An empty list is not allowed.
-%%%
-%%% Binary matching.
-%%%
-%%% Possible values for the bsm field (=bit syntax matching state).
-%%%
-%%% undefined - Undefined (initial state). No matching instructions allowed.
-%%%
-%%% (gb set) - The gb set contains the defined save points.
-%%%
-%%% The bsm field is reset to 'undefined' by instructions that may cause a
-%%% a garbage collection (might move the binary) and/or context switch
-%%% (may invalidate the save points).
-
-bs_start_match(#vst{current=#st{bsm=undefined}=St}=Vst) ->
- Vst#vst{current=St#st{bsm=gb_sets:empty()}};
-bs_start_match(Vst) ->
- %% Must retain save points here - it is possible to restore back
- %% to a previous binary.
- Vst.
-
-bs_save(Reg, #vst{current=#st{bsm=Saved}=St}=Vst)
- when is_integer(Reg), Reg < ?MAXREG ->
- Vst#vst{current=St#st{bsm=gb_sets:add(Reg, Saved)}};
-bs_save(_, _) -> error(limit).
-
-bs_assert_savepoint(Reg, #vst{current=#st{bsm=Saved}}) ->
- case gb_sets:is_member(Reg, Saved) of
- false -> error({no_save_point,Reg});
- true -> ok
+assert_unique_map_keys([]) ->
+ %% There is no reason to use the get_map_elements and
+ %% has_map_fields instructions with empty lists.
+ error(empty_field_list);
+assert_unique_map_keys([_]) ->
+ ok;
+assert_unique_map_keys([_,_|_]=Ls) ->
+ Vs = [get_literal(L) || L <- Ls],
+ case length(Vs) =:= sets:size(sets:from_list(Vs)) of
+ true -> ok;
+ false -> error(keys_not_unique)
end.
-bs_assert_state(#vst{current=#st{bsm=undefined}}) ->
- error(no_bs_match_state);
-bs_assert_state(_) -> ok.
-
-
%%%
%%% New binary matching instructions.
%%%
@@ -1236,55 +1056,7 @@ bsm_restore(Reg, SavePoint, Vst) ->
end;
_ -> error({illegal_restore,SavePoint,range})
end.
-
-
-%%%
-%%% Validation of alignment in the bit syntax. (Currently, construction only.)
-%%%
-%%% We make sure that the aligned flag is only set when we can be sure of the
-%%% aligment.
-%%%
-
-bs_zero_bits(#vst{current=St}=Vst) ->
- Vst#vst{current=St#st{bits=0}}.
-
-bs_align_check({bs_put_utf8,_,Flags,_}, #vst{current=#st{}=St}=Vst) ->
- bs_verify_flags(Flags, St),
- Vst;
-bs_align_check({bs_put_utf16,_,Flags,_}, #vst{current=#st{}=St}=Vst) ->
- bs_verify_flags(Flags, St),
- Vst;
-bs_align_check({bs_put_utf32,_,Flags,_}, #vst{current=#st{}=St}=Vst) ->
- bs_verify_flags(Flags, St),
- Vst;
-bs_align_check({_,_,Sz,U,Flags,_}, #vst{current=#st{bits=Bits}=St}=Vst) ->
- bs_verify_flags(Flags, St),
- bs_update_bits(Bits, Sz, U, St, Vst).
-
-bs_update_bits(undefined, _, _, _, Vst) -> Vst;
-bs_update_bits(Bits0, {integer,Sz}, U, St, Vst) ->
- Bits = Bits0 + U*Sz,
- Vst#vst{current=St#st{bits=Bits}};
-bs_update_bits(_, {atom,all}, _, _, Vst) ->
- %% A binary will not change the alignment.
- Vst;
-bs_update_bits(_, _, U, _, Vst) when U rem 8 =:= 0 ->
- %% Units of 8, 16, and so on will not change the aligment.
- Vst;
-bs_update_bits(_, _, _, St, Vst) ->
- %% We can no longer be sure about aligment.
- Vst#vst{current=St#st{bits=undefined}}.
-
-bs_verify_flags({field_flags,Fl}, #st{bits=Bits}) ->
- case bs_is_aligned(Fl) of
- false -> ok;
- true when is_integer(Bits), Bits rem 8 =:= 0 -> ok;
- true -> error({aligned_flag_set,{bits,Bits}})
- end.
-bs_is_aligned(Fl) when is_integer(Fl) -> Fl band 1 =:= 1;
-bs_is_aligned(Fl) when is_list(Fl) -> member(aligned, Fl).
-
%%%
%%% Keeping track of types.
%%%
@@ -1300,35 +1072,26 @@ set_type_reg(Type, {x,X}, #vst{current=#st{x=Xs}=St}=Vst)
set_type_reg(Type, Reg, Vst) ->
set_type_y(Type, Reg, Vst).
-set_type_y(Type, {y,Y}=Reg, #vst{current=#st{y=Ys0,numy=NumY}=St}=Vst)
+set_type_y(Type, {y,Y}=Reg, #vst{current=#st{y=Ys0}=St}=Vst)
when is_integer(Y), 0 =< Y ->
limit_check(Y),
- case {Y,NumY} of
- {_,none} ->
- error({no_stack_frame,Reg});
- {_,_} when Y > NumY ->
- error({y_reg_out_of_range,Reg,NumY});
- {_,_} ->
- Ys = if Type =:= initialized_ct ->
- gb_trees:enter(Y, initialized, Ys0);
- true ->
- case gb_trees:lookup(Y, Ys0) of
- none ->
- gb_trees:insert(Y, Type, Ys0);
- {value,uinitialized} ->
- gb_trees:insert(Y, Type, Ys0);
- {value,{catchtag,_}=Tag} ->
- error(Tag);
- {value,{trytag,_}=Tag} ->
- error(Tag);
- {value,_} ->
- gb_trees:update(Y, Type, Ys0)
- end
- end,
- Vst#vst{current=St#st{y=Ys}}
- end;
+ Ys = case gb_trees:lookup(Y, Ys0) of
+ none ->
+ error({invalid_store,Reg,Type});
+ {value,{catchtag,_}=Tag} ->
+ error(Tag);
+ {value,{trytag,_}=Tag} ->
+ error(Tag);
+ {value,_} ->
+ gb_trees:update(Y, Type, Ys0)
+ end,
+ Vst#vst{current=St#st{y=Ys}};
set_type_y(Type, Reg, #vst{}) -> error({invalid_store,Reg,Type}).
+set_catch_end({y,Y}, #vst{current=#st{y=Ys0}=St}=Vst) ->
+ Ys = gb_trees:update(Y, initialized, Ys0),
+ Vst#vst{current=St#st{y=Ys}}.
+
assert_term(Src, Vst) ->
get_term_type(Src, Vst),
ok.
@@ -1389,7 +1152,8 @@ assert_term(Src, Vst) ->
%%
%% number Integer or Float of unknown value
%%
-
+%% map Map.
+%%
assert_type(WantedType, Term, Vst) ->
assert_type(WantedType, get_term_type(Term, Vst)).
@@ -1471,6 +1235,7 @@ get_term_type_1(nil=T, _) -> T;
get_term_type_1({atom,A}=T, _) when is_atom(A) -> T;
get_term_type_1({float,F}=T, _) when is_float(F) -> T;
get_term_type_1({integer,I}=T, _) when is_integer(I) -> T;
+get_term_type_1({literal,Map}, _) when is_map(Map) -> map;
get_term_type_1({literal,_}=T, _) -> T;
get_term_type_1({x,X}=Reg, #vst{current=#st{x=Xs}}) when is_integer(X) ->
case gb_trees:lookup(X, Xs) of
@@ -1525,14 +1290,13 @@ merge_states(L, St, Branched) when L =/= 0 ->
{value,OtherSt} -> merge_states_1(St, OtherSt)
end.
-merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0,bsm=Bsm0}=St,
- #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1,bsm=Bsm1}) ->
+merge_states_1(#st{x=Xs0,y=Ys0,numy=NumY0,h=H0,ct=Ct0},
+ #st{x=Xs1,y=Ys1,numy=NumY1,h=H1,ct=Ct1}) ->
NumY = merge_stk(NumY0, NumY1),
Xs = merge_regs(Xs0, Xs1),
Ys = merge_y_regs(Ys0, Ys1),
Ct = merge_ct(Ct0, Ct1),
- Bsm = merge_bsm(Bsm0, Bsm1),
- St#st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct,bsm=Bsm}.
+ #st{x=Xs,y=Ys,numy=NumY,h=min(H0, H1),ct=Ct}.
merge_stk(S, S) -> S;
merge_stk(_, _) -> undecided.
@@ -1562,20 +1326,24 @@ merge_regs_1([], [_|_]) -> [];
merge_regs_1([_|_], []) -> [].
merge_y_regs(Rs0, Rs1) ->
- Rs = merge_y_regs_1(gb_trees:to_list(Rs0), gb_trees:to_list(Rs1)),
- gb_trees_from_list(Rs).
+ case {gb_trees:size(Rs0),gb_trees:size(Rs1)} of
+ {Sz0,Sz1} when Sz0 < Sz1 ->
+ merge_y_regs_1(Sz0-1, Rs1, Rs0);
+ {_,Sz1} ->
+ merge_y_regs_1(Sz1-1, Rs0, Rs1)
+ end.
-merge_y_regs_1([Same|Rs1], [Same|Rs2]) ->
- [Same|merge_y_regs_1(Rs1, Rs2)];
-merge_y_regs_1([{R1,_}|Rs1], [{R2,_}|_]=Rs2) when R1 < R2 ->
- [{R1,uninitialized}|merge_y_regs_1(Rs1, Rs2)];
-merge_y_regs_1([{R1,_}|_]=Rs1, [{R2,_}|Rs2]) when R1 > R2 ->
- [{R2,uninitialized}|merge_y_regs_1(Rs1, Rs2)];
-merge_y_regs_1([{R,Type1}|Rs1], [{R,Type2}|Rs2]) ->
- [{R,merge_types(Type1, Type2)}|merge_y_regs_1(Rs1, Rs2)];
-merge_y_regs_1([], []) -> [];
-merge_y_regs_1([], [_|_]=Rs) -> Rs;
-merge_y_regs_1([_|_]=Rs, []) -> Rs.
+merge_y_regs_1(Y, S, Regs0) when Y >= 0 ->
+ Type0 = gb_trees:get(Y, Regs0),
+ case gb_trees:get(Y, S) of
+ Type0 ->
+ merge_y_regs_1(Y-1, S, Regs0);
+ Type1 ->
+ Type = merge_types(Type0, Type1),
+ Regs = gb_trees:update(Y, Type, Regs0),
+ merge_y_regs_1(Y-1, S, Regs)
+ end;
+merge_y_regs_1(_, _, Regs) -> Regs.
%% merge_types(Type1, Type2) -> Type
%% Return the most specific type possible.
@@ -1615,10 +1383,6 @@ merge_types(T1, T2) when T1 =/= T2 ->
%% Too different. All we know is that the type is a 'term'.
term.
-merge_bsm(undefined, _) -> undefined;
-merge_bsm(_, undefined) -> undefined;
-merge_bsm(Bsm0, Bsm1) -> gb_sets:intersection(Bsm0, Bsm1).
-
tuple_sz([Sz]) -> Sz;
tuple_sz(Sz) -> Sz.
@@ -1725,6 +1489,7 @@ bif_type(is_float, [_], _) -> bool;
bif_type(is_function, [_], _) -> bool;
bif_type(is_integer, [_], _) -> bool;
bif_type(is_list, [_], _) -> bool;
+bif_type(is_map, [_], _) -> bool;
bif_type(is_number, [_], _) -> bool;
bif_type(is_pid, [_], _) -> bool;
bif_type(is_port, [_], _) -> bool;
@@ -1754,6 +1519,7 @@ is_bif_safe(is_float, 1) -> true;
is_bif_safe(is_function, 1) -> true;
is_bif_safe(is_integer, 1) -> true;
is_bif_safe(is_list, 1) -> true;
+is_bif_safe(is_map, 1) -> true;
is_bif_safe(is_number, 1) -> true;
is_bif_safe(is_pid, 1) -> true;
is_bif_safe(is_port, 1) -> true;
@@ -1796,8 +1562,6 @@ return_type_1(M, F, A, _) when is_atom(M), is_atom(F), is_integer(A), A >= 0 ->
return_type_erl(exit, 1) -> exception;
return_type_erl(throw, 1) -> exception;
-return_type_erl(fault, 1) -> exception;
-return_type_erl(fault, 2) -> exception;
return_type_erl(error, 1) -> exception;
return_type_erl(error, 2) -> exception;
return_type_erl(F, A) when is_atom(F), is_integer(A), A >= 0 -> term.
@@ -1818,6 +1582,7 @@ return_type_math(erf, 1) -> {float,[]};
return_type_math(erfc, 1) -> {float,[]};
return_type_math(exp, 1) -> {float,[]};
return_type_math(log, 1) -> {float,[]};
+return_type_math(log2, 1) -> {float,[]};
return_type_math(log10, 1) -> {float,[]};
return_type_math(sqrt, 1) -> {float,[]};
return_type_math(atan2, 2) -> {float,[]};
@@ -1839,52 +1604,3 @@ error(Error) -> exit(Error).
-else.
error(Error) -> throw(Error).
-endif.
-
-
-%%%
-%%% Rewrite disassembled code to the same format as we used internally
-%%% to not have to worry later.
-%%%
-
-normalize_disassembled_code(Fs) ->
- Index = ndc_index(Fs, []),
- ndc(Fs, Index, []).
-
-ndc_index([{function,Name,Arity,Entry,_Code}|Fs], Acc) ->
- ndc_index(Fs, [{{Name,Arity},Entry}|Acc]);
-ndc_index([], Acc) ->
- gb_trees:from_orddict(lists:sort(Acc)).
-
-ndc([{function,Name,Arity,Entry,Code0}|Fs], D, Acc) ->
- Code = ndc_1(Code0, D, []),
- ndc(Fs, D, [{function,Name,Arity,Entry,Code}|Acc]);
-ndc([], _, Acc) -> reverse(Acc).
-
-ndc_1([{call=Op,A,{_,F,A}}|Is], D, Acc) ->
- ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]);
-ndc_1([{call_only=Op,A,{_,F,A}}|Is], D, Acc) ->
- ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)}}|Acc]);
-ndc_1([{call_last=Op,A,{_,F,A},Sz}|Is], D, Acc) ->
- ndc_1(Is, D, [{Op,A,{f,gb_trees:get({F,A}, D)},Sz}|Acc]);
-ndc_1([{arithbif,Op,F,Src,Dst}|Is], D, Acc) ->
- ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]);
-ndc_1([{arithfbif,Op,F,Src,Dst}|Is], D, Acc) ->
- ndc_1(Is, D, [{bif,Op,F,Src,Dst}|Acc]);
-ndc_1([{test,bs_start_match2=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([{test,bs_get_binary2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]);
-ndc_1([{test,bs_get_float2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]);
-ndc_1([{test,bs_get_integer2=Op,F,[A1,Live,A3,A4,A5,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3,A4,A5],Dst}|Acc]);
-ndc_1([{test,bs_get_utf8=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([{test,bs_get_utf16=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([{test,bs_get_utf32=Op,F,[A1,Live,A3,Dst]}|Is], D, Acc) ->
- ndc_1(Is, D, [{test,Op,F,Live,[A1,A3],Dst}|Acc]);
-ndc_1([I|Is], D, Acc) ->
- ndc_1(Is, D, [I|Acc]);
-ndc_1([], _, Acc) ->
- reverse(Acc).
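(Side note on the map-key validation introduced above.) The validator now requires the literal keys passed to get_map_elements/has_map_fields/put_map to be non-empty and unique. A minimal standalone sketch of the same check, under a hypothetical module name, might look like this:

    %% Hypothetical sketch of the key-uniqueness rule used above: an empty
    %% key list is rejected, a single key passes unchecked (it may be a
    %% register), and longer lists must consist of distinct literal terms.
    -module(map_key_check).
    -export([assert_unique/1]).

    assert_unique([]) ->
        erlang:error(empty_field_list);
    assert_unique([_]) ->
        ok;
    assert_unique([_,_|_] = Keys) ->
        case length(Keys) =:= sets:size(sets:from_list(Keys)) of
            true  -> ok;
            false -> erlang:error(keys_not_unique)
        end.

For example, assert_unique([a,b,c]) returns ok, while assert_unique([a,a]) raises keys_not_unique.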
diff --git a/lib/compiler/src/beam_z.erl b/lib/compiler/src/beam_z.erl
index c2a6ef604e..47e786034d 100644
--- a/lib/compiler/src/beam_z.erl
+++ b/lib/compiler/src/beam_z.erl
@@ -74,22 +74,21 @@ undo_rename({bs_init,F,{I,Extra,U,Flags},Live,[Sz,Src],Dst}) ->
{I,F,Sz,Extra,Live,U,Src,Flags,Dst};
undo_rename({bs_init,_,bs_init_writable=I,_,_,_}) ->
I;
+undo_rename({test,bs_match_string=Op,F,[Ctx,Bin0]}) ->
+ Bits = bit_size(Bin0),
+ Bin = case Bits rem 8 of
+ 0 -> Bin0;
+ Rem -> <<Bin0/bitstring,0:(8-Rem)>>
+ end,
+ {test,Op,F,[Ctx,Bits,{string,binary_to_list(Bin)}]};
undo_rename({put_map,Fail,assoc,S,D,R,L}) ->
{put_map_assoc,Fail,S,D,R,L};
undo_rename({put_map,Fail,exact,S,D,R,L}) ->
{put_map_exact,Fail,S,D,R,L};
undo_rename({test,has_map_fields,Fail,[Src|List]}) ->
- {test,has_map_fields,Fail,Src,{list,[to_typed_literal(V)||V<-List]}};
-undo_rename({get_map_elements,Fail,Src,{list, List}}) ->
- {get_map_elements,Fail,Src,{list,[to_typed_literal(V)||V<-List]}};
+ {test,has_map_fields,Fail,Src,{list,List}};
+undo_rename({get_map_elements,Fail,Src,{list,List}}) ->
+ {get_map_elements,Fail,Src,{list,List}};
undo_rename({select,I,Reg,Fail,List}) ->
{I,Reg,Fail,{list,List}};
undo_rename(I) -> I.
-
-%% to_typed_literal(Arg)
-%% transform Arg to specific literal i.e. float | integer | atom if applicable
-to_typed_literal({literal, V}) when is_float(V) -> {float, V};
-to_typed_literal({literal, V}) when is_atom(V) -> {atom, V};
-to_typed_literal({literal, V}) when is_integer(V) -> {integer, V};
-to_typed_literal({literal, []}) -> nil;
-to_typed_literal(V) -> V.
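The new bs_match_string clause above pads the matched bitstring to a whole number of bytes before emitting a string operand. A minimal sketch of just that padding step (hypothetical helper name):

    %% Pad a bitstring with zero bits up to the next byte boundary, keeping
    %% the original bit count, so it can be expressed as a list of bytes.
    pad_to_bytes(Bin0) when is_bitstring(Bin0) ->
        Bits = bit_size(Bin0),
        Bin = case Bits rem 8 of
                  0   -> Bin0;
                  Rem -> <<Bin0/bitstring, 0:(8 - Rem)>>
              end,
        {Bits, binary_to_list(Bin)}.

For instance, pad_to_bytes(<<2#101:3>>) returns {3,[160]}: three significant bits, padded with five zero bits into the byte 2#10100000.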
diff --git a/lib/compiler/src/cerl.erl b/lib/compiler/src/cerl.erl
index 7a2c3d70de..ea960abc1a 100644
--- a/lib/compiler/src/cerl.erl
+++ b/lib/compiler/src/cerl.erl
@@ -123,11 +123,14 @@
bitstr_flags/1,
%% keep map exports here for now
+ c_map_pattern/1,
+ is_c_map/1,
map_es/1,
map_arg/1,
update_c_map/3,
c_map/1, is_c_map_empty/1,
ann_c_map/2, ann_c_map/3,
+ ann_c_map_pattern/2,
map_pair_op/1,map_pair_key/1,map_pair_val/1,
update_c_map_pair/4,
c_map_pair/2,
@@ -135,7 +138,8 @@
]).
-export_type([c_binary/0, c_bitstr/0, c_call/0, c_clause/0, c_cons/0, c_fun/0,
- c_literal/0, c_map/0, c_map_pair/0, c_module/0, c_tuple/0,
+ c_let/0, c_literal/0, c_map/0, c_map_pair/0,
+ c_module/0, c_tuple/0,
c_values/0, c_var/0, cerl/0, var_name/0]).
-include("core_parse.hrl").
@@ -252,7 +256,7 @@
%% @see c_primop/2
%% @see c_receive/1
%% @see c_seq/2
-%% @see c_try/3
+%% @see c_try/5
%% @see c_tuple/1
%% @see c_values/1
%% @see c_var/1
@@ -431,6 +435,8 @@ is_literal_term([H | T]) ->
is_literal_term(T) when is_tuple(T) ->
is_literal_term_list(tuple_to_list(T));
is_literal_term(B) when is_bitstring(B) -> true;
+is_literal_term(M) when is_map(M) ->
+ is_literal_term_list(maps:to_list(M));
is_literal_term(_) ->
false.
@@ -1450,7 +1456,7 @@ is_proper_list(_) ->
%% X4]</code>.
%%
%% @see c_cons/2
-%% @see c_nil/1
+%% @see c_nil/0
%% @see is_c_list/1
%% @see list_length/1
%% @see make_list/2
@@ -1481,7 +1487,7 @@ abstract_list([]) ->
%% efficient.</p>
%%
%% @see c_cons/2
-%% @see c_nil/1
+%% @see c_nil/0
%% @see is_c_list/1
%% @see list_elements/1
@@ -1577,6 +1583,20 @@ ann_make_list(_, [], Node) ->
%% ---------------------------------------------------------------------
%% maps
+%% @spec is_c_map(Node::cerl()) -> boolean()
+%%
+%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
+%% map constructor, otherwise <code>false</code>.
+
+-spec is_c_map(cerl()) -> boolean().
+
+is_c_map(#c_map{}) ->
+ true;
+is_c_map(#c_literal{val = V}) when is_map(V) ->
+ true;
+is_c_map(_) ->
+ false.
+
-spec map_es(c_map()) -> [c_map_pair()].
map_es(#c_map{es = Es}) ->
@@ -1590,7 +1610,17 @@ map_arg(#c_map{arg=M}) ->
-spec c_map([c_map_pair()]) -> c_map().
c_map(Pairs) ->
- #c_map{es=Pairs}.
+ ann_c_map([], Pairs).
+
+-spec c_map_pattern([c_map_pair()]) -> c_map().
+
+c_map_pattern(Pairs) ->
+ #c_map{es=Pairs, is_pat=true}.
+
+-spec ann_c_map_pattern([term()], [c_map_pair()]) -> c_map().
+
+ann_c_map_pattern(As, Pairs) ->
+ #c_map{anno=As, es=Pairs, is_pat=true}.
-spec is_c_map_empty(c_map() | c_literal()) -> boolean().
@@ -1598,25 +1628,13 @@ is_c_map_empty(#c_map{ es=[] }) -> true;
is_c_map_empty(#c_literal{val=M}) when is_map(M),map_size(M) =:= 0 -> true;
is_c_map_empty(_) -> false.
--spec ann_c_map([term()], [cerl()]) -> c_map() | c_literal().
+-spec ann_c_map([term()], [c_map_pair()]) -> c_map() | c_literal().
-ann_c_map(As,Es) ->
+ann_c_map(As, Es) ->
ann_c_map(As, #c_literal{val=#{}}, Es).
-spec ann_c_map([term()], c_map() | c_literal(), [c_map_pair()]) -> c_map() | c_literal().
-ann_c_map(As,#c_literal{val=Mval}=M,Es) when is_map(Mval), map_size(Mval) =:= 0 ->
- Pairs = [[Ck,Cv]||#c_map_pair{key=Ck,val=Cv}<-Es],
- IsLit = lists:foldl(fun(Pair,Res) ->
- Res andalso is_lit_list(Pair)
- end, true, Pairs),
- Fun = fun(Pair) -> [K,V] = lit_list_vals(Pair), {K,V} end,
- case IsLit of
- false ->
- #c_map{arg=M, es=Es, anno=As };
- true ->
- #c_literal{anno=As, val=maps:from_list(lists:map(Fun, Pairs))}
- end;
ann_c_map(As,#c_literal{val=M},Es) when is_map(M) ->
fold_map_pairs(As,Es,M);
ann_c_map(As,M,Es) ->
@@ -1644,14 +1662,14 @@ fold_map_pairs(As,[#c_map_pair{op=#c_literal{val=exact},key=Ck,val=Cv}=E|Es],M)
end;
false ->
#c_map{arg=#c_literal{val=M,anno=As}, es=[E|Es], anno=As }
- end;
-fold_map_pairs(As,Es,M) ->
- #c_map{arg=#c_literal{val=M,anno=As}, es=Es, anno=As }.
+ end.
-%-spec update_c_map(c_map() | c_literal(), [c_map_pair()]) -> c_map() | c_literal().
+-spec update_c_map(c_map(), cerl(), [cerl()]) -> c_map() | c_literal().
-update_c_map(Old,M,Es) ->
- #c_map{arg=M, es = Es, anno = get_ann(Old)}.
+update_c_map(#c_map{is_pat=true}=Old, M, Es) ->
+ Old#c_map{arg=M, es=Es};
+update_c_map(#c_map{is_pat=false}=Old, M, Es) ->
+ ann_c_map(get_ann(Old), M, Es).
map_pair_key(#c_map_pair{key=K}) -> K.
map_pair_val(#c_map_pair{val=V}) -> V.
@@ -1974,7 +1992,7 @@ update_c_fname(Node, Atom, Arity) ->
%%
%% @see c_fname/2
%% @see c_var/1
-%% @see c_var_name/1
+%% @see var_name/1
-spec is_c_fname(cerl()) -> boolean().
@@ -3652,7 +3670,7 @@ c_try(Expr, Vs, Body, Evs, Handler) ->
%% @spec ann_c_try(As::[term()], Expression::cerl(),
%% Variables::[cerl()], Body::cerl(),
%% EVars::[cerl()], Handler::cerl()) -> cerl()
-%% @see c_try/3
+%% @see c_try/5
-spec ann_c_try([term()], cerl(), [cerl()], cerl(), [cerl()], cerl()) ->
c_try().
@@ -3665,7 +3683,7 @@ ann_c_try(As, Expr, Vs, Body, Evs, Handler) ->
%% @spec update_c_try(Old::cerl(), Expression::cerl(),
%% Variables::[cerl()], Body::cerl(),
%% EVars::[cerl()], Handler::cerl()) -> cerl()
-%% @see c_try/3
+%% @see c_try/5
-spec update_c_try(c_try(), cerl(), [cerl()], cerl(), [cerl()], cerl()) ->
c_try().
@@ -3680,7 +3698,7 @@ update_c_try(Node, Expr, Vs, Body, Evs, Handler) ->
%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
%% try-expression, otherwise <code>false</code>.
%%
-%% @see c_try/3
+%% @see c_try/5
-spec is_c_try(cerl()) -> boolean().
@@ -3694,7 +3712,7 @@ is_c_try(_) ->
%%
%% @doc Returns the expression subtree of an abstract try-expression.
%%
-%% @see c_try/3
+%% @see c_try/5
-spec try_arg(c_try()) -> cerl().
@@ -3707,7 +3725,7 @@ try_arg(Node) ->
%% @doc Returns the list of success variable subtrees of an abstract
%% try-expression.
%%
-%% @see c_try/3
+%% @see c_try/5
-spec try_vars(c_try()) -> [cerl()].
@@ -3719,7 +3737,7 @@ try_vars(Node) ->
%%
%% @doc Returns the success body subtree of an abstract try-expression.
%%
-%% @see c_try/3
+%% @see c_try/5
-spec try_body(c_try()) -> cerl().
@@ -3732,7 +3750,7 @@ try_body(Node) ->
%% @doc Returns the list of exception variable subtrees of an abstract
%% try-expression.
%%
-%% @see c_try/3
+%% @see c_try/5
-spec try_evars(c_try()) -> [cerl()].
@@ -3745,7 +3763,7 @@ try_evars(Node) ->
%% @doc Returns the exception body subtree of an abstract
%% try-expression.
%%
-%% @see c_try/3
+%% @see c_try/5
-spec try_handler(c_try()) -> cerl().
@@ -3767,7 +3785,7 @@ try_handler(Node) ->
%% @see update_c_catch/2
%% @see is_c_catch/1
%% @see catch_body/1
-%% @see c_try/3
+%% @see c_try/5
-spec c_catch(cerl()) -> c_catch().
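For orientation, the newly exported c_map_pattern/1 and is_c_map/1 can be exercised as in this hedged sketch (hypothetical function name; literal folding may turn the constructed map into a #c_literal{}, and a real pattern would use an exact pair rather than the assoc pair c_map_pair/2 builds):

    %% Build a map construction node and a map pattern node, then test the
    %% new predicate; literal maps also count as abstract maps.
    map_nodes_demo() ->
        Pair = cerl:c_map_pair(cerl:c_atom(key), cerl:c_int(42)),
        Map  = cerl:c_map([Pair]),            %% construction (may fold to a literal)
        Pat  = cerl:c_map_pattern([Pair]),    %% pattern node (is_pat=true)
        {cerl:is_c_map(Map), cerl:is_c_map(Pat), cerl:is_c_map(cerl:c_nil())}.
        %% expected: {true, true, false}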
diff --git a/lib/compiler/src/cerl_clauses.erl b/lib/compiler/src/cerl_clauses.erl
index 87bd47c08b..ef74c5b76f 100644
--- a/lib/compiler/src/cerl_clauses.erl
+++ b/lib/compiler/src/cerl_clauses.erl
@@ -354,29 +354,29 @@ match(P, E, Bs) ->
{false, Bs}
end
end;
- map ->
- %% The most we can do is to say "definitely no match" if a
- %% map pattern is matched against non-map data.
- case E of
- any ->
- {false, Bs};
- _ ->
- case type(E) of
- literal ->
- case is_map(concrete(E)) of
- false ->
- none;
- true ->
- {false, Bs}
- end;
- cons ->
- none;
- tuple ->
- none;
- _ ->
- {false, Bs}
- end
- end;
+ map ->
+ %% The most we can do is to say "definitely no match" if a
+ %% map pattern is matched against non-map data.
+ case E of
+ any ->
+ {false, Bs};
+ _ ->
+ case type(E) of
+ literal ->
+ case is_map(concrete(E)) of
+ false ->
+ none;
+ true ->
+ {false, Bs}
+ end;
+ cons ->
+ none;
+ tuple ->
+ none;
+ _ ->
+ {false, Bs}
+ end
+ end;
_ ->
match_1(P, E, Bs)
end.
diff --git a/lib/compiler/src/cerl_inline.erl b/lib/compiler/src/cerl_inline.erl
index f8489a800b..02cdb966ce 100644
--- a/lib/compiler/src/cerl_inline.erl
+++ b/lib/compiler/src/cerl_inline.erl
@@ -445,15 +445,14 @@ i_var_1(R, Opnd, Ctxt, Env, S) ->
residualize_var(R, S);
false ->
S1 = st__mark_inner_pending(L, S),
- case catch {ok, visit(Opnd, S1)} of
- {ok, {E, S2}} ->
+ try visit(Opnd, S1) of
+ {E, S2} ->
%% Note that we pass the current environment and
%% context to `copy', but not the current renaming.
S3 = st__clear_inner_pending(L, S2),
- copy(R, Opnd, E, Ctxt, Env, S3);
- {'EXIT', X} ->
- exit(X);
- X ->
+ copy(R, Opnd, E, Ctxt, Env, S3)
+ catch
+ throw:X ->
%% If we use destructive update for the
%% `inner-pending' flag, we must make sure to clear
%% it also if we make a nonlocal return.
@@ -1128,8 +1127,8 @@ i_call_3(M, F, As, E, Ctxt, Env, S) ->
%% Note that we extract the results of argument expessions here; the
%% expressions could still be sequences with side effects.
Vs = [concrete(result(A)) || A <- As],
- case catch {ok, apply(atom_val(M), atom_val(F), Vs)} of
- {ok, V} ->
+ try apply(atom_val(M), atom_val(F), Vs) of
+ V ->
%% Evaluation completed normally - try to turn the result
%% back into a syntax tree (representing a literal).
case is_literal_term(V) of
@@ -1142,8 +1141,9 @@ i_call_3(M, F, As, E, Ctxt, Env, S) ->
false ->
%% The result could not be represented as a literal.
i_call_4(M, F, As, E, Ctxt, Env, S)
- end;
- _ ->
+ end
+ catch
+ error:_ ->
%% The evaluation attempt did not complete normally.
i_call_4(M, F, As, E, Ctxt, Env, S)
end.
@@ -1736,12 +1736,11 @@ copy_1(R, Opnd, E, Ctxt, Env, S) ->
copy_inline(R, Opnd, E, Ctxt, Env, S) ->
S1 = st__mark_outer_pending(Opnd#opnd.loc, S),
- case catch {ok, copy_inline_1(R, E, Ctxt, Env, S1)} of
- {ok, {E1, S2}} ->
- {E1, st__clear_outer_pending(Opnd#opnd.loc, S2)};
- {'EXIT', X} ->
- exit(X);
- X ->
+ try copy_inline_1(R, E, Ctxt, Env, S1) of
+ {E1, S2} ->
+ {E1, st__clear_outer_pending(Opnd#opnd.loc, S2)}
+ catch
+ throw:X ->
%% If we use destructive update for the `outer-pending'
%% flag, we must make sure to clear it upon a nonlocal
%% return.
@@ -1758,19 +1757,16 @@ copy_inline_1(R, E, Ctxt, Env, S) ->
copy_inline_2(R, E, Ctxt, Env, S);
false ->
S1 = new_active_effort(get_effort_limit(S), S),
- case catch {ok, copy_inline_2(R, E, Ctxt, Env, S1)} of
- {ok, {E1, S2}} ->
+ try copy_inline_2(R, E, Ctxt, Env, S1) of
+ {E1, S2} ->
%% Revert to the old effort counter.
- {E1, revert_effort(S, S2)};
- {counter_exceeded, effort, _} ->
+ {E1, revert_effort(S, S2)}
+ catch
+ throw:{counter_exceeded, effort, _} ->
%% Aborted this inlining attempt because too much
%% effort was spent. Residualize the variable and
%% revert to the previous state.
- residualize_var(R, S);
- {'EXIT', X} ->
- exit(X);
- X ->
- throw(X)
+ residualize_var(R, S)
end
end.
@@ -1796,11 +1792,12 @@ copy_inline_2(R, E, Ctxt, Env, S) ->
%% close to zero at this point. (This is an extension to the
%% original algorithm.)
S1 = new_active_size(Limit + apply_size(length(Ctxt#app.opnds)), S),
- case catch {ok, inline(E, Ctxt, ren__identity(), Env, S1)} of
- {ok, {E1, S2}} ->
+ try inline(E, Ctxt, ren__identity(), Env, S1) of
+ {E1, S2} ->
%% Revert to the old size counter.
- {E1, revert_size(S, S2)};
- {counter_exceeded, size, S2} ->
+ {E1, revert_size(S, S2)}
+ catch
+ throw:{counter_exceeded, size, S2} ->
%% Aborted this inlining attempt because it got too big.
%% Residualize the variable and revert to the old size
%% counter. (It is important that we do not also revert the
@@ -1813,11 +1810,7 @@ copy_inline_2(R, E, Ctxt, Env, S) ->
%% must make sure to clear the flags of any nested
%% app-contexts upon aborting; see `inline' for details.
S4 = reset_nested_apps(Ctxt, S3), % for effect
- residualize_var(R, S4);
- {'EXIT', X} ->
- exit(X);
- X ->
- throw(X)
+ residualize_var(R, S4)
end.
reset_nested_apps(#app{ctxt = Ctxt, loc = L}, S) ->
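The cerl_inline changes above replace the old `case catch {ok, ...} of` idiom with try ... catch, so exits no longer have to be caught and re-raised by hand. A minimal illustration of the two idioms (hypothetical function names):

    %% Old idiom: wrap the result in {ok,_} so it cannot be confused with a
    %% thrown term, and re-raise exits explicitly.
    old_style(F) ->
        case catch {ok, F()} of
            {ok, Result}     -> {value, Result};
            {'EXIT', Reason} -> exit(Reason);
            Thrown           -> {thrown, Thrown}
        end.

    %% New idiom: try/catch separates normal returns from throws directly,
    %% and exits propagate untouched, so no wrapping is needed.
    new_style(F) ->
        try F() of
            Result -> {value, Result}
        catch
            throw:Thrown -> {thrown, Thrown}
        end.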
diff --git a/lib/compiler/src/cerl_sets.erl b/lib/compiler/src/cerl_sets.erl
new file mode 100644
index 0000000000..4df78dc432
--- /dev/null
+++ b/lib/compiler/src/cerl_sets.erl
@@ -0,0 +1,206 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(cerl_sets).
+
+%% Standard interface.
+-export([new/0,is_set/1,size/1,to_list/1,from_list/1]).
+-export([is_element/2,add_element/2,del_element/2]).
+-export([union/2,union/1,intersection/2,intersection/1]).
+-export([is_disjoint/2]).
+-export([subtract/2,is_subset/2]).
+-export([fold/3,filter/2]).
+
+-export_type([set/0, set/1]).
+
+%%------------------------------------------------------------------------------
+
+-type set() :: set(_).
+-opaque set(Element) :: #{Element => 'ok'}.
+
+%%------------------------------------------------------------------------------
+
+%% new() -> Set
+-spec new() -> set().
+
+new() -> #{}.
+
+%% is_set(Set) -> boolean().
+%% Return 'true' if Set is a set of elements, else 'false'.
+-spec is_set(Set) -> boolean() when
+ Set :: term().
+
+is_set(S) when is_map(S) -> true;
+is_set(_) -> false.
+
+%% size(Set) -> int().
+%% Return the number of elements in Set.
+-spec size(Set) -> non_neg_integer() when
+ Set :: set().
+
+size(S) -> maps:size(S).
+
+%% to_list(Set) -> [Elem].
+%% Return the elements in Set as a list.
+-spec to_list(Set) -> List when
+ Set :: set(Element),
+ List :: [Element].
+
+to_list(S) -> maps:keys(S).
+
+%% from_list([Elem]) -> Set.
+%% Build a set from the elements in List.
+-spec from_list(List) -> Set when
+ List :: [Element],
+ Set :: set(Element).
+from_list(Ls) -> maps:from_list([{K,ok}||K<-Ls]).
+
+%% is_element(Element, Set) -> boolean().
+%% Return 'true' if Element is an element of Set, else 'false'.
+-spec is_element(Element, Set) -> boolean() when
+ Set :: set(Element).
+
+is_element(E,S) ->
+ case S of
+ #{E := _} -> true;
+ _ -> false
+ end.
+
+%% add_element(Element, Set) -> Set.
+%% Return Set with Element inserted in it.
+-spec add_element(Element, Set1) -> Set2 when
+ Set1 :: set(Element),
+ Set2 :: set(Element).
+
+add_element(E,S) -> S#{E=>ok}.
+
+-spec del_element(Element, Set1) -> Set2 when
+ Set1 :: set(Element),
+ Set2 :: set(Element).
+
+%% del_element(Element, Set) -> Set.
+%% Return Set but with Element removed.
+del_element(E,S) -> maps:remove(E,S).
+
+%% union(Set1, Set2) -> Set
+%% Return the union of Set1 and Set2.
+-spec union(Set1, Set2) -> Set3 when
+ Set1 :: set(Element),
+ Set2 :: set(Element),
+ Set3 :: set(Element).
+
+union(S1,S2) -> maps:merge(S1,S2).
+
+%% union([Set]) -> Set
+%% Return the union of the list of sets.
+-spec union(SetList) -> Set when
+ SetList :: [set(Element)],
+ Set :: set(Element).
+
+union([S1,S2|Ss]) ->
+ union1(union(S1, S2), Ss);
+union([S]) -> S;
+union([]) -> new().
+
+union1(S1, [S2|Ss]) ->
+ union1(union(S1, S2), Ss);
+union1(S1, []) -> S1.
+
+%% intersection(Set1, Set2) -> Set.
+%% Return the intersection of Set1 and Set2.
+-spec intersection(Set1, Set2) -> Set3 when
+ Set1 :: set(Element),
+ Set2 :: set(Element),
+ Set3 :: set(Element).
+
+intersection(S1, S2) ->
+ filter(fun (E) -> is_element(E, S1) end, S2).
+
+%% intersection([Set]) -> Set.
+%% Return the intersection of the list of sets.
+-spec intersection(SetList) -> Set when
+ SetList :: [set(Element),...],
+ Set :: set(Element).
+
+intersection([S1,S2|Ss]) ->
+ intersection1(intersection(S1, S2), Ss);
+intersection([S]) -> S.
+
+intersection1(S1, [S2|Ss]) ->
+ intersection1(intersection(S1, S2), Ss);
+intersection1(S1, []) -> S1.
+
+%% is_disjoint(Set1, Set2) -> boolean().
+%% Check whether Set1 and Set2 are disjoint.
+-spec is_disjoint(Set1, Set2) -> boolean() when
+ Set1 :: set(Element),
+ Set2 :: set(Element).
+
+is_disjoint(S1, S2) when map_size(S1) < map_size(S2) ->
+ fold(fun (_, false) -> false;
+ (E, true) -> not is_element(E, S2)
+ end, true, S1);
+is_disjoint(S1, S2) ->
+ fold(fun (_, false) -> false;
+ (E, true) -> not is_element(E, S1)
+ end, true, S2).
+
+%% subtract(Set1, Set2) -> Set.
+%% Return all and only the elements of Set1 which are not also in
+%% Set2.
+-spec subtract(Set1, Set2) -> Set3 when
+ Set1 :: set(Element),
+ Set2 :: set(Element),
+ Set3 :: set(Element).
+
+subtract(S1, S2) ->
+ filter(fun (E) -> not is_element(E, S2) end, S1).
+
+%% is_subset(Set1, Set2) -> boolean().
+%% Return 'true' when every element of Set1 is also a member of
+%% Set2, else 'false'.
+-spec is_subset(Set1, Set2) -> boolean() when
+ Set1 :: set(Element),
+ Set2 :: set(Element).
+
+is_subset(S1, S2) ->
+ fold(fun (E, Sub) -> Sub andalso is_element(E, S2) end, true, S1).
+
+%% fold(Fun, Accumulator, Set) -> Accumulator.
+%% Fold function Fun over all elements in Set and return Accumulator.
+-spec fold(Function, Acc0, Set) -> Acc1 when
+ Function :: fun((Element, AccIn) -> AccOut),
+ Set :: set(Element),
+ Acc0 :: Acc,
+ Acc1 :: Acc,
+ AccIn :: Acc,
+ AccOut :: Acc.
+
+fold(F, Init, D) ->
+ lists:foldl(fun(E,Acc) -> F(E,Acc) end,Init,maps:keys(D)).
+
+%% filter(Fun, Set) -> Set.
+%% Filter Set with Fun.
+-spec filter(Pred, Set1) -> Set2 when
+ Pred :: fun((Element) -> boolean()),
+ Set1 :: set(Element),
+ Set2 :: set(Element).
+
+filter(F, D) ->
+ maps:from_list(lists:filter(fun({K,_}) -> F(K) end, maps:to_list(D))).
diff --git a/lib/compiler/src/cerl_trees.erl b/lib/compiler/src/cerl_trees.erl
index b93da8e97f..f1bf0e02e7 100644
--- a/lib/compiler/src/cerl_trees.erl
+++ b/lib/compiler/src/cerl_trees.erl
@@ -19,7 +19,7 @@
%% @doc Basic functions on Core Erlang abstract syntax trees.
%%
%% <p>Syntax trees are defined in the module <a
-%% href=""><code>cerl</code></a>.</p>
+%% href="cerl"><code>cerl</code></a>.</p>
%%
%% @type cerl() = cerl:cerl()
diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl
index f347438509..0158cf64db 100644
--- a/lib/compiler/src/compile.erl
+++ b/lib/compiler/src/compile.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -41,7 +41,7 @@
-type option() :: atom() | {atom(), term()} | {'d', atom(), term()}.
--type err_info() :: {erl_scan:line() | 'none',
+-type err_info() :: {erl_anno:line() | 'none',
module(), term()}. %% ErrorDescriptor
-type errors() :: [{file:filename(), [err_info()]}].
-type warnings() :: [{file:filename(), [err_info()]}].
@@ -132,7 +132,8 @@ env_default_opts() ->
Str when is_list(Str) ->
case erl_scan:string(Str) of
{ok,Tokens,_} ->
- case erl_parse:parse_term(Tokens ++ [{dot, 1}]) of
+ Dot = {dot, erl_anno:new(1)},
+ case erl_parse:parse_term(Tokens ++ [Dot]) of
{ok,List} when is_list(List) -> List;
{ok,Term} -> [Term];
{error,_Reason} ->
@@ -285,11 +286,20 @@ internal_comp(Passes, File, Suffix, St0) ->
St1 = St0#compile{filename=File, dir=Dir, base=Base,
ifile=erlfile(Dir, Base, Suffix),
ofile=objfile(Base, St0)},
- Run = case member(time, St1#compile.options) of
- true ->
- io:format("Compiling ~tp\n", [File]),
- fun run_tc/2;
- false -> fun({_Name,Fun}, St) -> catch Fun(St) end
+ Opts = St1#compile.options,
+ Run0 = case member(time, Opts) of
+ true ->
+ io:format("Compiling ~tp\n", [File]),
+ fun run_tc/2;
+ false -> fun({_Name,Fun}, St) -> catch Fun(St) end
+ end,
+ Run = case keyfind(eprof, 1, Opts) of
+ {eprof,EprofPass} ->
+ fun(P, St) ->
+ run_eprof(P, EprofPass, St)
+ end;
+ false ->
+ Run0
end,
case fold_comp(Passes, Run, St1) of
{ok,St2} -> comp_ret_ok(St2);
@@ -320,17 +330,26 @@ fold_comp([{Name,Pass}|Ps], Run, St0) ->
fold_comp([], _Run, St) -> {ok,St}.
run_tc({Name,Fun}, St) ->
- Before0 = statistics(runtime),
+ T1 = erlang:monotonic_time(),
Val = (catch Fun(St)),
- After0 = statistics(runtime),
- {Before_c, _} = Before0,
- {After_c, _} = After0,
+ T2 = erlang:monotonic_time(),
+ Elapsed = erlang:convert_time_unit(T2 - T1, native, milli_seconds),
Mem0 = erts_debug:flat_size(Val)*erlang:system_info(wordsize),
Mem = lists:flatten(io_lib:format("~.1f kB", [Mem0/1024])),
- io:format(" ~-30s: ~10.2f s ~12s\n",
- [Name,(After_c-Before_c) / 1000,Mem]),
+ io:format(" ~-30s: ~10.3f s ~12s\n",
+ [Name,Elapsed/1000,Mem]),
Val.
+run_eprof({Name,Fun}, Name, St) ->
+ io:format("~p: Running eprof\n", [Name]),
+ c:appcall(tools, eprof, start_profiling, [[self()]]),
+ Val = (catch Fun(St)),
+ c:appcall(tools, eprof, stop_profiling, []),
+ c:appcall(tools, eprof, analyze, []),
+ Val;
+run_eprof({_,Fun}, _, St) ->
+ catch Fun(St).
+
comp_ret_ok(#compile{code=Code,warnings=Warn0,module=Mod,options=Opts}=St) ->
case werror(St) of
true ->
@@ -606,7 +625,7 @@ standard_passes() ->
{iff,'to_exp',{done,"E"}},
%% Conversion to Core Erlang.
- ?pass(core_module),
+ {pass,v3_core},
{iff,'dcore',{listing,"core"}},
{iff,'to_core0',{done,"core"}}
| core_passes()].
@@ -618,7 +637,7 @@ core_passes() ->
[{unless,no_copt,
[{core_old_inliner,fun test_old_inliner/1,fun core_old_inliner/1},
{iff,doldinline,{listing,"oldinline"}},
- ?pass(core_fold_module),
+ {pass,sys_core_fold},
{iff,dcorefold,{listing,"corefold"}},
{core_inline_module,fun test_core_inliner/1,fun core_inline_module/1},
{iff,dinline,{listing,"inline"}},
@@ -631,14 +650,14 @@ core_passes() ->
kernel_passes() ->
%% Destructive setelement/3 optimization and core lint.
- [?pass(core_dsetel_module),
+ [{pass,sys_core_dsetel},
{iff,dsetel,{listing,"dsetel"}},
{iff,clint,?pass(core_lint_module)},
{iff,core,?pass(save_core_code)},
%% Kernel Erlang and code generation.
- ?pass(kernel_module),
+ {pass,v3_kernel},
{iff,dkern,{listing,"kernel"}},
{iff,'to_kernel',{done,"kernel"}},
{pass,v3_life},
@@ -901,28 +920,35 @@ transform_module(#compile{options=Opt,code=Code0}=St0) ->
foldl_transform(St, [T|Ts]) ->
Name = "transform " ++ atom_to_list(T),
- Fun = fun(S) -> T:parse_transform(S#compile.code, S#compile.options) end,
- Run = case member(time, St#compile.options) of
- true -> fun run_tc/2;
- false -> fun({_Name,F}, S) -> catch F(S) end
- end,
- case Run({Name, Fun}, St) of
- {error,Es,Ws} ->
- {error,St#compile{warnings=St#compile.warnings ++ Ws,
- errors=St#compile.errors ++ Es}};
- {'EXIT',{undef,_}} ->
- Es = [{St#compile.ifile,[{none,compile,
- {undef_parse_transform,T}}]}],
- {error,St#compile{errors=St#compile.errors ++ Es}};
- {'EXIT',R} ->
- Es = [{St#compile.ifile,[{none,compile,{parse_transform,T,R}}]}],
- {error,St#compile{errors=St#compile.errors ++ Es}};
- {warning, Forms, Ws} ->
- foldl_transform(
- St#compile{code=Forms,
- warnings=St#compile.warnings ++ Ws}, Ts);
- Forms ->
- foldl_transform(St#compile{code=Forms}, Ts)
+ case code:ensure_loaded(T) =:= {module,T} andalso
+ erlang:function_exported(T, parse_transform, 2) of
+ true ->
+ Fun = fun(S) ->
+ T:parse_transform(S#compile.code, S#compile.options)
+ end,
+ Run = case member(time, St#compile.options) of
+ true -> fun run_tc/2;
+ false -> fun({_Name,F}, S) -> catch F(S) end
+ end,
+ case Run({Name, Fun}, St) of
+ {error,Es,Ws} ->
+ {error,St#compile{warnings=St#compile.warnings ++ Ws,
+ errors=St#compile.errors ++ Es}};
+ {'EXIT',R} ->
+ Es = [{St#compile.ifile,[{none,compile,
+ {parse_transform,T,R}}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}};
+ {warning, Forms, Ws} ->
+ foldl_transform(
+ St#compile{code=Forms,
+ warnings=St#compile.warnings ++ Ws}, Ts);
+ Forms ->
+ foldl_transform(St#compile{code=Forms}, Ts)
+ end;
+ false ->
+ Es = [{St#compile.ifile,[{none,compile,
+ {undef_parse_transform,T}}]}],
+ {error,St#compile{errors=St#compile.errors ++ Es}}
end;
foldl_transform(St, []) -> {ok,St}.
@@ -1176,14 +1202,6 @@ expand_module(#compile{code=Code,options=Opts0}=St0) ->
Opts = expand_opts(Opts1),
{ok,St0#compile{module=Mod,options=Opts,code={Mod,Exp,Forms}}}.
-core_module(#compile{code=Code0,options=Opts}=St) ->
- {ok,Code,Ws} = v3_core:module(Code0, Opts),
- {ok,St#compile{code=Code,warnings=St#compile.warnings ++ Ws}}.
-
-core_fold_module(#compile{code=Code0,options=Opts,warnings=Warns}=St) ->
- {ok,Code,Ws} = sys_core_fold:module(Code0, Opts),
- {ok,St#compile{code=Code,warnings=Warns ++ Ws}}.
-
core_fold_module_after_inlining(#compile{code=Code0,options=Opts}=St) ->
%% Inlining may produce code that generates spurious warnings.
%% Ignore all warnings.
@@ -1219,14 +1237,6 @@ core_inline_module(#compile{code=Code0,options=Opts}=St) ->
Code = cerl_inline:core_transform(Code0, Opts),
{ok,St#compile{code=Code}}.
-core_dsetel_module(#compile{code=Code0,options=Opts}=St) ->
- {ok,Code} = sys_core_dsetel:module(Code0, Opts),
- {ok,St#compile{code=Code}}.
-
-kernel_module(#compile{code=Code0,options=Opts}=St) ->
- {ok,Code,Ws} = v3_kernel:module(Code0, Opts),
- {ok,St#compile{code=Code,warnings=St#compile.warnings ++ Ws}}.
-
save_abstract_code(#compile{ifile=File}=St) ->
case abstract_code(St) of
{ok,Code} ->
@@ -1235,7 +1245,8 @@ save_abstract_code(#compile{ifile=File}=St) ->
{error,St#compile{errors=St#compile.errors ++ [{File,Es}]}}
end.
-abstract_code(#compile{code=Code,options=Opts,ofile=OFile}) ->
+abstract_code(#compile{code=Code0,options=Opts,ofile=OFile}) ->
+ Code = erl_parse:anno_to_term(Code0),
Abstr = erlang:term_to_binary({raw_abstract_v1,Code}, [compressed]),
case member(encrypt_debug_info, Opts) of
true ->
@@ -1295,8 +1306,9 @@ encrypt({des3_cbc=Type,Key,IVec,BlockSize}, Bin0) ->
list_to_binary([0,length(TypeString),TypeString,Bin]).
random_bytes(N) ->
- {A,B,C} = now(),
- _ = random:seed(A, B, C),
+ _ = random:seed(erlang:time_offset(),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
random_bytes_1(N, []).
random_bytes_1(0, Acc) -> Acc;
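The run_tc/2 rewrite above moves from statistics(runtime) to the monotonic clock. The same measuring pattern in isolation, as a hedged sketch (hypothetical helper name):

    %% Time a fun with the monotonic clock and report elapsed wall-clock
    %% time in seconds, converting from native units to milliseconds first.
    time_call(Fun) ->
        T1 = erlang:monotonic_time(),
        Result = Fun(),
        T2 = erlang:monotonic_time(),
        ElapsedMs = erlang:convert_time_unit(T2 - T1, native, milli_seconds),
        io:format("took ~.3f s~n", [ElapsedMs / 1000]),
        Result.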
diff --git a/lib/compiler/src/compiler.app.src b/lib/compiler/src/compiler.app.src
index 8f68915f8e..0bfd998301 100644
--- a/lib/compiler/src/compiler.app.src
+++ b/lib/compiler/src/compiler.app.src
@@ -45,6 +45,7 @@
cerl,
cerl_clauses,
cerl_inline,
+ cerl_sets,
cerl_trees,
compile,
core_scan,
@@ -56,6 +57,7 @@
rec_env,
sys_core_dsetel,
sys_core_fold,
+ sys_core_fold_lists,
sys_core_inline,
sys_pre_attributes,
sys_pre_expand,
@@ -68,5 +70,5 @@
{registered, []},
{applications, [kernel, stdlib]},
{env, []},
- {runtime_dependencies, ["stdlib-2.0","kernel-3.0","hipe-3.10.3","erts-6.0",
- "crypto-3.3"]}]}.
+ {runtime_dependencies, ["stdlib-2.5","kernel-4.0","hipe-3.12","erts-7.0",
+ "crypto-3.6"]}]}.
diff --git a/lib/compiler/src/core_lib.erl b/lib/compiler/src/core_lib.erl
index 2792fd8fa5..66319dbd36 100644
--- a/lib/compiler/src/core_lib.erl
+++ b/lib/compiler/src/core_lib.erl
@@ -20,6 +20,12 @@
-module(core_lib).
+-deprecated({get_anno,1,next_major_release}).
+-deprecated({set_anno,2,next_major_release}).
+-deprecated({is_literal,1,next_major_release}).
+-deprecated({is_literal_list,1,next_major_release}).
+-deprecated({literal_value,1,next_major_release}).
+
-export([get_anno/1,set_anno/2]).
-export([is_literal/1,is_literal_list/1]).
-export([literal_value/1]).
@@ -33,59 +39,27 @@
%%
-spec get_anno(cerl:cerl()) -> term().
-get_anno(C) -> element(2, C).
+get_anno(C) -> cerl:get_ann(C).
-spec set_anno(cerl:cerl(), term()) -> cerl:cerl().
-set_anno(C, A) -> setelement(2, C, A).
+set_anno(C, A) -> cerl:set_ann(C, A).
-spec is_literal(cerl:cerl()) -> boolean().
-is_literal(#c_literal{}) -> true;
-is_literal(#c_cons{hd=H,tl=T}) ->
- is_literal(H) andalso is_literal(T);
-is_literal(#c_tuple{es=Es}) -> is_literal_list(Es);
-is_literal(#c_binary{segments=Es}) -> is_lit_bin(Es);
-is_literal(_) -> false.
+is_literal(Cerl) ->
+ cerl:is_literal(cerl:fold_literal(Cerl)).
-spec is_literal_list([cerl:cerl()]) -> boolean().
is_literal_list(Es) -> lists:all(fun is_literal/1, Es).
-is_lit_bin(Es) ->
- lists:all(fun (#c_bitstr{val=E,size=S}) ->
- is_literal(E) andalso is_literal(S)
- end, Es).
-
%% Return the value of LitExpr.
-spec literal_value(cerl:c_literal() | cerl:c_binary() |
cerl:c_map() | cerl:c_cons() | cerl:c_tuple()) -> term().
-literal_value(#c_literal{val=V}) -> V;
-literal_value(#c_binary{segments=Es}) ->
- list_to_binary([literal_value_bin(Bit) || Bit <- Es]);
-literal_value(#c_cons{hd=H,tl=T}) ->
- [literal_value(H)|literal_value(T)];
-literal_value(#c_tuple{es=Es}) ->
- list_to_tuple(literal_value_list(Es));
-literal_value(#c_map{arg=Cm,es=Cmps}) ->
- M = literal_value(Cm),
- lists:foldl(fun(#c_map_pair{ key=Ck, val=Cv },Mi) ->
- K = literal_value(Ck),
- V = literal_value(Cv),
- maps:put(K,V,Mi)
- end, M, Cmps).
-
-literal_value_list(Vals) -> [literal_value(V) || V <- Vals].
-
-literal_value_bin(#c_bitstr{val=Val,size=Sz,unit=U,type=T,flags=Fs}) ->
- %% We will only handle literals constructed by make_literal/1.
- %% Could be made more general in the future if the need arises.
- 8 = literal_value(Sz),
- 1 = literal_value(U),
- integer = literal_value(T),
- [unsigned,big] = literal_value(Fs),
- literal_value(Val).
+literal_value(Cerl) ->
+ cerl:concrete(cerl:fold_literal(Cerl)).
%% Make a suitable values structure, expr or values, depending on Expr.
-spec make_values([cerl:cerl()] | cerl:cerl()) -> cerl:cerl().
@@ -212,6 +186,8 @@ vu_pattern(V, #c_tuple{es=Es}, St) ->
vu_pattern_list(V, Es, St);
vu_pattern(V, #c_binary{segments=Ss}, St) ->
vu_pat_seg_list(V, Ss, St);
+vu_pattern(V, #c_map{es=Es}, St) ->
+ vu_map_pairs(V, Es, St);
vu_pattern(V, #c_alias{var=Var,pat=P}, St0) ->
case vu_pattern(V, Var, St0) of
{true,_}=St1 -> St1;
@@ -234,6 +210,18 @@ vu_pat_seg_list(V, Ss, St) ->
end
end, St, Ss).
+vu_map_pairs(V, [#c_map_pair{key=Key,val=Pat}|T], St0) ->
+ case vu_expr(V, Key) of
+ true ->
+ {true,false};
+ false ->
+ case vu_pattern(V, Pat, St0) of
+ {true,_}=St -> St;
+ St -> vu_map_pairs(V, T, St)
+ end
+ end;
+vu_map_pairs(_, [], St) -> St.
+
-spec vu_var_list(cerl:var_name(), [cerl:c_var()]) -> boolean().
vu_var_list(V, Vs) ->
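With the rewrite above, core_lib:is_literal/1 and literal_value/1 simply defer to cerl:fold_literal/1 combined with cerl:is_literal/1 and cerl:concrete/1. A hedged sketch of what that combination does for a list built from literal nodes (hypothetical function name):

    %% fold_literal/1 collapses a tree of literal subnodes into a single
    %% #c_literal{}, after which the literal test and concrete value are
    %% immediate.
    fold_demo() ->
        Tree = cerl:c_cons(cerl:c_int(1), cerl:c_cons(cerl:c_int(2), cerl:c_nil())),
        Folded = cerl:fold_literal(Tree),
        true = cerl:is_literal(Folded),
        [1, 2] = cerl:concrete(Folded),
        ok.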
diff --git a/lib/compiler/src/core_lint.erl b/lib/compiler/src/core_lint.erl
index c0e2bdaba0..f62b2bb0ba 100644
--- a/lib/compiler/src/core_lint.erl
+++ b/lib/compiler/src/core_lint.erl
@@ -173,7 +173,7 @@ check_exports(Es, St) ->
end.
check_attrs(As, St) ->
- case all(fun ({#c_literal{},V}) -> core_lib:is_literal(V);
+ case all(fun ({#c_literal{},#c_literal{}}) -> true;
(_) -> false
end, As) of
true -> St;
diff --git a/lib/compiler/src/core_parse.hrl b/lib/compiler/src/core_parse.hrl
index 4a00535360..7fd4a82e4e 100644
--- a/lib/compiler/src/core_parse.hrl
+++ b/lib/compiler/src/core_parse.hrl
@@ -72,7 +72,8 @@
-record(c_map, {anno=[],
arg=#c_literal{val=#{}} :: cerl:c_var() | cerl:c_literal(),
- es :: [cerl:c_map_pair()]}).
+ es :: [cerl:c_map_pair()],
+ is_pat=false :: boolean()}).
-record(c_map_pair, {anno=[],
op :: #c_literal{val::'assoc'} | #c_literal{val::'exact'},
diff --git a/lib/compiler/src/core_parse.yrl b/lib/compiler/src/core_parse.yrl
index a66ad4235f..eeb9f5dba7 100644
--- a/lib/compiler/src/core_parse.yrl
+++ b/lib/compiler/src/core_parse.yrl
@@ -58,7 +58,8 @@ Terminals
%% Separators
-'(' ')' '{' '}' '[' ']' '|' ',' '->' '=' '/' '<' '>' ':' '-|' '#' '~' '::'
+'(' ')' '{' '}' '[' ']' '|' ',' '->' '=' '/' '<' '>' ':' '-|' '#'
+'~' '=>' ':='
%% Keywords (atoms are assumed to always be single-quoted).
@@ -123,7 +124,7 @@ function_definition ->
{'$1','$3'}.
anno_fun -> '(' fun_expr '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
anno_fun -> fun_expr : '$1'.
%% Constant terms for annotations and attributes.
@@ -162,7 +163,7 @@ tail_constant -> ',' constant tail_constant : ['$2'|'$3'].
%% ( ( V -| <anno> ) = ( {a} -| <anno> ) -| <anno> )
anno_pattern -> '(' other_pattern '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
anno_pattern -> other_pattern : '$1'.
anno_pattern -> anno_variable : '$1'.
@@ -182,23 +183,24 @@ atomic_pattern -> atomic_literal : '$1'.
tuple_pattern -> '{' '}' : c_tuple([]).
tuple_pattern -> '{' anno_patterns '}' : c_tuple('$2').
-map_pattern -> '~' '{' '}' '~' : #c_map{es=[]}.
+map_pattern -> '~' '{' '}' '~' : c_map_pattern([]).
map_pattern -> '~' '{' map_pair_patterns '}' '~' :
- #c_map{es=lists:sort('$3')}.
+ c_map_pattern(lists:sort('$3')).
map_pair_patterns -> map_pair_pattern : ['$1'].
map_pair_patterns -> map_pair_pattern ',' map_pair_patterns : ['$1' | '$3'].
-map_pair_pattern -> '~' '<' anno_pattern ',' anno_pattern '>' :
- #c_map_pair{op=#c_literal{val=exact},key='$3',val='$5'}.
+map_pair_pattern -> anno_expression ':=' anno_pattern :
+ #c_map_pair{op=#c_literal{val=exact},
+ key='$1',val='$3'}.
cons_pattern -> '[' anno_pattern tail_pattern :
- #c_cons{hd='$2',tl='$3'}.
+ c_cons('$2', '$3').
tail_pattern -> ']' : #c_literal{val=[]}.
tail_pattern -> '|' anno_pattern ']' : '$2'.
tail_pattern -> ',' anno_pattern tail_pattern :
- #c_cons{hd='$2',tl='$3'}.
+ c_cons('$2', '$3').
binary_pattern -> '#' '{' '}' '#' : #c_binary{segments=[]}.
binary_pattern -> '#' '{' segment_patterns '}' '#' : #c_binary{segments='$3'}.
@@ -206,7 +208,7 @@ binary_pattern -> '#' '{' segment_patterns '}' '#' : #c_binary{segments='$3'}.
segment_patterns -> segment_pattern ',' segment_patterns : ['$1' | '$3'].
segment_patterns -> segment_pattern : ['$1'].
-segment_pattern -> '#' '<' anno_pattern '>' '(' anno_patterns ')':
+segment_pattern -> '#' '<' anno_pattern '>' '(' anno_expressions ')':
case '$6' of
[S,U,T,Fs] ->
#c_bitstr{val='$3',size=S,unit=U,type=T,flags=Fs};
@@ -222,7 +224,7 @@ anno_variables -> anno_variable : ['$1'].
anno_variable -> variable : '$1'.
anno_variable -> '(' variable '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
%% Expressions
%% Must split expressions into two levels as nested value expressions
@@ -230,7 +232,7 @@ anno_variable -> '(' variable '-|' annotation ')' :
anno_expression -> expression : '$1'.
anno_expression -> '(' expression '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
anno_expressions -> anno_expression ',' anno_expressions : ['$1' | '$3'].
anno_expressions -> anno_expression : ['$1'].
@@ -279,15 +281,15 @@ cons_literal -> '[' literal tail_literal : c_cons('$2', '$3').
tail_literal -> ']' : #c_literal{val=[]}.
tail_literal -> '|' literal ']' : '$2'.
-tail_literal -> ',' literal tail_literal : #c_cons{hd='$2',tl='$3'}.
+tail_literal -> ',' literal tail_literal : c_cons('$2', '$3').
tuple -> '{' '}' : c_tuple([]).
tuple -> '{' anno_expressions '}' : c_tuple('$2').
-map_expr -> '~' '{' '}' '~' : #c_map{es=[]}.
-map_expr -> '~' '{' map_pairs '}' '~' : #c_map{es='$3'}.
-map_expr -> '~' '{' map_pairs '|' variable '}' '~' : #c_map{arg='$5',es='$3'}.
-map_expr -> '~' '{' map_pairs '|' map_expr '}' '~' : #c_map{arg='$5',es='$3'}.
+map_expr -> '~' '{' '}' '~' : c_map([]).
+map_expr -> '~' '{' map_pairs '}' '~' : c_map('$3').
+map_expr -> '~' '{' map_pairs '|' variable '}' '~' : ann_c_map([], '$5', '$3').
+map_expr -> '~' '{' map_pairs '|' map_expr '}' '~' : ann_c_map([], '$5', '$3').
map_pairs -> map_pair : ['$1'].
map_pairs -> map_pair ',' map_pairs : ['$1' | '$3'].
@@ -295,10 +297,10 @@ map_pairs -> map_pair ',' map_pairs : ['$1' | '$3'].
map_pair -> map_pair_assoc : '$1'.
map_pair -> map_pair_exact : '$1'.
-map_pair_assoc -> '::' '<' anno_expression ',' anno_expression'>' :
- #c_map_pair{op=#c_literal{val=assoc},key='$3',val='$5'}.
-map_pair_exact -> '~' '<' anno_expression ',' anno_expression'>' :
- #c_map_pair{op=#c_literal{val=exact},key='$3',val='$5'}.
+map_pair_assoc -> anno_expression '=>' anno_expression :
+ #c_map_pair{op=#c_literal{val=assoc},key='$1',val='$3'}.
+map_pair_exact -> anno_expression ':=' anno_expression :
+ #c_map_pair{op=#c_literal{val=exact},key='$1',val='$3'}.
cons -> '[' anno_expression tail : c_cons('$2', '$3').
@@ -307,7 +309,7 @@ tail -> '|' anno_expression ']' : '$2'.
tail -> ',' anno_expression tail : c_cons('$2', '$3').
binary -> '#' '{' '}' '#' : #c_literal{val = <<>>}.
-binary -> '#' '{' segments '}' '#' : #c_binary{segments='$3'}.
+binary -> '#' '{' segments '}' '#' : make_binary('$3').
segments -> segment ',' segments : ['$1' | '$3'].
segments -> segment : ['$1'].
@@ -326,7 +328,7 @@ function_name -> atom '/' integer :
anno_function_name -> function_name : '$1'.
anno_function_name -> '(' function_name '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
let_vars -> anno_variable : ['$1'].
let_vars -> '<' '>' : [].
@@ -354,7 +356,7 @@ anno_clauses -> anno_clause : ['$1'].
anno_clause -> clause : '$1'.
anno_clause -> '(' clause '-|' annotation ')' :
- core_lib:set_anno('$2', '$4').
+ cerl:set_ann('$2', '$4').
clause -> clause_pattern 'when' anno_expression '->' anno_expression :
#c_clause{pats='$1',guard='$3',body='$5'}.
@@ -410,9 +412,55 @@ Erlang code.
-include("core_parse.hrl").
--import(cerl, [c_cons/2,c_tuple/1]).
+-import(cerl, [ann_c_map/3,c_cons/2,c_map/1,c_map_pattern/1,c_tuple/1]).
tok_val(T) -> element(3, T).
tok_line(T) -> element(2, T).
+%% make_binary([#c_bitstr{}]) -> #c_binary{} | #c_literal{}
+%% Create either #c_binary{} or #c_literal{} from the binary segments.
+%% In certain contexts, such as keys for maps, only literals and
+%% variables are allowed, so we must not create a #c_binary{}
+%% record in those situations.
+%%
+%% To keep this function simple, we use a crude heuristic. We will
+%% assume that Core Erlang has been produced by core_pp. If the
+%% segments *could* have been output from a literal binary by
+%% core_pp, we will create a #c_literal{}. Otherwise we will create a
+%% #c_binary{} record.
+
+make_binary(Segs) ->
+ try make_lit_bin(<<>>, Segs) of
+ Bs when is_bitstring(Bs) ->
+ #c_literal{val=Bs}
+ catch
+ throw:impossible ->
+ #c_binary{segments=Segs}
+ end.
+
+make_lit_bin(Acc, [#c_bitstr{val=I0,size=Sz0,unit=U0,type=Type0,flags=F0}|T]) ->
+ I = get_lit_val(I0),
+ Sz = get_lit_val(Sz0),
+ U = get_lit_val(U0),
+ Type = get_lit_val(Type0),
+ F = get_lit_val(F0),
+ if
+ is_integer(I), U =:= 1, Type =:= integer, F =:= [unsigned,big] ->
+ ok;
+ true ->
+ throw(impossible)
+ end,
+ if
+ Sz =< 8, T =:= [] ->
+ <<Acc/binary,I:Sz>>;
+ Sz =:= 8 ->
+ make_lit_bin(<<Acc/binary,I:8>>, T);
+ true ->
+ throw(impossible)
+ end;
+make_lit_bin(Acc, []) -> Acc.
+
+get_lit_val(#c_literal{val=Val}) -> Val;
+get_lit_val(_) -> throw(impossible).
+
%% vim: syntax=erlang
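To make the heuristic in make_binary/1 above concrete, here is a hedged sketch of which segment lists fold to a literal (hypothetical demo functions; assumes the #c_bitstr{}, #c_literal{}, #c_var{} and #c_binary{} records from core_parse.hrl are in scope):

    %% A plain byte segment as core_pp would print it for a literal binary.
    byte_seg(I) ->
        #c_bitstr{val=#c_literal{val=I},size=#c_literal{val=8},
                  unit=#c_literal{val=1},type=#c_literal{val=integer},
                  flags=#c_literal{val=[unsigned,big]}}.

    make_binary_demo() ->
        %% All-byte segments fold into a literal binary...
        #c_literal{val = <<1,2,3>>} =
            make_binary([byte_seg(1), byte_seg(2), byte_seg(3)]),
        %% ...but a non-literal value keeps the #c_binary{} node.
        VarSeg = (byte_seg(0))#c_bitstr{val=#c_var{name='X'}},
        #c_binary{} = make_binary([VarSeg]),
        ok.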
diff --git a/lib/compiler/src/core_pp.erl b/lib/compiler/src/core_pp.erl
index 03801a9b6d..9cfca88e8c 100644
--- a/lib/compiler/src/core_pp.erl
+++ b/lib/compiler/src/core_pp.erl
@@ -45,7 +45,7 @@ format(Node) ->
format(Node, #ctxt{}).
maybe_anno(Node, Fun, Ctxt) ->
- As = core_lib:get_anno(Node),
+ As = cerl:get_ann(Node),
case get_line(As) of
none ->
maybe_anno(Node, Fun, Ctxt, As);
@@ -183,15 +183,9 @@ format_1(#c_map{arg=Var,es=Es}, Ctxt) ->
"}~"
];
format_1(#c_map_pair{op=#c_literal{val=assoc},key=K,val=V}, Ctxt) ->
- ["::<",
- format_hseq([K,V], ",", add_indent(Ctxt, 1), fun format/2),
- ">"
- ];
+ format_map_pair("=>", K, V, Ctxt);
format_1(#c_map_pair{op=#c_literal{val=exact},key=K,val=V}, Ctxt) ->
- ["~<",
- format_hseq([K,V], ",", add_indent(Ctxt, 1), fun format/2),
- ">"
- ];
+ format_map_pair(":=", K, V, Ctxt);
format_1(#c_cons{hd=H,tl=T}, Ctxt) ->
Txt = ["["|format(H, add_indent(Ctxt, 1))],
[Txt|format_list_tail(T, add_indent(Ctxt, width(Txt, Ctxt)))];
@@ -201,7 +195,7 @@ format_1(#c_alias{var=V,pat=P}, Ctxt) ->
Txt = [format(V, Ctxt)|" = "],
[Txt|format(P, add_indent(Ctxt, width(Txt, Ctxt)))];
format_1(#c_let{vars=Vs0,arg=A,body=B}, Ctxt) ->
- Vs = [core_lib:set_anno(V, []) || V <- Vs0],
+ Vs = [cerl:set_ann(V, []) || V <- Vs0],
case is_simple_term(A) of
false ->
Ctxt1 = add_indent(Ctxt, Ctxt#ctxt.body_indent),
@@ -219,7 +213,7 @@ format_1(#c_let{vars=Vs0,arg=A,body=B}, Ctxt) ->
["let ",
format_values(Vs, add_indent(Ctxt, 4)),
" = ",
- format(core_lib:set_anno(A, []), Ctxt1),
+ format(cerl:set_ann(A, []), Ctxt1),
nl_indent(Ctxt),
"in "
| format(B, add_indent(Ctxt, 4))
@@ -448,6 +442,12 @@ format_list_tail(#c_cons{anno=[],hd=H,tl=T}, Ctxt) ->
format_list_tail(Tail, Ctxt) ->
["|",format(Tail, add_indent(Ctxt, 1)),"]"].
+format_map_pair(Op, K, V, Ctxt0) ->
+ Ctxt1 = add_indent(Ctxt0, 1),
+ Txt = format(K, set_class(Ctxt1, expr)),
+ Ctxt2 = add_indent(Ctxt0, width(Txt, Ctxt1)),
+ [Txt,Op,format(V, Ctxt2)].
+
indent(Ctxt) -> indent(Ctxt#ctxt.indent, Ctxt).
indent(N, _) when N =< 0 -> "";
diff --git a/lib/compiler/src/core_scan.erl b/lib/compiler/src/core_scan.erl
index b7799b373a..8ab20b1982 100644
--- a/lib/compiler/src/core_scan.erl
+++ b/lib/compiler/src/core_scan.erl
@@ -271,8 +271,10 @@ scan1("->" ++ Cs, Toks, Pos) ->
scan1(Cs, [{'->',Pos}|Toks], Pos);
scan1("-|" ++ Cs, Toks, Pos) ->
scan1(Cs, [{'-|',Pos}|Toks], Pos);
-scan1("::" ++ Cs, Toks, Pos) ->
- scan1(Cs, [{'::',Pos}|Toks], Pos);
+scan1(":=" ++ Cs, Toks, Pos) ->
+ scan1(Cs, [{':=',Pos}|Toks], Pos);
+scan1("=>" ++ Cs, Toks, Pos) ->
+ scan1(Cs, [{'=>',Pos}|Toks], Pos);
scan1([C|Cs], Toks, Pos) -> %Punctuation character
P = list_to_atom([C]),
scan1(Cs, [{P,Pos}|Toks], Pos);
diff --git a/lib/compiler/src/erl_bifs.erl b/lib/compiler/src/erl_bifs.erl
index 6c75538194..bcc2297250 100644
--- a/lib/compiler/src/erl_bifs.erl
+++ b/lib/compiler/src/erl_bifs.erl
@@ -134,6 +134,7 @@ is_pure(math, erf, 1) -> true;
is_pure(math, erfc, 1) -> true;
is_pure(math, exp, 1) -> true;
is_pure(math, log, 1) -> true;
+is_pure(math, log2, 1) -> true;
is_pure(math, log10, 1) -> true;
is_pure(math, pow, 2) -> true;
is_pure(math, sin, 1) -> true;
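Marking math:log2/1 as pure above matters because the constant-folding passes may evaluate pure calls whose arguments are all literals. A hedged sketch of that decision (not the compiler's actual folding code; hypothetical function name):

    %% Fold a remote call when the BIF is known to be pure and evaluation
    %% with the literal arguments succeeds; otherwise keep the call.
    maybe_fold_call(M, F, Args) ->
        case erl_bifs:is_pure(M, F, length(Args)) of
            true ->
                try apply(M, F, Args) of
                    Value -> {literal, Value}
                catch
                    _:_ -> {call, M, F, Args}   %% evaluation failed; keep the call
                end;
            false ->
                {call, M, F, Args}
        end.

For example, maybe_fold_call(math, log2, [64]) would yield {literal, 6.0}.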
diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl
index 82817a987a..7f4184fd30 100644
--- a/lib/compiler/src/sys_core_fold.erl
+++ b/lib/compiler/src/sys_core_fold.erl
@@ -70,7 +70,8 @@
-export([module/2,format_error/1]).
-import(lists, [map/2,foldl/3,foldr/3,mapfoldl/3,all/2,any/2,
- reverse/1,reverse/2,member/2,nth/2,flatten/1,unzip/1]).
+ reverse/1,reverse/2,member/2,nth/2,flatten/1,
+ unzip/1,keyfind/3]).
-import(cerl, [ann_c_cons/3,ann_c_map/3,ann_c_tuple/2]).
@@ -91,10 +92,14 @@
-endif.
%% Variable value info.
--record(sub, {v=[], %Variable substitutions
- s=[], %Variables in scope
- t=[], %Types
- in_guard=false}). %In guard or not.
+-record(sub, {v=[], %Variable substitutions
+ s=cerl_sets:new() :: cerl_sets:set(), %Variables in scope
+ t=#{} :: map(), %Types
+ in_guard=false}). %In guard or not.
+
+-type type_info() :: cerl:cerl() | 'bool' | 'integer'.
+-type yes_no_maybe() :: 'yes' | 'no' | 'maybe'.
+-type sub() :: #sub{}.
-spec module(cerl:c_module(), [compile:option()]) ->
{'ok', cerl:c_module(), [_]}.
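In the record change above, the scope becomes a cerl_sets set and the type database a map, and the new yes_no_maybe() type reflects that the type tests used throughout this module now give three-valued answers. The examples below are assumptions consistent with how the result is used later in this diff (eval_is_boolean/3, eval_rel_op/4); the clauses of is_boolean_type/2 itself are outside this excerpt.

    %% is_boolean_type(#c_literal{val=true}, Sub)   %% -> yes
    %% is_boolean_type(#c_literal{val=42}, Sub)     %% -> no
    %% is_boolean_type(#c_var{name='X'}, Sub)       %% -> maybe, unless
    %%     the type database Sub#sub.t records a type for 'X'
    %%     (see update_types/3 and the 'bool' type_info() below).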
@@ -293,7 +298,8 @@ expr(#c_seq{arg=Arg0,body=B0}=Seq0, Ctxt, Sub) ->
false -> Seq0#c_seq{arg=Arg,body=B1}
end
end;
-expr(#c_let{}=Let, Ctxt, Sub) ->
+expr(#c_let{}=Let0, Ctxt, Sub) ->
+ Let = opt_case_in_let(Let0),
case simplify_let(Let, Sub) of
impossible ->
%% The argument for the let is "simple", i.e. has no
@@ -313,7 +319,7 @@ expr(#c_letrec{defs=Fs0,body=B0}=Letrec, Ctxt, Sub) ->
Fs1 = map(fun ({Name,Fb}) ->
{Name,expr(Fb, {letrec,Ctxt}, Sub)}
end, Fs0),
- B1 = body(B0, value, Sub),
+ B1 = body(B0, Ctxt, Sub),
Letrec#c_letrec{defs=Fs1,body=B1};
expr(#c_case{}=Case0, Ctxt, Sub) ->
%% Ideally, the compiler should only emit warnings when there is
@@ -462,10 +468,7 @@ is_safe_simple(#c_call{module=#c_literal{val=erlang},
case erl_internal:bool_op(Name, NumArgs) of
true ->
%% Boolean operators are safe if the arguments are boolean.
- all(fun(#c_var{name=V}) -> is_boolean_type(V, Sub);
- (#c_literal{val=Lit}) -> is_boolean(Lit);
- (_) -> false
- end, Args);
+ all(fun(C) -> is_boolean_type(C, Sub) =:= yes end, Args);
false ->
%% We need a rather complicated test to ensure that
%% we only allow safe calls that are allowed in a guard.
@@ -607,14 +610,6 @@ eval_binary_1([#c_bitstr{val=#c_literal{val=Val},size=#c_literal{val=Sz},
error:_ ->
throw(impossible)
end;
-eval_binary_1([#c_bitstr{val=#c_literal{},size=#c_literal{},
- unit=#c_literal{},type=#c_literal{},
- flags=#c_cons{}=Flags}=Bitstr|Ss], Acc0) ->
- case cerl:fold_literal(Flags) of
- #c_literal{} = Flags1 ->
- eval_binary_1([Bitstr#c_bitstr{flags=Flags1}|Ss], Acc0);
- _ -> throw(impossible)
- end;
eval_binary_1([], Acc) -> Acc;
eval_binary_1(_, _) -> throw(impossible).
@@ -688,23 +683,15 @@ count_bits_1(Int, Bits) -> count_bits_1(Int bsr 64, Bits+64).
%% a rewritten expression consisting of a sequence of
%% the arguments only is returned.
-useless_call(effect, #c_call{anno=Anno,
- module=#c_literal{val=Mod},
+useless_call(effect, #c_call{module=#c_literal{val=Mod},
name=#c_literal{val=Name},
args=Args}=Call) ->
A = length(Args),
case erl_bifs:is_safe(Mod, Name, A) of
false ->
case erl_bifs:is_pure(Mod, Name, A) of
- true ->
- case member(result_not_wanted, Anno) of
- false ->
- add_warning(Call, result_ignored);
- true ->
- ok
- end;
- false ->
- ok
+ true -> add_warning(Call, result_ignored);
+ false -> ok
end,
no;
true ->
@@ -730,385 +717,23 @@ make_effect_seq([], _) -> void().
call(#c_call{args=As}=Call, #c_literal{val=M}=M0, #c_literal{val=N}=N0, Sub) ->
case get(no_inline_list_funcs) of
true ->
- call_0(Call, M0, N0, As, Sub);
+ call_1(Call, M0, N0, As, Sub);
false ->
- call_1(Call, M, N, As, Sub)
+ case sys_core_fold_lists:call(Call, M, N, As) of
+ none ->
+ call_1(Call, M, N, As, Sub);
+ Core ->
+ expr(Core, Sub)
+ end
+
end;
call(#c_call{args=As}=Call, M, N, Sub) ->
- call_0(Call, M, N, As, Sub).
+ call_1(Call, M, N, As, Sub).
-call_0(Call, M, N, As0, Sub) ->
+call_1(Call, M, N, As0, Sub) ->
As1 = expr_list(As0, value, Sub),
fold_call(Call#c_call{args=As1}, M, N, As1, Sub).
-%% We inline some very common higher order list operations.
-%% We use the same evaluation order as the library function.
-
-call_1(#c_call{anno=Anno}, lists, all, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^all',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
- CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
- CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
- body=#c_literal{val=false}},
- CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err1)},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
- clauses = [CC1, CC2, CC3]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=true}},
- Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^all',1}}|Anno], Err2)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, any, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^any',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
- CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
- body=#c_literal{val=true}},
- CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
- CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err1)},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
- clauses = [CC1, CC2, CC3]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=false}},
- Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^any',1}}|Anno], Err2)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, foreach, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^foreach',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_seq{arg=#c_apply{anno=Anno, op=F, args=[X]},
- body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=ok}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^foreach',1}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, map, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^map',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- H = #c_var{name='H'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_let{vars=[H], arg=#c_apply{anno=Anno,
- op=F,
- args=[X]},
- body=#c_cons{hd=H,
- anno=[compiler_generated],
- tl=#c_apply{anno=Anno,
- op=Loop,
- args=[Xs]}}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=[]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^map',1}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, flatmap, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^flatmap',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- H = #c_var{name='H'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_let{vars=[H],
- arg=#c_apply{anno=Anno, op=F, args=[X]},
- body=#c_call{anno=[compiler_generated|Anno],
- module=#c_literal{val=erlang},
- name=#c_literal{val='++'},
- args=[H,
- #c_apply{anno=Anno,
- op=Loop,
- args=[Xs]}]}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=[]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^flatmap',1}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, filter, [Arg1,Arg2], Sub) ->
- Loop = #c_var{name={'lists^filter',1}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- B = #c_var{name='B'},
- Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
- CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
- body=#c_cons{anno=[compiler_generated], hd=X, tl=Xs}},
- CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
- body=Xs},
- CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err1)},
- Case = #c_case{arg=B, clauses = [CC1, CC2, CC3]},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_let{vars=[B],
- arg=#c_apply{anno=Anno, op=F, args=[X]},
- body=#c_let{vars=[Xs],
- arg=#c_apply{anno=Anno,
- op=Loop,
- args=[Xs]},
- body=Case}}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=1}]},
- body=#c_literal{val=[]}},
- Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^filter',1}}|Anno], Err2)},
- Fun = #c_fun{vars=[Xs],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, foldl, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^foldl',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- A = #c_var{name='A'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno,
- op=Loop,
- args=[Xs, #c_apply{anno=Anno,
- op=F,
- args=[X, A]}]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
- body=A},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^foldl',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, A],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, foldr, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^foldr',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- A = #c_var{name='A'},
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=#c_apply{anno=Anno,
- op=F,
- args=[X, #c_apply{anno=Anno,
- op=Loop,
- args=[Xs, A]}]}},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
- body=A},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^foldr',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, A],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
- body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, mapfoldl, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^mapfoldl',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Avar = #c_var{name='A'},
- Match =
- fun (A, P, E) ->
- C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
- Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
- C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err)},
- #c_case{arg=A, clauses=[C1, C2]}
- end,
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
- body=Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
- #c_tuple{es=[X, Avar]},
-%%% Tuple passing version
- Match(#c_apply{anno=Anno,
- op=Loop,
- args=[Xs, Avar]},
- #c_tuple{es=[Xs, Avar]},
- #c_tuple{anno=[compiler_generated],
- es=[#c_cons{anno=[compiler_generated],
- hd=X, tl=Xs},
- Avar]})
-%%% Multiple-value version
-%%% #c_let{vars=[Xs,A],
-%%% %% The tuple here will be optimised
-%%% %% away later; no worries.
-%%% arg=#c_apply{op=Loop, args=[Xs, A]},
-%%% body=#c_values{es=[#c_cons{hd=X, tl=Xs},
-%%% A]}}
- )},
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
-%%% Tuple passing version
- body=#c_tuple{anno=[compiler_generated],
- es=[#c_literal{val=[]}, Avar]}},
-%%% Multiple-value version
-%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^mapfoldl',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, Avar],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
-%%% Tuple passing version
- body=#c_apply{anno=Anno,
- op=Loop,
- args=[L, Avar]}}},
-%%% Multiple-value version
-%%% body=#c_let{vars=[Xs, A],
-%%% arg=#c_apply{op=Loop,
-%%% args=[L, A]},
-%%% body=#c_tuple{es=[Xs, A]}}}},
- Sub);
-call_1(#c_call{anno=Anno}, lists, mapfoldr, [Arg1,Arg2,Arg3], Sub) ->
- Loop = #c_var{name={'lists^mapfoldr',2}},
- F = #c_var{name='F'},
- Xs = #c_var{name='Xs'},
- X = #c_var{name='X'},
- Avar = #c_var{name='A'},
- Match =
- fun (A, P, E) ->
- C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
- Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
- C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
- body=match_fail(Anno, Err)},
- #c_case{arg=A, clauses=[C1, C2]}
- end,
- C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
-%%% Tuple passing version
- body=Match(#c_apply{anno=Anno,
- op=Loop,
- args=[Xs, Avar]},
- #c_tuple{es=[Xs, Avar]},
- Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
- #c_tuple{es=[X, Avar]},
- #c_tuple{anno=[compiler_generated],
- es=[#c_cons{anno=[compiler_generated],
- hd=X, tl=Xs}, Avar]}))
-%%% Multiple-value version
-%%% body=#c_let{vars=[Xs,A],
-%%% %% The tuple will be optimised away
-%%% arg=#c_apply{op=Loop, args=[Xs, A]},
-%%% body=Match(#c_apply{op=F, args=[X, A]},
-%%% #c_tuple{es=[X, A]},
-%%% #c_values{es=[#c_cons{hd=X, tl=Xs},
-%%% A]})}
- },
- C2 = #c_clause{pats=[#c_literal{val=[]}],
- guard=#c_call{module=#c_literal{val=erlang},
- name=#c_literal{val=is_function},
- args=[F, #c_literal{val=2}]},
-%%% Tuple passing version
- body=#c_tuple{anno=[compiler_generated],
- es=[#c_literal{val=[]}, Avar]}},
-%%% Multiple-value version
-%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
- Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
- C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
- body=match_fail([{function_name,{'lists^mapfoldr',2}}|Anno], Err)},
- Fun = #c_fun{vars=[Xs, Avar],
- body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
- L = #c_var{name='L'},
- expr(#c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
- body=#c_letrec{defs=[{Loop,Fun}],
-%%% Tuple passing version
- body=#c_apply{anno=Anno,
- op=Loop,
- args=[L, Avar]}}},
-%%% Multiple-value version
-%%% body=#c_let{vars=[Xs, A],
-%%% arg=#c_apply{op=Loop,
-%%% args=[L, A]},
-%%% body=#c_tuple{es=[Xs, A]}}}},
- Sub);
-call_1(#c_call{module=M, name=N}=Call, _, _, As, Sub) ->
- call_0(Call, M, N, As, Sub).
-
-match_fail(Anno, Arg) ->
- #c_primop{anno=Anno,
- name=#c_literal{val='match_fail'},
- args=[Arg]}.
-
%% fold_call(Call, Mod, Name, Args, Sub) -> Expr.
%% Try to safely evaluate the call. Just try to evaluate arguments,
%% do the call and convert return values to literals. If this
@@ -1133,29 +758,33 @@ fold_call_1(Call, Mod, Name, Args, Sub) ->
true -> fold_call_2(Call, Mod, Name, Args, Sub)
end.
-fold_call_2(Call, Module, Name, Args0, Sub) ->
- try
- Args = [core_lib:literal_value(A) || A <- Args0],
- try apply(Module, Name, Args) of
- Val ->
- case cerl:is_literal_term(Val) of
- true ->
- #c_literal{val=Val};
- false ->
- %% Successful evaluation, but it was not
- %% possible to express the computed value as a literal.
- Call
- end
- catch
- error:Reason ->
- %% Evaluation of the function failed. Warn and replace
- %% the call with a call to erlang:error/1.
- eval_failure(Call, Reason)
- end
+fold_call_2(Call, Module, Name, Args, Sub) ->
+ case all(fun cerl:is_literal/1, Args) of
+ true ->
+ %% All arguments are literals.
+ fold_lit_args(Call, Module, Name, Args);
+ false ->
+ %% At least one non-literal argument.
+ fold_non_lit_args(Call, Module, Name, Args, Sub)
+ end.
+
+fold_lit_args(Call, Module, Name, Args0) ->
+ Args = [cerl:concrete(A) || A <- Args0],
+ try apply(Module, Name, Args) of
+ Val ->
+ case cerl:is_literal_term(Val) of
+ true ->
+ cerl:abstract(Val);
+ false ->
+ %% Successful evaluation, but it was not possible
+ %% to express the computed value as a literal.
+ Call
+ end
catch
- error:_ ->
- %% There was at least one non-literal argument.
- fold_non_lit_args(Call, Module, Name, Args0, Sub)
+ error:Reason ->
+ %% Evaluation of the function failed. Warn and replace
+ %% the call with a call to erlang:error/1.
+ eval_failure(Call, Reason)
end.
%% fold_non_lit_args(Call, Module, Name, Args, Sub) -> Expr.
@@ -1194,41 +823,53 @@ fold_non_lit_args(Call, _, _, _, _) -> Call.
%% Evaluate a relational operation using type information.
eval_rel_op(Call, Op, [#c_var{name=V},#c_var{name=V}], _) ->
Bool = erlang:Op(same, same),
- #c_literal{anno=core_lib:get_anno(Call),val=Bool};
-eval_rel_op(Call, '=:=', [#c_var{name=V}=Var,#c_literal{val=true}], Sub) ->
+ #c_literal{anno=cerl:get_ann(Call),val=Bool};
+eval_rel_op(Call, '=:=', [Term,#c_literal{val=true}], Sub) ->
%% BoolVar =:= true ==> BoolVar
- case is_boolean_type(V, Sub) of
- true -> Var;
- false -> Call
+ case is_boolean_type(Term, Sub) of
+ yes -> Term;
+ maybe -> Call;
+ no -> #c_literal{val=false}
end;
-eval_rel_op(Call, '==', Ops, _Sub) ->
- case is_exact_eq_ok(Ops) of
+eval_rel_op(Call, '==', Ops, Sub) ->
+ case is_exact_eq_ok(Ops, Sub) of
true ->
- Name = #c_literal{anno=core_lib:get_anno(Call),val='=:='},
+ Name = #c_literal{anno=cerl:get_ann(Call),val='=:='},
Call#c_call{name=Name};
false ->
Call
end;
-eval_rel_op(Call, '/=', Ops, _Sub) ->
- case is_exact_eq_ok(Ops) of
+eval_rel_op(Call, '/=', Ops, Sub) ->
+ case is_exact_eq_ok(Ops, Sub) of
true ->
- Name = #c_literal{anno=core_lib:get_anno(Call),val='=/='},
+ Name = #c_literal{anno=cerl:get_ann(Call),val='=/='},
Call#c_call{name=Name};
false ->
Call
end;
eval_rel_op(Call, _, _, _) -> Call.
-is_exact_eq_ok([#c_literal{val=Lit}|_]) ->
+is_exact_eq_ok([A,B]=L, Sub) ->
+ case is_int_type(A, Sub) =:= yes andalso is_int_type(B, Sub) =:= yes of
+ true -> true;
+ false -> is_exact_eq_ok_1(L)
+ end.
+
+is_exact_eq_ok_1([#c_literal{val=Lit}|_]) ->
is_non_numeric(Lit);
-is_exact_eq_ok([_|T]) ->
- is_exact_eq_ok(T);
-is_exact_eq_ok([]) -> false.
+is_exact_eq_ok_1([_|T]) ->
+ is_exact_eq_ok_1(T);
+is_exact_eq_ok_1([]) -> false.
is_non_numeric([H|T]) ->
is_non_numeric(H) andalso is_non_numeric(T);
is_non_numeric(Tuple) when is_tuple(Tuple) ->
is_non_numeric_tuple(Tuple, tuple_size(Tuple));
+is_non_numeric(Map) when is_map(Map) ->
+ %% Note that 17.x and 18.x compare keys in different ways.
+ %% Be very conservative -- require that both keys and values
+ %% are non-numeric.
+ is_non_numeric(maps:to_list(Map));
is_non_numeric(Num) when is_number(Num) ->
false;
is_non_numeric(_) -> true.
@@ -1242,40 +883,31 @@ is_non_numeric_tuple(_Tuple, 0) -> true.
%% there must be at least one non-literal argument (i.e.
%% there is no need to handle the case that all arguments
%% are literal).
-eval_bool_op(Call, 'and', [#c_literal{val=true},#c_var{name=V}=Res], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
-eval_bool_op(Call, 'and', [#c_var{name=V}=Res,#c_literal{val=true}], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
-eval_bool_op(Call, 'and', [#c_literal{val=false}=Res,#c_var{name=V}], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
-eval_bool_op(Call, 'and', [#c_var{name=V},#c_literal{val=false}=Res], Sub) ->
- case is_boolean_type(V, Sub) of
- true -> Res;
- false-> Call
- end;
+
+eval_bool_op(Call, 'and', [#c_literal{val=true},Term], Sub) ->
+ eval_bool_op_1(Call, Term, Term, Sub);
+eval_bool_op(Call, 'and', [Term,#c_literal{val=true}], Sub) ->
+ eval_bool_op_1(Call, Term, Term, Sub);
+eval_bool_op(Call, 'and', [#c_literal{val=false}=Res,Term], Sub) ->
+ eval_bool_op_1(Call, Res, Term, Sub);
+eval_bool_op(Call, 'and', [Term,#c_literal{val=false}=Res], Sub) ->
+ eval_bool_op_1(Call, Res, Term, Sub);
eval_bool_op(Call, _, _, _) -> Call.
+eval_bool_op_1(Call, Res, Term, Sub) ->
+ case is_boolean_type(Term, Sub) of
+ yes -> Res;
+ no -> eval_failure(Call, badarg);
+ maybe -> Call
+ end.
+
%% Evaluate is_boolean/1 using type information.
-eval_is_boolean(Call, #c_var{name=V}, Sub) ->
- case is_boolean_type(V, Sub) of
- true -> #c_literal{val=true};
- false -> Call
- end;
-eval_is_boolean(_, #c_cons{}, _) ->
- #c_literal{val=false};
-eval_is_boolean(_, #c_tuple{}, _) ->
- #c_literal{val=false};
-eval_is_boolean(Call, _, _) ->
- Call.
+eval_is_boolean(Call, Term, Sub) ->
+ case is_boolean_type(Term, Sub) of
+ no -> #c_literal{val=false};
+ yes -> #c_literal{val=true};
+ maybe -> Call
+ end.
%% eval_length(Call, List) -> Val.
%% Evaluates the length for the prefix of List which has a known
@@ -1325,33 +957,33 @@ eval_append(Call, X, Y) ->
%% Evaluates element/2 if the position Pos is a literal and
%% the shape of the tuple Tuple is known.
%%
-eval_element(Call, #c_literal{val=Pos}, #c_tuple{es=Es}, _Types) when is_integer(Pos) ->
- if
- 1 =< Pos, Pos =< length(Es) ->
- lists:nth(Pos, Es);
- true ->
- eval_failure(Call, badarg)
- end;
-eval_element(Call, #c_literal{val=Pos}, #c_var{name=V}, Types)
+eval_element(Call, #c_literal{val=Pos}, Tuple, Types)
when is_integer(Pos) ->
- case orddict:find(V, Types#sub.t) of
- {ok,#c_tuple{es=Elements}} ->
+ case get_type(Tuple, Types) of
+ none ->
+ Call;
+ Type ->
+ Es = case cerl:is_c_tuple(Type) of
+ false -> [];
+ true -> cerl:tuple_es(Type)
+ end,
if
- 1 =< Pos, Pos =< length(Elements) ->
- case lists:nth(Pos, Elements) of
- #c_alias{var=Alias} -> Alias;
- Res -> Res
+ 1 =< Pos, Pos =< length(Es) ->
+ El = lists:nth(Pos, Es),
+ try
+ cerl:set_ann(pat_to_expr(El), [compiler_generated])
+ catch
+ throw:impossible ->
+ Call
end;
true ->
+ %% Index outside tuple or not a tuple.
eval_failure(Call, badarg)
- end;
- {ok,_} ->
- eval_failure(Call, badarg);
- error ->
- Call
+ end
end;
-eval_element(Call, Pos, Tuple, _Types) ->
- case is_not_integer(Pos) orelse is_not_tuple(Tuple) of
+eval_element(Call, Pos, Tuple, Sub) ->
+ case is_int_type(Pos, Sub) =:= no orelse
+ is_tuple_type(Tuple, Sub) =:= no of
true ->
eval_failure(Call, badarg);
false ->
@@ -1361,58 +993,55 @@ eval_element(Call, Pos, Tuple, _Types) ->
%% eval_is_record(Call, Var, Tag, Size, Types) -> Val.
%% Evaluates is_record/3 using type information.
%%
-eval_is_record(Call, #c_var{name=V}, #c_literal{val=NeededTag}=Lit,
+eval_is_record(Call, Term, #c_literal{val=NeededTag},
#c_literal{val=Size}, Types) ->
- case orddict:find(V, Types#sub.t) of
- {ok,#c_tuple{es=[#c_literal{val=Tag}|_]=Es}} ->
- Lit#c_literal{val=Tag =:= NeededTag andalso
- length(Es) =:= Size};
- _ ->
- Call
+ case get_type(Term, Types) of
+ none ->
+ Call;
+ Type ->
+ Es = case cerl:is_c_tuple(Type) of
+ false -> [];
+ true -> cerl:tuple_es(Type)
+ end,
+ case Es of
+ [#c_literal{val=Tag}|_] ->
+ Bool = Tag =:= NeededTag andalso
+ length(Es) =:= Size,
+ #c_literal{val=Bool};
+ _ ->
+ #c_literal{val=false}
+ end
end;
eval_is_record(Call, _, _, _, _) -> Call.
-%% is_not_integer(Core) -> true | false.
-%% Returns true if Core is definitely not an integer.
-
-is_not_integer(#c_literal{val=Val}) when not is_integer(Val) -> true;
-is_not_integer(#c_tuple{}) -> true;
-is_not_integer(#c_cons{}) -> true;
-is_not_integer(#c_map{}) -> true;
-is_not_integer(_) -> false.
-
-%% is_not_tuple(Core) -> true | false.
-%% Returns true if Core is definitely not a tuple.
-
-is_not_tuple(#c_literal{val=Val}) when not is_tuple(Val) -> true;
-is_not_tuple(#c_cons{}) -> true;
-is_not_tuple(#c_map{}) -> true;
-is_not_tuple(_) -> false.
-
%% eval_setelement(Call, Pos, Tuple, NewVal) -> Core.
%% Evaluates setelement/3 if position Pos is an integer
-%% the shape of the tuple Tuple is known.
+%% and the shape of the tuple Tuple is known.
%%
-eval_setelement(Call, Pos, Tuple, NewVal) ->
- try
- eval_setelement_1(Pos, Tuple, NewVal)
- catch
- error:_ ->
- Call
- end.
-
-eval_setelement_1(#c_literal{val=Pos}, #c_tuple{anno=A,es=Es}, NewVal)
- when is_integer(Pos) ->
- ann_c_tuple(A, eval_setelement_2(Pos, Es, NewVal));
-eval_setelement_1(#c_literal{val=Pos}, #c_literal{anno=A,val=Es0}, NewVal)
+eval_setelement(Call, #c_literal{val=Pos}, Tuple, NewVal)
when is_integer(Pos) ->
- Es = [#c_literal{anno=A,val=E} || E <- tuple_to_list(Es0)],
- ann_c_tuple(A, eval_setelement_2(Pos, Es, NewVal)).
+ case cerl:is_data(Tuple) of
+ false ->
+ Call;
+ true ->
+ Es0 = case cerl:is_c_tuple(Tuple) of
+ false -> [];
+ true -> cerl:tuple_es(Tuple)
+ end,
+ if
+ 1 =< Pos, Pos =< length(Es0) ->
+ Es = eval_setelement_1(Pos, Es0, NewVal),
+ cerl:update_c_tuple(Tuple, Es);
+ true ->
+ eval_failure(Call, badarg)
+ end
+ end;
+eval_setelement(Call, _, _, _) -> Call.
-eval_setelement_2(1, [_|T], NewVal) ->
+eval_setelement_1(1, [_|T], NewVal) ->
[NewVal|T];
-eval_setelement_2(Pos, [H|T], NewVal) when Pos > 1 ->
- [H|eval_setelement_2(Pos-1, T, NewVal)].
+eval_setelement_1(Pos, [H|T], NewVal) when Pos > 1 ->
+ [H|eval_setelement_1(Pos-1, T, NewVal)].
%% eval_failure(Call, Reason) -> Core.
%% Warn for a call that will fail and replace the call with
@@ -1492,20 +1121,32 @@ clause(#c_clause{pats=Ps0,guard=G0,body=B0}=Cl, Cexpr, Ctxt, Sub0) ->
let_substs(Vs0, As0, Sub0) ->
{Vs1,Sub1} = pattern_list(Vs0, Sub0),
{Vs2,As1,Ss} = let_substs_1(Vs1, As0, Sub1),
- Sub2 = scope_add([V || #c_var{name=V} <- Vs2], Sub1),
+ Sub2 = sub_add_scope([V || #c_var{name=V} <- Vs2], Sub1),
{Vs2,As1,
- foldl(fun ({V,S}, Sub) -> sub_set_name(V, S, Sub) end, Sub2, Ss)}.
+ foldl(fun ({V,S}, Sub) -> sub_set_name(V, S, Sub) end, Sub2, Ss)}.
let_substs_1(Vs, #c_values{es=As}, Sub) ->
let_subst_list(Vs, As, Sub);
let_substs_1([V], A, Sub) -> let_subst_list([V], [A], Sub);
let_substs_1(Vs, A, _) -> {Vs,A,[]}.
-let_subst_list([V|Vs0], [A|As0], Sub) ->
+let_subst_list([V|Vs0], [A0|As0], Sub) ->
{Vs1,As1,Ss} = let_subst_list(Vs0, As0, Sub),
- case is_subst(A) of
- true -> {Vs1,As1,sub_subst_var(V, A, Sub) ++ Ss};
- false -> {[V|Vs1],[A|As1],Ss}
+ case is_subst(A0) of
+ true ->
+ A = case is_compiler_generated(V) andalso
+ not is_compiler_generated(A0) of
+ true ->
+ %% Propagate the 'compiler_generated' annotation
+ %% along with the value.
+ Ann = [compiler_generated|cerl:get_ann(A0)],
+ cerl:set_ann(A0, Ann);
+ false ->
+ A0
+ end,
+ {Vs1,As1,sub_subst_var(V, A, Sub) ++ Ss};
+ false ->
+ {[V|Vs1],[A0|As1],Ss}
end;
let_subst_list([], [], _) -> {[],[],[]}.
@@ -1527,7 +1168,7 @@ pattern(#c_var{}=Pat, Isub, Osub) ->
true ->
V1 = make_var_name(),
Pat1 = #c_var{name=V1},
- {Pat1,sub_set_var(Pat, Pat1, scope_add([V1], Osub))};
+ {Pat1,sub_set_var(Pat, Pat1, sub_add_scope([V1], Osub))};
false ->
{Pat,sub_del_var(Pat, Osub)}
end;
@@ -1597,6 +1238,7 @@ is_subst(_) -> false.
%% sub_del_var(Var, #sub{}) -> #sub{}.
%% sub_subst_var(Var, Value, #sub{}) -> [{Name,Value}].
%% sub_is_val(Var, #sub{}) -> boolean().
+%% sub_add_scope([Var], #sub{}) -> #sub{}
%% sub_subst_scope(#sub{}) -> #sub{}
%%
%% We use the variable name as key so as not have problems with
@@ -1607,14 +1249,15 @@ is_subst(_) -> false.
%% In addition to the list of substitutions, we also keep track of
%% all variables currently live (the scope).
%%
-%% sub_subst_scope/1 adds dummy substitutions for all variables
-%% in the scope in order to force renaming if variables in the
-%% scope occurs as pattern variables.
+%% sub_add_scope/2 adds variables to the scope. sub_subst_scope/1
+%% adds dummy substitutions for all variables in the scope in order
+%% to force renaming if variables in the scope occur as pattern
+%% variables.
-sub_new() -> #sub{v=orddict:new(),s=gb_trees:empty(),t=[]}.
+sub_new() -> #sub{v=orddict:new(),s=cerl_sets:new(),t=#{}}.
sub_new(#sub{}=Sub) ->
- Sub#sub{v=orddict:new(),t=[]}.
+ Sub#sub{v=orddict:new(),t=#{}}.
sub_new_preserve_types(#sub{}=Sub) ->
Sub#sub{v=orddict:new()}.
@@ -1631,16 +1274,16 @@ sub_set_var(#c_var{name=V}, Val, Sub) ->
sub_set_name(V, Val, #sub{v=S,s=Scope,t=Tdb0}=Sub) ->
Tdb1 = kill_types(V, Tdb0),
Tdb = copy_type(V, Val, Tdb1),
- Sub#sub{v=orddict:store(V, Val, S),s=gb_sets:add(V, Scope),t=Tdb}.
+ Sub#sub{v=orddict:store(V, Val, S),s=cerl_sets:add_element(V, Scope),t=Tdb}.
sub_del_var(#c_var{name=V}, #sub{v=S,s=Scope,t=Tdb}=Sub) ->
%% Profiling shows that for programs with many record operations,
%% sub_del_var/2 is a bottleneck. Since the scope contains all
%% variables that are live, we know that V cannot be present in S
%% if it is not in the scope.
- case gb_sets:is_member(V, Scope) of
+ case cerl_sets:is_element(V, Scope) of
false ->
- Sub#sub{s=gb_sets:insert(V, Scope)};
+ Sub#sub{s=cerl_sets:add_element(V, Scope)};
true ->
Sub#sub{v=orddict:erase(V, S),t=kill_types(V, Tdb)}
end.
@@ -1649,8 +1292,14 @@ sub_subst_var(#c_var{name=V}, Val, #sub{v=S0}) ->
%% Fold chained substitutions.
[{V,Val}] ++ [ {K,Val} || {K,#c_var{name=V1}} <- S0, V1 =:= V].
+sub_add_scope(Vs, #sub{s=Scope0}=Sub) ->
+ Scope = foldl(fun(V, S) when is_integer(V); is_atom(V) ->
+ cerl_sets:add_element(V, S)
+ end, Scope0, Vs),
+ Sub#sub{s=Scope}.
+
sub_subst_scope(#sub{v=S0,s=Scope}=Sub) ->
- S = [{-1,#c_var{name=Sv}} || Sv <- gb_sets:to_list(Scope)]++S0,
+ S = [{-1,#c_var{name=Sv}} || Sv <- cerl_sets:to_list(Scope)]++S0,
Sub#sub{v=S}.
sub_is_val(#c_var{name=V}, #sub{v=S,s=Scope}) ->
@@ -1658,7 +1307,7 @@ sub_is_val(#c_var{name=V}, #sub{v=S,s=Scope}) ->
%% became the new bottleneck. Since the scope contains all
%% live variables, a variable V can only be the target for
%% a substitution if it is in the scope.
- gb_sets:is_member(V, Scope) andalso v_is_value(V, S).
+ cerl_sets:is_element(V, Scope) andalso v_is_value(V, S).
v_is_value(Var, [{_,#c_var{name=Var}}|_]) -> true;
v_is_value(Var, [_|T]) -> v_is_value(Var, T);
@@ -1696,7 +1345,7 @@ clauses(E, [C0|Cs], Ctxt, Sub, LitExpr) ->
{yes,yes} ->
case LitExpr of
false ->
- Line = get_line(core_lib:get_anno(C1)),
+ Line = get_line(cerl:get_ann(C1)),
shadow_warning(Cs, Line);
true ->
%% If the case expression is a literal,
@@ -1930,7 +1579,7 @@ opt_bool_case_guard(#c_case{arg=Arg,clauses=Cs0}=Case) ->
Case;
true ->
Cs = opt_bool_case_guard(Arg, Cs0),
- Case#c_case{arg=#c_values{anno=core_lib:get_anno(Arg),es=[]},
+ Case#c_case{arg=#c_values{anno=cerl:get_ann(Arg),es=[]},
clauses=Cs}
end.
@@ -1978,6 +1627,7 @@ eval_case(#c_case{arg=E,clauses=[#c_clause{pats=Ps0,
%% is correct, the clause will always match at run-time.
Case;
{true,Bs} ->
+ eval_case_warn(B),
{Ps,As} = unzip(Bs),
InnerLet = cerl:c_let(Ps, core_lib:make_values(As), B),
Let = cerl:c_let(Vs, E, InnerLet),
@@ -1985,6 +1635,18 @@ eval_case(#c_case{arg=E,clauses=[#c_clause{pats=Ps0,
end;
eval_case(Case, _) -> Case.
+eval_case_warn(#c_primop{anno=Anno,
+ name=#c_literal{val=match_fail},
+ args=[_]}=Core) ->
+ case keyfind(eval_failure, 1, Anno) of
+ false ->
+ ok;
+ {eval_failure,Reason} ->
+ %% Example: M = not_map, M#{k:=v}
+ add_warning(Core, {eval_failure,Reason})
+ end;
+eval_case_warn(_) -> ok.
+
%% case_opt(CaseArg, [Clause]) -> {CaseArg,[Clause]}.
%% Try and optimise a case by avoid building tuples or lists
%% in the case expression. Instead combine the variable parts
@@ -2041,182 +1703,260 @@ case_opt_args([], Cs, _Sub, _LitExpr, Acc) ->
%% or to remove a literal argument.
%%
case_opt_arg(E0, Sub, Cs, LitExpr) ->
- E = maybe_replace_var(E0, Sub),
- case cerl:is_data(E) of
+ case cerl:is_c_var(E0) of
+ false ->
+ case_opt_arg_1(E0, Cs, LitExpr);
+ true ->
+ case case_will_var_match(Cs) of
+ true ->
+ %% All clauses will match a variable in the
+ %% current position. Don't expand this variable
+ %% (that can only make the code worse).
+ {error,Cs};
+ false ->
+ %% If possible, expand this variable to a previously
+ %% matched term.
+ E = case_expand_var(E0, Sub),
+ case_opt_arg_1(E, Cs, LitExpr)
+ end
+ end.
+
+case_opt_arg_1(E0, Cs0, LitExpr) ->
+ case cerl:is_data(E0) of
false ->
- {error,Cs};
+ {error,Cs0};
true ->
+ E = case_opt_compiler_generated(E0),
+ Cs = case_opt_nomatch(E, Cs0, LitExpr),
case cerl:data_type(E) of
{atomic,_} ->
- case_opt_lit(E, Cs, LitExpr);
+ case_opt_lit(E, Cs);
_ ->
- case_opt_data(E, Cs, LitExpr)
+ case_opt_data(E, Cs)
end
end.
-%% maybe_replace_var(Expr0, Sub) -> Expr
+%% case_will_var_match([Clause]) -> true | false.
+%% Return if all clauses will match a variable in the
+%% current position.
+%%
+case_will_var_match(Cs) ->
+ all(fun({[P|_],_,_,_}) ->
+ case cerl_clauses:match(P, any) of
+ {true,_} -> true;
+ _ -> false
+ end
+ end, Cs).
+
+
+%% case_opt_compiler_generated(Core) -> Core'
+%% Mark Core expressions as compiler generated to ensure that
+%% no warnings are generated if they turn out to be unused.
+%% To make pretty-printed Core Erlang easier to read, don't mark
+%% constructs that can't cause warnings to be emitted.
+%%
+case_opt_compiler_generated(Core) ->
+ F = fun(C) ->
+ case cerl:type(C) of
+ alias -> C;
+ var -> C;
+ _ -> cerl:set_ann(C, [compiler_generated])
+ end
+ end,
+ cerl_trees:map(F, Core).
+
+
+%% case_expand_var(Expr0, Sub) -> Expr
%% If Expr0 is a variable that has been previously matched and
%% is known to be a tuple, return the tuple instead. Otherwise
%% return Expr0 unchanged.
%%
-maybe_replace_var(E, Sub) ->
- case cerl:is_c_var(E) of
- false -> E;
- true -> maybe_replace_var_1(E, Sub)
- end.
-
-maybe_replace_var_1(E, #sub{t=Tdb}) ->
- case orddict:find(cerl:var_name(E), Tdb) of
- {ok,T0} ->
+case_expand_var(E, #sub{t=Tdb}) ->
+ Key = cerl:var_name(E),
+ case Tdb of
+ #{Key:=T0} ->
case cerl:is_c_tuple(T0) of
false ->
E;
true ->
- cerl_trees:map(fun(C) ->
- case cerl:is_c_alias(C) of
- false -> C;
- true -> cerl:alias_pat(C)
- end
- end, T0)
+ %% The pattern was a tuple. Now we must make sure
+ %% that the elements of the tuple are suitable. In
+ %% particular, we don't want binary or map
+ %% construction here, since that means that the
+ %% binary or map will be constructed in the 'case'
+ %% argument. That is wasteful for binaries. Even
+ %% worse is that any map pattern that uses the ':='
+ %% operator will fail when used in map
+ %% construction (only the '=>' operator is allowed
+ %% when constructing a map from scratch).
+ try
+ cerl_trees:map(fun coerce_to_data/1, T0)
+ catch
+ throw:impossible ->
+ %% Something unsuitable was found (map or
+ %% binary). Keep the variable.
+ E
+ end
end;
- error ->
+ _ ->
E
end.
-%% case_opt_lit(Literal, Clauses0, LitExpr) ->
-%% {ok,[],Clauses} | error
-%% The current part of the case expression is a literal. That
-%% means that we will know at compile-time whether a clause
-%% will match, and we can remove the corresponding pattern from
-%% each clause.
-%%
-%% The only complication is if the literal is a binary. Binary
-%% pattern matching is tricky, so we will give up in that case.
+%% coerce_to_data(Core) -> Core'
+%% Coerce an element originally from a pattern to a data item or
+%% variable. Throw an 'impossible' exception if non-data Core Erlang
+%% terms such as binary construction or map construction are
+%% encountered.
-case_opt_lit(Lit, Cs0, LitExpr) ->
- Cs1 = case_opt_lit_1(Lit, Cs0, LitExpr),
- try case_opt_lit_2(Lit, Cs1) of
- Cs ->
- {ok,[],Cs}
- catch
- throw:impossible ->
- {error,Cs1}
+coerce_to_data(C) ->
+ case cerl:is_c_alias(C) of
+ false ->
+ case cerl:is_data(C) orelse cerl:is_c_var(C) of
+ true -> C;
+ false -> throw(impossible)
+ end;
+ true ->
+ coerce_to_data(cerl:alias_pat(C))
end.
-case_opt_lit_1(E, [{[P|_],C,_,_}=Current|Cs], LitExpr) ->
+%% case_opt_nomatch(E, Clauses, LitExpr) -> Clauses'
+%% Remove all clauses that cannot possibly match.
+
+case_opt_nomatch(E, [{[P|_],C,_,_}=Current|Cs], LitExpr) ->
case cerl_clauses:match(P, E) of
none ->
- %% The pattern will not match the literal. Remove the clause.
- %% Unless the entire case expression is a literal, also
- %% emit a warning.
+ %% The pattern will not match the case expression. Remove
+ %% the clause. Unless the entire case expression is a
+ %% literal, also emit a warning.
case LitExpr of
false -> add_warning(C, nomatch_clause_type);
true -> ok
end,
- case_opt_lit_1(E, Cs, LitExpr);
+ case_opt_nomatch(E, Cs, LitExpr);
_ ->
- [Current|case_opt_lit_1(E, Cs, LitExpr)]
+ [Current|case_opt_nomatch(E, Cs, LitExpr)]
end;
-case_opt_lit_1(_, [], _) -> [].
+case_opt_nomatch(_, [], _) -> [].
+
+%% case_opt_lit(Literal, Clauses0) -> {ok,[],Clauses} | error
+%% The current part of the case expression is a literal. That
+%% means that we will know at compile-time whether a clause
+%% will match, and we can remove the corresponding pattern from
+%% each clause.
+%%
+%% The only complication is if the literal is a binary or map.
+%% In general, it is difficult to know whether a binary or
+%% map pattern will match, so we give up in that case.
+
+case_opt_lit(Lit, Cs0) ->
+ try case_opt_lit_1(Lit, Cs0) of
+ Cs ->
+ {ok,[],Cs}
+ catch
+ throw:impossible ->
+ {error,Cs0}
+ end.
-case_opt_lit_2(E, [{[P|Ps],C,PsAcc,Bs0}|Cs]) ->
- %% Non-matching clauses have already been removed in case_opt_lit_1/3.
+case_opt_lit_1(E, [{[P|Ps],C,PsAcc,Bs0}|Cs]) ->
+ %% Non-matching clauses have already been removed
+ %% in case_opt_nomatch/3.
case cerl_clauses:match(P, E) of
{true,Bs} ->
%% The pattern matches the literal. Remove the pattern
%% and update the bindings.
- [{Ps,C,PsAcc,Bs++Bs0}|case_opt_lit_2(E, Cs)];
+ [{Ps,C,PsAcc,Bs++Bs0}|case_opt_lit_1(E, Cs)];
{false,_} ->
%% Binary literal and pattern. We are not sure whether
%% the pattern will match.
throw(impossible)
end;
-case_opt_lit_2(_, []) -> [].
+case_opt_lit_1(_, []) -> [].
%% case_opt_data(Expr, Clauses0, LitExpr) -> {ok,Exprs,Clauses}
+%% The case expression is a non-atomic data constructor (cons
+%% or tuple). We can know at compile time whether each clause
+%% will match, and we can delay the building of the data to
+%% the clauses where it is actually needed.
-case_opt_data(E, Cs0, LitExpr) ->
+case_opt_data(E, Cs0) ->
Es = cerl:data_es(E),
- Cs = case_opt_data_1(Cs0, Es,
- {cerl:data_type(E),cerl:data_arity(E)},
- LitExpr),
- {ok,Es,Cs}.
-
-case_opt_data_1([{[P|Ps0],C,PsAcc,Bs0}|Cs], Es, TypeSig, LitExpr) ->
- case case_data_pat(P, TypeSig) of
- {ok,Ps1,Bs1} ->
- [{Ps1++Ps0,C,PsAcc,Bs1++Bs0}|
- case_opt_data_1(Cs, Es, TypeSig,LitExpr)];
- error ->
- case LitExpr of
- false -> add_warning(C, nomatch_clause_type);
- true -> ok
- end,
- case_opt_data_1(Cs, Es, TypeSig, LitExpr)
- end;
-case_opt_data_1([], _, _, _) -> [].
-
-%% case_data_pat(Pattern, Type, Arity) -> {ok,[Pattern],[{AliasVar,Pat}]} | error.
-
-case_data_pat(P, TypeSig) ->
- case cerl:is_data(P) of
- false ->
- case_data_pat_var(P, TypeSig);
- true ->
- case {cerl:data_type(P),cerl:data_arity(P)} of
- TypeSig ->
- {ok,cerl:data_es(P),[]};
- {_,_} ->
- error
- end
+ TypeSig = {cerl:data_type(E),cerl:data_arity(E)},
+ try case_opt_data_1(Cs0, Es, TypeSig) of
+ Cs ->
+ {ok,Es,Cs}
+ catch
+ throw:impossible ->
+ %% The pattern contained a binary or map.
+ {error,Cs0}
end.
-%% case_data_pat_var(Pattern, {DataType,ArityType}) ->
-%% {ok,[Pattern],[{AliasVar,Pat}]}
+case_opt_data_1([{[P0|Ps0],C,PsAcc,Bs0}|Cs], Es, TypeSig) ->
+ P = case_opt_compiler_generated(P0),
+ BindTo = #c_var{name=dummy},
+ {Ps1,[{BindTo,_}|Bs1]} = case_data_pat_alias(P, BindTo, TypeSig, []),
+ [{Ps1++Ps0,C,PsAcc,Bs1++Bs0}|case_opt_data_1(Cs, Es, TypeSig)];
+case_opt_data_1([], _, _) -> [].
-case_data_pat_var(P, {Type,Arity}=TypeSig) ->
- %% If the entire case statement is evaluated in an effect
- %% context (e.g. "case {A,B} of ... end, ok"), there will
- %% be a warning that a term is constructed but never used.
- %% To avoid that warning, we must annotate the data
- %% constructor as compiler generated.
- Ann = [compiler_generated|cerl:get_ann(P)],
+case_data_pat_alias(P, BindTo0, TypeSig, Bs0) ->
case cerl:type(P) of
- var ->
- Vars = make_vars(cerl:get_ann(P), Arity),
- {ok,Vars,[{P,cerl:ann_make_data(Ann, Type, Vars)}]};
alias ->
- V = cerl:alias_var(P),
- Apat = cerl:alias_pat(P),
- case case_data_pat(Apat, TypeSig) of
- {ok,Ps,Bs} ->
- {ok,Ps,[{V,cerl:ann_make_data(Ann, Type, unalias_pat_list(Ps))}|Bs]};
- error ->
- error
- end;
+ %% Recursively handle the pattern and bind to
+ %% the alias variable.
+ BindTo = cerl:alias_var(P),
+ Apat0 = cerl:alias_pat(P),
+ Ann = [compiler_generated],
+ Apat = cerl:set_ann(Apat0, Ann),
+ {Ps,Bs} = case_data_pat_alias(Apat, BindTo, TypeSig, Bs0),
+ {Ps,[{BindTo0,BindTo}|Bs]};
+ var ->
+ %% Here we will need to actually build the data and bind
+ %% it to the variable.
+ {Type,Arity} = TypeSig,
+ Ann = [compiler_generated],
+ Vars = make_vars(Ann, Arity),
+ Data = cerl:ann_make_data(Ann, Type, Vars),
+ Bs = [{BindTo0,P},{P,Data}|Bs0],
+ {Vars,Bs};
_ ->
- error
+ %% Since case_opt_nomatch/3 has removed all clauses that
+ %% cannot match, we KNOW that this clause must match and
+ %% that the pattern must be a data constructor.
+ %% Here we must build the data and bind it to the variable.
+ {Type,_} = TypeSig,
+ DataEs = cerl:data_es(P),
+ Vars = pat_to_expr_list(DataEs),
+ Ann = [compiler_generated],
+ Data = cerl:ann_make_data(Ann, Type, Vars),
+ {DataEs,[{BindTo0,Data}]}
end.
-%% unalias_pat(Pattern) -> Pattern.
-%% Remove all the aliases in a pattern but using the alias variables
-%% instead of the values. We KNOW they will be bound.
+%% pat_to_expr(Pattern) -> Expression.
+%% Convert a pattern to an expression if possible. We KNOW that
+%% all variables in the pattern will be bound.
+%%
+%% Throw an 'impossible' exception if a map or (non-literal)
+%% binary is encountered. Trying to use a map pattern as an
+%% expression is incorrect, while rebuilding a potentially
+%% huge binary in an expression would be wasteful.
-unalias_pat(P) ->
- case cerl:is_c_alias(P) of
- true ->
+pat_to_expr(P) ->
+ case cerl:type(P) of
+ alias ->
cerl:alias_var(P);
- false ->
+ var ->
+ P;
+ _ ->
case cerl:is_data(P) of
false ->
- P;
+ %% Map or binary.
+ throw(impossible);
true ->
- Es = unalias_pat_list(cerl:data_es(P)),
+ Es = pat_to_expr_list(cerl:data_es(P)),
cerl:update_data(P, cerl:data_type(P), Es)
end
end.
-unalias_pat_list(Ps) -> [unalias_pat(P) || P <- Ps].
+pat_to_expr_list(Ps) -> [pat_to_expr(P) || P <- Ps].
make_vars(A, Max) ->
make_vars(A, 1, Max).
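A concrete reading of pat_to_expr/1 above, as a hedged illustration using surface Erlang syntax for the patterns rather than Core Erlang terms:

    %% Pattern (Erlang syntax)          pat_to_expr/1 result
    %% {X,Y}                            the tuple expression {X,Y}
    %% T = {X,Y}   (an alias)           the variable T
    %% X                                the variable X
    %% #{k := V},  <<X:8,R/binary>>     throws 'impossible'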
@@ -2234,58 +1974,130 @@ make_var_name() ->
list_to_atom("fol"++integer_to_list(N)).
letify(Bs, Body) ->
+ Ann = cerl:get_ann(Body),
foldr(fun({V,Val}, B) ->
- letify(V, Val, B)
+ cerl:ann_c_let(Ann, [V], Val, B)
end, Body, Bs).
-letify(#c_var{name=Vname}=Var, Val, Body) ->
- case core_lib:is_var_used(Vname, Body) of
- true ->
- A = element(2, Body),
- #c_let{anno=A,vars=[Var],arg=Val,body=Body};
- false -> Body
- end.
-
-%% opt_case_in_let(LetExpr) -> LetExpr'
+%% opt_not_in_let(Let) -> Cerl
+%% Try to optimize away a 'not' operator in a 'let'.
-opt_case_in_let(#c_let{vars=Vs,arg=Arg,body=B}=Let, Sub) ->
- opt_case_in_let_0(Vs, Arg, B, Let, Sub).
+-spec opt_not_in_let(cerl:c_let()) -> cerl:cerl().
-opt_case_in_let_0([#c_var{name=V}], Arg,
- #c_case{arg=#c_var{name=V},clauses=Cs}=Case, Let, Sub) ->
- case opt_case_in_let_1(V, Arg, Cs) of
- impossible ->
- case is_simple_case_arg(Arg) andalso
- not core_lib:is_var_used(V, Case#c_case{arg=#c_literal{val=nil}}) of
- true ->
- expr(opt_bool_case(Case#c_case{arg=Arg,clauses=Cs}), sub_new(Sub));
- false ->
- Let
+opt_not_in_let(#c_let{vars=[_]=Vs0,arg=Arg0,body=Body0}=Let) ->
+ case opt_not_in_let(Vs0, Arg0, Body0) of
+ {[],#c_values{es=[]},Body} ->
+ Body;
+ {Vs,Arg,Body} ->
+ Let#c_let{vars=Vs,arg=Arg,body=Body}
+ end;
+opt_not_in_let(Let) -> Let.
+
+%% opt_not_in_let(Vs, Arg, Body) -> {Vs',Arg',Body'}
+%% Try to optimize away a 'not' operator in a 'let'.
+
+-spec opt_not_in_let([cerl:c_var()], cerl:cerl(), cerl:cerl()) ->
+ {[cerl:c_var()],cerl:cerl(),cerl:cerl()}.
+
+opt_not_in_let([#c_var{name=V}]=Vs0, Arg0, Body0) ->
+ case cerl:type(Body0) of
+ call ->
+ %% let <V> = Expr in not V ==>
+ %% let <> = <> in notExpr
+ case opt_not_in_let_1(V, Body0, Arg0) of
+ no ->
+ {Vs0,Arg0,Body0};
+ {yes,Body} ->
+ {[],#c_values{es=[]},Body}
+ end;
+ 'let' ->
+ %% let <V> = Expr in let <Var> = not V in Body ==>
+ %% let <Var> = notExpr in Body
+ %% V must not be used in Body.
+ LetArg = cerl:let_arg(Body0),
+ case opt_not_in_let_1(V, LetArg, Arg0) of
+ no ->
+ {Vs0,Arg0,Body0};
+ {yes,Arg} ->
+ LetBody = cerl:let_body(Body0),
+ case core_lib:is_var_used(V, LetBody) of
+ true ->
+ {Vs0,Arg0,Body0};
+ false ->
+ LetVars = cerl:let_vars(Body0),
+ {LetVars,Arg,LetBody}
+ end
end;
- Expr -> Expr
+ _ ->
+ {Vs0,Arg0,Body0}
end;
-opt_case_in_let_0(_, _, _, Let, _) -> Let.
+opt_not_in_let(Vs, Arg, Body) ->
+ {Vs,Arg,Body}.
-opt_case_in_let_1(V, Arg, Cs) ->
- try
- opt_case_in_let_2(V, Arg, Cs)
- catch
- _:_ -> impossible
+opt_not_in_let_1(V, Call, Body) ->
+ case Call of
+ #c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val='not'},
+ args=[#c_var{name=V}]} ->
+ opt_not_in_let_2(Body);
+ _ ->
+ no
end.
-opt_case_in_let_2(V, Arg0,
- [#c_clause{pats=[#c_tuple{es=Es}],
- guard=#c_literal{val=true},body=B}|_]) ->
-
- %% In {V1,V2,...} = case E of P -> ... {Val1,Val2,...}; ... end.
- %% avoid building tuples, by converting tuples to multiple values.
- %% (The optimisation is not done if the built tuple is used or returned.)
-
- true = all(fun (#c_var{}) -> true;
- (_) -> false end, Es), %Only variables in tuple
- false = core_lib:is_var_used(V, B), %Built tuple must not be used.
- Arg1 = tuple_to_values(Arg0, length(Es)), %Might fail.
- #c_let{vars=Es,arg=Arg1,body=B}.
+opt_not_in_let_2(#c_case{clauses=Cs0}=Case) ->
+ Vars = make_vars([], 1),
+ Body = #c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val='not'},
+ args=Vars},
+ Cs = [begin
+ Let = #c_let{vars=Vars,arg=B,body=Body},
+ C#c_clause{body=opt_not_in_let(Let)}
+ end || #c_clause{body=B}=C <- Cs0],
+ {yes,Case#c_case{clauses=Cs}};
+opt_not_in_let_2(#c_call{}=Call0) ->
+ invert_call(Call0);
+opt_not_in_let_2(_) -> no.
+
+invert_call(#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=Name0},
+ args=[_,_]}=Call) ->
+ case inverse_rel_op(Name0) of
+ no -> no;
+ Name -> {yes,Call#c_call{name=#c_literal{val=Name}}}
+ end;
+invert_call(#c_call{}) -> no.
+
+%% inverse_rel_op(Op) -> no | RevOp
+
+inverse_rel_op('=:=') -> '=/=';
+inverse_rel_op('=/=') -> '=:=';
+inverse_rel_op('==') -> '/=';
+inverse_rel_op('/=') -> '==';
+inverse_rel_op('>') -> '=<';
+inverse_rel_op('<') -> '>=';
+inverse_rel_op('>=') -> '<';
+inverse_rel_op('=<') -> '>';
+inverse_rel_op(_) -> no.
+
+
+%% opt_bool_case_in_let(LetExpr, Sub) -> Core
+
+opt_bool_case_in_let(#c_let{vars=Vs,arg=Arg,body=B}=Let, Sub) ->
+ opt_case_in_let_1(Vs, Arg, B, Let, Sub).
+
+opt_case_in_let_1([#c_var{name=V}], Arg,
+ #c_case{arg=#c_var{name=V}}=Case0, Let, Sub) ->
+ case is_simple_case_arg(Arg) of
+ true ->
+ Case = opt_bool_case(Case0#c_case{arg=Arg}),
+ case core_lib:is_var_used(V, Case) of
+ false -> expr(Case, sub_new(Sub));
+ true -> Let
+ end;
+ false ->
+ Let
+ end;
+opt_case_in_let_1(_, _, _, Let, _) -> Let.
%% is_simple_case_arg(Expr) -> true|false
%% Determine whether the Expr is simple enough to be worth
@@ -2327,18 +2139,15 @@ is_bool_expr(#c_clause{body=B}, Sub) ->
is_bool_expr(B, Sub);
is_bool_expr(#c_let{vars=[V],arg=Arg,body=B}, Sub0) ->
Sub = case is_bool_expr(Arg, Sub0) of
- true -> update_types(V, [#c_literal{val=true}], Sub0);
+ true -> update_types(V, [bool], Sub0);
false -> Sub0
end,
is_bool_expr(B, Sub);
is_bool_expr(#c_let{body=B}, Sub) ->
%% Binding of multiple variables.
is_bool_expr(B, Sub);
-is_bool_expr(#c_literal{val=Bool}, _) when is_boolean(Bool) ->
- true;
-is_bool_expr(#c_var{name=V}, Sub) ->
- is_boolean_type(V, Sub);
-is_bool_expr(_, _) -> false.
+is_bool_expr(C, Sub) ->
+ is_boolean_type(C, Sub) =:= yes.
is_bool_expr_list([C|Cs], Sub) ->
is_bool_expr(C, Sub) andalso is_bool_expr_list(Cs, Sub);
@@ -2351,7 +2160,7 @@ is_bool_expr_list([], _) -> true.
%% functions, or is_record/2).
%%
is_safe_bool_expr(Core, Sub) ->
- is_safe_bool_expr_1(Core, Sub, gb_sets:empty()).
+ is_safe_bool_expr_1(Core, Sub, cerl_sets:new()).
is_safe_bool_expr_1(#c_call{module=#c_literal{val=erlang},
name=#c_literal{val=is_record},
@@ -2397,7 +2206,7 @@ is_safe_bool_expr_1(#c_let{vars=Vars,arg=Arg,body=B}, Sub, BoolVars) ->
true ->
case {is_safe_bool_expr_1(Arg, Sub, BoolVars),Vars} of
{true,[#c_var{name=V}]} ->
- is_safe_bool_expr_1(B, Sub, gb_sets:add(V, BoolVars));
+ is_safe_bool_expr_1(B, Sub, cerl_sets:add_element(V, BoolVars));
{false,_} ->
is_safe_bool_expr_1(B, Sub, BoolVars)
end;
@@ -2406,7 +2215,7 @@ is_safe_bool_expr_1(#c_let{vars=Vars,arg=Arg,body=B}, Sub, BoolVars) ->
is_safe_bool_expr_1(#c_literal{val=Val}, _Sub, _) ->
is_boolean(Val);
is_safe_bool_expr_1(#c_var{name=V}, _Sub, BoolVars) ->
- gb_sets:is_element(V, BoolVars);
+ cerl_sets:is_element(V, BoolVars);
is_safe_bool_expr_1(_, _, _) -> false.
is_safe_bool_expr_list([C|Cs], Sub, BoolVars) ->
@@ -2416,38 +2225,6 @@ is_safe_bool_expr_list([C|Cs], Sub, BoolVars) ->
end;
is_safe_bool_expr_list([], _, _) -> true.
-%% tuple_to_values(Expr, TupleArity) -> Expr'
-%% Convert tuples in return position of arity TupleArity to values.
-%% Throws an exception for constructs that are not handled.
-
-tuple_to_values(#c_tuple{es=Es}, Arity) when length(Es) =:= Arity ->
- core_lib:make_values(Es);
-tuple_to_values(#c_literal{val=Tuple}=Lit, Arity) when tuple_size(Tuple) =:= Arity ->
- Es = [Lit#c_literal{val=E} || E <- tuple_to_list(Tuple)],
- core_lib:make_values(Es);
-tuple_to_values(#c_case{clauses=Cs0}=Case, Arity) ->
- Cs1 = [tuple_to_values(E, Arity) || E <- Cs0],
- Case#c_case{clauses=Cs1};
-tuple_to_values(#c_seq{body=B0}=Seq, Arity) ->
- Seq#c_seq{body=tuple_to_values(B0, Arity)};
-tuple_to_values(#c_let{body=B0}=Let, Arity) ->
- Let#c_let{body=tuple_to_values(B0, Arity)};
-tuple_to_values(#c_receive{clauses=Cs0,timeout=Timeout,action=A0}=Rec, Arity) ->
- Cs = [tuple_to_values(E, Arity) || E <- Cs0],
- A = case Timeout of
- #c_literal{val=infinity} -> A0;
- _ -> tuple_to_values(A0, Arity)
- end,
- Rec#c_receive{clauses=Cs,action=A};
-tuple_to_values(#c_clause{body=B0}=Clause, Arity) ->
- B = tuple_to_values(B0, Arity),
- Clause#c_clause{body=B};
-tuple_to_values(Expr, _) ->
- case will_fail(Expr) of
- true -> Expr;
- false -> erlang:error({not_handled,Expr})
- end.
-
%% simplify_let(Let, Sub) -> Expr | impossible
%% If the argument part of an let contains a complex expression, such
%% as a let or a sequence, move the original let body into the complex
@@ -2472,9 +2249,9 @@ move_let_into_expr(#c_let{vars=InnerVs0,body=InnerBody0}=Inner,
%% in <InnerBody>
%%
Arg = body(Arg0, Sub0),
- ScopeSub0 = sub_subst_scope(Sub0#sub{t=[]}),
+ ScopeSub0 = sub_subst_scope(Sub0#sub{t=#{}}),
{OuterVs,ScopeSub} = pattern_list(OuterVs0, ScopeSub0),
-
+
OuterBody = body(OuterBody0, ScopeSub),
{InnerVs,Sub} = pattern_list(InnerVs0, Sub0),
@@ -2511,15 +2288,15 @@ move_let_into_expr(#c_let{vars=Lvs0,body=Lbody0}=Let,
CaVars0 = Ca0#c_clause.pats,
G0 = Ca0#c_clause.guard,
B0 = Ca0#c_clause.body,
- ScopeSub0 = sub_subst_scope(Sub0#sub{t=[]}),
+ ScopeSub0 = sub_subst_scope(Sub0#sub{t=#{}}),
{CaVars,ScopeSub} = pattern_list(CaVars0, ScopeSub0),
G = guard(G0, ScopeSub),
B1 = body(B0, ScopeSub),
{Lvs,B2,Sub1} = let_substs(Lvs0, B1, Sub0),
- Sub2 = Sub1#sub{s=gb_sets:union(ScopeSub#sub.s,
- Sub1#sub.s)},
+ Sub2 = Sub1#sub{s=cerl_sets:union(ScopeSub#sub.s,
+ Sub1#sub.s)},
Lbody = body(Lbody0, Sub2),
B = Let#c_let{vars=Lvs,arg=core_lib:make_values(B2),body=Lbody},
@@ -2552,88 +2329,257 @@ move_let_into_expr(_Let, _Expr, _Sub) -> impossible.
is_failing_clause(#c_clause{body=B}) ->
will_fail(B).
-scope_add(Vs, #sub{s=Scope0}=Sub) ->
- Scope = foldl(fun(V, S) when is_integer(V); is_atom(V) ->
- gb_sets:add(V, S)
- end, Scope0, Vs),
- Sub#sub{s=Scope}.
+%% opt_case_in_let(Let) -> Let'
+%% Try to avoid building tuples that are immediately matched.
+%% A common pattern is:
+%%
+%% {V1,V2,...} = case E of P -> ... {Val1,Val2,...}; ... end
+%%
+%% In Core Erlang the pattern would look like this:
+%%
+%% let <V> = case E of
+%% ... -> ... {Val1,Val2}
+%% ...
+%% end,
+%% in case V of
+%% {A,B} -> ... <use A and B> ...
+%% end
+%%
+%% Rewrite this to:
+%%
+%% let <V1,V2> = case E of
+%% ... -> ... <Val1,Val2>
+%% ...
+%% end,
+%% in
+%% let <V> = {V1,V2}
+%% in case V of
+%% {A,B} -> ... <use A and B> ...
+%% end
+%%
+%% Note that the second 'case' is unchanged. The other optimizations
+%% in this module will eliminate the building of the tuple and
+%% rewrite the second case to:
+%%
+%% case <V1,V2> of
+%% <A,B> -> ... <use A and B> ...
+%% end
+%%
+
+opt_case_in_let(#c_let{vars=Vs,arg=Arg0,body=B}=Let0) ->
+ case matches_data(Vs, B) of
+ {yes,TypeSig} ->
+ case delay_build(Arg0, TypeSig) of
+ no ->
+ Let0;
+ {yes,Vars,Arg,Data} ->
+ InnerLet = Let0#c_let{arg=Data},
+ Let0#c_let{vars=Vars,arg=Arg,body=InnerLet}
+ end;
+ no ->
+ Let0
+ end.
+
+matches_data([#c_var{name=V}], #c_case{arg=#c_var{name=V},
+ clauses=[#c_clause{pats=[P]}|_]}) ->
+ case cerl:is_data(P) of
+ false ->
+ no;
+ true ->
+ case cerl:data_type(P) of
+ {atomic,_} ->
+ no;
+ Type ->
+ {yes,{Type,cerl:data_arity(P)}}
+ end
+ end;
+matches_data(_, _) -> no.
+
+delay_build(Core, TypeSig) ->
+ case cerl:is_data(Core) of
+ true -> no;
+ false -> delay_build_1(Core, TypeSig)
+ end.
+
+delay_build_1(Core0, TypeSig) ->
+ try delay_build_expr(Core0, TypeSig) of
+ Core ->
+ {Type,Arity} = TypeSig,
+ Ann = [compiler_generated],
+ Vars = make_vars(Ann, Arity),
+ Data = cerl:ann_make_data(Ann, Type, Vars),
+ {yes,Vars,Core,Data}
+ catch
+ throw:impossible ->
+ no
+ end.
+
+delay_build_cs([#c_clause{body=B0}=C0|Cs], TypeSig) ->
+ B = delay_build_expr(B0, TypeSig),
+ C = C0#c_clause{body=B},
+ [C|delay_build_cs(Cs, TypeSig)];
+delay_build_cs([], _) -> [].
+
+delay_build_expr(Core, {Type,Arity}=TypeSig) ->
+ case cerl:is_data(Core) of
+ false ->
+ delay_build_expr_1(Core, TypeSig);
+ true ->
+ case {cerl:data_type(Core),cerl:data_arity(Core)} of
+ {Type,Arity} ->
+ core_lib:make_values(cerl:data_es(Core));
+ {_,_} ->
+ throw(impossible)
+ end
+ end.
+
+delay_build_expr_1(#c_case{clauses=Cs0}=Case, TypeSig) ->
+ Cs = delay_build_cs(Cs0, TypeSig),
+ Case#c_case{clauses=Cs};
+delay_build_expr_1(#c_let{body=B0}=Let, TypeSig) ->
+ B = delay_build_expr(B0, TypeSig),
+ Let#c_let{body=B};
+delay_build_expr_1(#c_receive{clauses=Cs0,
+ timeout=Timeout,
+ action=A0}=Rec, TypeSig) ->
+ Cs = delay_build_cs(Cs0, TypeSig),
+ A = case Timeout of
+ #c_literal{val=infinity} -> A0;
+ _ -> delay_build_expr(A0, TypeSig)
+ end,
+ Rec#c_receive{clauses=Cs,action=A};
+delay_build_expr_1(#c_seq{body=B0}=Seq, TypeSig) ->
+ B = delay_build_expr(B0, TypeSig),
+ Seq#c_seq{body=B};
+delay_build_expr_1(Core, _TypeSig) ->
+ case will_fail(Core) of
+ true -> Core;
+ false -> throw(impossible)
+ end.
%% opt_simple_let(#c_let{}, Context, Sub) -> CoreTerm
%%  Optimize a let construct that does not contain any lets
%%  in its argument.
-opt_simple_let(#c_let{arg=Arg0}=Let, Ctxt, Sub0) ->
- Arg = body(Arg0, value, Sub0), %This is a body
+opt_simple_let(Let0, Ctxt, Sub) ->
+ case opt_not_in_let(Let0) of
+ #c_let{}=Let ->
+ opt_simple_let_0(Let, Ctxt, Sub);
+ Expr ->
+ expr(Expr, Ctxt, Sub)
+ end.
+
+opt_simple_let_0(#c_let{arg=Arg0}=Let, Ctxt, Sub) ->
+ Arg = body(Arg0, value, Sub), %This is a body
case will_fail(Arg) of
true -> Arg;
- false -> opt_simple_let_1(Let, Arg, Ctxt, Sub0)
+ false -> opt_simple_let_1(Let, Arg, Ctxt, Sub)
end.
opt_simple_let_1(#c_let{vars=Vs0,body=B0}=Let, Arg0, Ctxt, Sub0) ->
%% Optimise let and add new substitutions.
- {Vs,Args,Sub1} = let_substs(Vs0, Arg0, Sub0),
- BodySub = case {Vs,Args} of
- {[V],[A]} ->
- case is_bool_expr(A, Sub0) of
- true ->
- update_types(V, [#c_literal{val=true}], Sub1);
- false ->
- Sub1
- end;
- {_,_} -> Sub1
- end,
- B = body(B0, Ctxt, BodySub),
- Arg = core_lib:make_values(Args),
- opt_simple_let_2(Let, Vs, Arg, B, Ctxt, Sub1).
-
-opt_simple_let_2(Let0, Vs0, Arg0, Body0, effect, Sub) ->
- case {Vs0,Arg0,Body0} of
- {[],#c_values{es=[]},Body} ->
- %% No variables left (because of substitutions).
- Body;
- {[_|_],Arg,#c_literal{}} ->
- %% The body is a literal. That means that we can ignore
- %% it and that the return value is Arg revisited in
- %% effect context.
- body(Arg, effect, sub_new_preserve_types(Sub));
- {Vs,Arg,Body} ->
- %% Since we are in effect context, there is a chance
- %% that the body no longer references the variables.
- %% In that case we can construct a sequence and visit
- %% that in effect context:
- %% let <Var> = Arg in BodyWithoutVar ==> seq Arg BodyWithoutVar
- case is_any_var_used(Vs, Body) of
- false ->
- expr(#c_seq{arg=Arg,body=Body}, effect, sub_new_preserve_types(Sub));
- true ->
- Let = Let0#c_let{vars=Vs,arg=Arg,body=Body},
- opt_case_in_let_arg(opt_case_in_let(Let, Sub), effect, Sub)
- end
- end;
-opt_simple_let_2(Let, Vs0, Arg0, Body, value, Sub) ->
+ {Vs1,Args,Sub1} = let_substs(Vs0, Arg0, Sub0),
+ BodySub = update_let_types(Vs1, Args, Sub1),
+ B1 = body(B0, Ctxt, BodySub),
+ Arg1 = core_lib:make_values(Args),
+ {Vs,Arg,B} = opt_not_in_let(Vs1, Arg1, B1),
+ opt_simple_let_2(Let, Vs, Arg, B, B0, Ctxt, Sub1).
+
+opt_simple_let_2(Let0, Vs0, Arg0, Body, PrevBody, Ctxt, Sub) ->
case {Vs0,Arg0,Body} of
- {[#c_var{name=N1}],Arg,#c_var{name=N2}} ->
+ {[#c_var{name=N1}],Arg1,#c_var{name=N2}} ->
case N1 =:= N2 of
true ->
%% let <Var> = Arg in <Var> ==> Arg
- Arg;
+ Arg1;
false ->
%% let <Var> = Arg in <OtherVar> ==> seq Arg OtherVar
- expr(#c_seq{arg=Arg,body=Body}, value, sub_new_preserve_types(Sub))
+ Arg = maybe_suppress_warnings(Arg1, Vs0, PrevBody),
+ expr(#c_seq{arg=Arg,body=Body}, Ctxt,
+ sub_new_preserve_types(Sub))
end;
{[],#c_values{es=[]},_} ->
%% No variables left.
Body;
- {_,Arg,#c_literal{}} ->
- %% The variable is not used in the body. The argument
- %% can be evaluated in effect context to simplify it.
- expr(#c_seq{arg=Arg,body=Body}, value, sub_new_preserve_types(Sub));
- {Vs,Arg,Body} ->
- opt_case_in_let_arg(
- opt_case_in_let(Let#c_let{vars=Vs,arg=Arg,body=Body}, Sub),
- value, Sub)
+ {Vs,Arg1,#c_literal{}} ->
+ Arg = maybe_suppress_warnings(Arg1, Vs, PrevBody),
+ E = case Ctxt of
+ effect ->
+ %% Throw away the literal body.
+ Arg;
+ value ->
+ %% Since the variable is not used in the body, we
+ %% can rewrite the let to a sequence.
+ %% let <Var> = Arg in Literal ==> seq Arg Literal
+ #c_seq{arg=Arg,body=Body}
+ end,
+ expr(E, Ctxt, sub_new_preserve_types(Sub));
+ {Vs,Arg1,Body} ->
+ %% If none of the variables are used in the body, we can
+ %% rewrite the let to a sequence:
+ %% let <Var> = Arg in BodyWithoutVar ==>
+ %% seq Arg BodyWithoutVar
+ case is_any_var_used(Vs, Body) of
+ false ->
+ Arg = maybe_suppress_warnings(Arg1, Vs, PrevBody),
+ expr(#c_seq{arg=Arg,body=Body}, Ctxt,
+ sub_new_preserve_types(Sub));
+ true ->
+ Let1 = Let0#c_let{vars=Vs,arg=Arg1,body=Body},
+ Let2 = opt_bool_case_in_let(Let1, Sub),
+ opt_case_in_let_arg(Let2, Ctxt, Sub)
+ end
+ end.
+
+%% maybe_suppress_warnings(Arg, [#c_var{}], PreviousBody) -> Arg'
+%% Try to suppress false warnings when a variable is not used.
+%% For instance, we don't expect a warning for useless building in:
+%%
+%% R = #r{}, %No warning expected.
+%% R#r.f %Optimization would remove the reference to R.
+%%
+%% To avoid false warnings, we will check whether the variables were
+%% referenced in the original unoptimized code. If they were, we will
+%% consider the warning false and suppress it.
+
+maybe_suppress_warnings(Arg, Vs, PrevBody) ->
+ case should_suppress_warning(Arg) of
+ true ->
+ Arg; %Already suppressed.
+ false ->
+ case is_any_var_used(Vs, PrevBody) of
+ true ->
+ suppress_warning([Arg]);
+ false ->
+ Arg
+ end
end.
+%% Suppress warnings for a Core Erlang expression whose value will
+%% be ignored.
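%% As an illustrative example: if the unused value is the tuple
%% {ok,foo(X)}, the tuple itself and the literal 'ok' are dropped,
%% while the call foo(X) is kept, since it may have side effects and
%% any warning about it could be justified.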
+suppress_warning([H|T]) ->
+ case cerl:is_literal(H) of
+ true ->
+ suppress_warning(T);
+ false ->
+ case cerl:is_data(H) of
+ true ->
+ suppress_warning(cerl:data_es(H) ++ T);
+ false ->
+ %% Some other thing, such as a function call.
+ %% This cannot be the compiler's fault, so the
+ %% warning should not be suppressed. We must
+ %% be careful not to destroy tail-recursion.
+ case T of
+ [] ->
+ H;
+ [_|_] ->
+ cerl:c_seq(H, suppress_warning(T))
+ end
+ end
+ end;
+suppress_warning([]) -> void().
+
move_case_into_arg(#c_case{arg=#c_let{vars=OuterVars0,arg=OuterArg,
body=InnerArg0}=Outer,
clauses=InnerClauses}=Inner, Sub) ->
@@ -2647,7 +2593,7 @@ move_case_into_arg(#c_case{arg=#c_let{vars=OuterVars0,arg=OuterArg,
%% let <OuterVars> = <OuterArg>
%% in case <InnerArg> of <InnerClauses> end
%%
- ScopeSub0 = sub_subst_scope(Sub#sub{t=[]}),
+ ScopeSub0 = sub_subst_scope(Sub#sub{t=#{}}),
{OuterVars,ScopeSub} = pattern_list(OuterVars0, ScopeSub0),
InnerArg = body(InnerArg0, ScopeSub),
Outer#c_let{vars=OuterVars,arg=OuterArg,
@@ -2676,7 +2622,7 @@ move_case_into_arg(#c_case{arg=#c_case{arg=OuterArg,
%% <OuterCb>
%% end
%%
- ScopeSub0 = sub_subst_scope(Sub#sub{t=[]}),
+ ScopeSub0 = sub_subst_scope(Sub#sub{t=#{}}),
{OuterPats,ScopeSub} = pattern_list(OuterPats0, ScopeSub0),
OuterGuard = guard(OuterGuard0, ScopeSub),
InnerArg = body(InnerArg0, ScopeSub),
@@ -2721,7 +2667,7 @@ move_case_into_arg(_, _) ->
%% <> when 'true' ->
%% let <Var> = Literal2 in LetBody
%% end
-%%
+%%
%% In the worst case, the size of the code could increase.
%% In practice, though, substituting the literals into
%% LetBody and doing constant folding will decrease the code
@@ -2754,14 +2700,114 @@ is_any_var_used([#c_var{name=V}|Vs], Expr) ->
end;
is_any_var_used([], _) -> false.
-is_boolean_type(V, #sub{t=Tdb}) ->
- case orddict:find(V, Tdb) of
- {ok,bool} -> true;
- _ -> false
+%%%
+%%% Retrieving information about types.
+%%%
+
+-spec get_type(cerl:cerl(), #sub{}) -> type_info() | 'none'.
+
+get_type(#c_var{name=V}, #sub{t=Tdb}) ->
+ case Tdb of
+ #{V:=Type} -> Type;
+ _ -> none
+ end;
+get_type(C, _) ->
+ case cerl:type(C) of
+ binary -> C;
+ map -> C;
+ _ ->
+ case cerl:is_data(C) of
+ true -> C;
+ false -> none
+ end
end.
+-spec is_boolean_type(cerl:cerl(), sub()) -> yes_no_maybe().
+
+is_boolean_type(Var, Sub) ->
+ case get_type(Var, Sub) of
+ none ->
+ maybe;
+ bool ->
+ yes;
+ C ->
+ B = cerl:is_c_atom(C) andalso
+ is_boolean(cerl:atom_val(C)),
+ yes_no(B)
+ end.
+
+-spec is_int_type(cerl:cerl(), sub()) -> yes_no_maybe().
+
+is_int_type(Var, Sub) ->
+ case get_type(Var, Sub) of
+ none -> maybe;
+ integer -> yes;
+ C -> yes_no(cerl:is_c_int(C))
+ end.
+
+-spec is_tuple_type(cerl:cerl(), sub()) -> yes_no_maybe().
+
+is_tuple_type(Var, Sub) ->
+ case get_type(Var, Sub) of
+ none -> maybe;
+ C -> yes_no(cerl:is_c_tuple(C))
+ end.
+
+yes_no(true) -> yes;
+yes_no(false) -> no.
+
+%%%
+%%% Update type information.
+%%%
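+%% A few illustrative cases of what gets recorded: after a binding such
+%% as Sz = byte_size(Bin), Sz is recorded as 'integer'; a case clause
+%% that matches a variable X against a tuple pattern records that
+%% pattern for X; matching X against 'true' or 'false' records X as
+%% 'bool'. Later queries through is_int_type/2, is_tuple_type/2 and
+%% is_boolean_type/2 can then answer 'yes' instead of 'maybe'.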
+
+update_let_types(Vs, Args, Sub) when is_list(Args) ->
+ update_let_types_1(Vs, Args, Sub);
+update_let_types(_Vs, _Arg, Sub) ->
+ %% The argument is a complex expression (such as a 'case')
+ %% that returns multiple values.
+ Sub.
+
+update_let_types_1([#c_var{}=V|Vs], [A|As], Sub0) ->
+ Sub = update_types_from_expr(V, A, Sub0),
+ update_let_types_1(Vs, As, Sub);
+update_let_types_1([], [], Sub) -> Sub.
+
+update_types_from_expr(V, Expr, Sub) ->
+ Type = extract_type(Expr, Sub),
+ update_types(V, [Type], Sub).
+
+extract_type(#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=Name},
+ args=Args}=Call, Sub) ->
+ case returns_integer(Name, Args) of
+ true -> integer;
+ false -> extract_type_1(Call, Sub)
+ end;
+extract_type(Expr, Sub) ->
+ extract_type_1(Expr, Sub).
+
+extract_type_1(Expr, Sub) ->
+ case is_bool_expr(Expr, Sub) of
+ false -> Expr;
+ true -> bool
+ end.
+
+returns_integer(bit_size, [_]) -> true;
+returns_integer('bsl', [_,_]) -> true;
+returns_integer('bsr', [_,_]) -> true;
+returns_integer(byte_size, [_]) -> true;
+returns_integer(length, [_]) -> true;
+returns_integer('rem', [_,_]) -> true;
+returns_integer(size, [_]) -> true;
+returns_integer(tuple_size, [_]) -> true;
+returns_integer(trunc, [_]) -> true;
+returns_integer(_, _) -> false.
+
%% update_types(Expr, Pattern, Sub) -> Sub'
%% Update the type database.
+
+-spec update_types(cerl:cerl(), [type_info()], sub()) -> sub().
+
update_types(Expr, Pat, #sub{t=Tdb0}=Sub) ->
Tdb = update_types_1(Expr, Pat, Tdb0),
Sub#sub{t=Tdb}.
@@ -2778,33 +2824,38 @@ update_types_1(#c_var{name=V,anno=Anno}, Pat, Types) ->
update_types_1(_, _, Types) -> Types.
update_types_2(V, [#c_tuple{}=P], Types) ->
- orddict:store(V, P, Types);
+ Types#{V=>P};
update_types_2(V, [#c_literal{val=Bool}], Types) when is_boolean(Bool) ->
- orddict:store(V, bool, Types);
+ Types#{V=>bool};
+update_types_2(V, [Type], Types) when is_atom(Type) ->
+ Types#{V=>Type};
update_types_2(_, _, Types) -> Types.
%% kill_types(V, Tdb) -> Tdb'
%%  Kill any entries that reference the variable,
%% either in the key or in the value.
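%%  For example (illustrative): killing X removes an entry keyed on X
%%  such as X => {Y,Z}, and also W => {X,1} since its value references X,
%%  while an entry with an atomic type such as W => bool is kept.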
-kill_types(V, [{V,_}|Tdb]) ->
- kill_types(V, Tdb);
-kill_types(V, [{_,#c_tuple{}=Tuple}=Entry|Tdb]) ->
+kill_types(V, Tdb) ->
+ maps:from_list(kill_types2(V,maps:to_list(Tdb))).
+
+kill_types2(V, [{V,_}|Tdb]) ->
+ kill_types2(V, Tdb);
+kill_types2(V, [{_,#c_tuple{}=Tuple}=Entry|Tdb]) ->
case core_lib:is_var_used(V, Tuple) of
- false -> [Entry|kill_types(V, Tdb)];
- true -> kill_types(V, Tdb)
+ false -> [Entry|kill_types2(V, Tdb)];
+ true -> kill_types2(V, Tdb)
end;
-kill_types(V, [{_,Atom}=Entry|Tdb]) when is_atom(Atom) ->
- [Entry|kill_types(V, Tdb)];
-kill_types(_, []) -> [].
+kill_types2(V, [{_,Atom}=Entry|Tdb]) when is_atom(Atom) ->
+ [Entry|kill_types2(V, Tdb)];
+kill_types2(_, []) -> [].
%% copy_type(DestVar, SrcVar, Tdb) -> Tdb'
%% If the SrcVar has a type, assign it to DestVar.
%%
copy_type(V, #c_var{name=Src}, Tdb) ->
- case orddict:find(Src, Tdb) of
- {ok,Type} -> orddict:store(V, Type, Tdb);
- error -> Tdb
+ case Tdb of
+ #{Src:=Type} -> Tdb#{V=>Type};
+ _ -> Tdb
end;
copy_type(_, _, Tdb) -> Tdb.
@@ -3047,7 +3098,7 @@ bsm_ensure_no_partition_after([#c_clause{pats=Ps}|Cs], Pos) ->
bsm_problem(P, bin_partition)
end;
bsm_ensure_no_partition_after([], _) -> ok.
-
+
bsm_could_match_binary(#c_alias{pat=P}) -> bsm_could_match_binary(P);
bsm_could_match_binary(#c_cons{}) -> false;
bsm_could_match_binary(#c_tuple{}) -> false;
@@ -3081,11 +3132,11 @@ add_bin_opt_info(Core, Term) ->
end.
add_warning(Core, Term) ->
- case is_compiler_generated(Core) of
+ case should_suppress_warning(Core) of
true ->
ok;
false ->
- Anno = core_lib:get_anno(Core),
+ Anno = cerl:get_ann(Core),
Line = get_line(Anno),
File = get_file(Anno),
Key = {?MODULE,warnings},
@@ -3106,9 +3157,17 @@ get_file([{file,File}|_]) -> File;
get_file([_|T]) -> get_file(T);
get_file([]) -> "no_file". % should not happen
+should_suppress_warning(Core) ->
+ is_compiler_generated(Core) orelse
+ is_result_unwanted(Core).
+
is_compiler_generated(Core) ->
- Anno = core_lib:get_anno(Core),
- member(compiler_generated, Anno).
+ Ann = cerl:get_ann(Core),
+ member(compiler_generated, Ann).
+
+is_result_unwanted(Core) ->
+ Ann = cerl:get_ann(Core),
+ member(result_not_wanted, Ann).
get_warnings() ->
ordsets:from_list((erase({?MODULE,warnings}))).
@@ -3200,12 +3259,12 @@ format_error(bin_var_used_in_guard) ->
verify_scope(E, #sub{s=Scope}) ->
Free0 = cerl_trees:free_variables(E),
Free = [V || V <- Free0, not is_tuple(V)], %Ignore function names.
- case ordsets:is_subset(Free, gb_sets:to_list(Scope)) of
+ case ordsets:is_subset(Free, cerl_sets:to_list(Scope)) of
true -> true;
false ->
io:format("~p\n", [E]),
io:format("~p\n", [Free]),
- io:format("~p\n", [gb_sets:to_list(Scope)]),
+ io:format("~p\n", [cerl_sets:to_list(Scope)]),
false
end.
-endif.
diff --git a/lib/compiler/src/sys_core_fold_lists.erl b/lib/compiler/src/sys_core_fold_lists.erl
new file mode 100644
index 0000000000..49dc59052a
--- /dev/null
+++ b/lib/compiler/src/sys_core_fold_lists.erl
@@ -0,0 +1,386 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%% Purpose : Inline higher-order list functions from the lists module.
+
+-module(sys_core_fold_lists).
+
+-export([call/4]).
+
+-include("core_parse.hrl").
+
+%% We inline some very common higher-order list operations.
+%% We use the same evaluation order as the library functions.
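+%%
+%% As a rough sketch (in Erlang-like pseudo code), a call such as
+%% lists:all(F, L) is turned into a local recursive function:
+%%
+%%    'lists^all'([X|Xs]) ->
+%%        case F(X) of
+%%            true -> 'lists^all'(Xs);
+%%            false -> false
+%%        end;
+%%    'lists^all'([]) when is_function(F, 1) -> true;
+%%    'lists^all'(Xs) -> error(function_clause).
+%%
+%% The clauses below build the corresponding Core Erlang letrec for
+%% each supported lists function.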
+
+-spec call(cerl:c_call(), atom(), atom(), [cerl:cerl()]) ->
+ 'none' | cerl:cerl().
+
+call(#c_call{anno=Anno}, lists, all, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^all',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
+ CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
+ CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
+ body=#c_literal{val=false}},
+ CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err1)},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
+ clauses = [CC1, CC2, CC3]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=true}},
+ Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^all',1}}|Anno], Err2)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, any, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^any',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
+ CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
+ body=#c_literal{val=true}},
+ CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno, op=Loop, args=[Xs]}},
+ CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err1)},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_case{arg=#c_apply{anno=Anno, op=F, args=[X]},
+ clauses = [CC1, CC2, CC3]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=false}},
+ Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^any',1}}|Anno], Err2)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, foreach, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^foreach',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_seq{arg=#c_apply{anno=Anno, op=F, args=[X]},
+ body=#c_apply{anno=Anno, op=Loop, args=[Xs]}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=ok}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^foreach',1}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, map, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^map',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ H = #c_var{name='H'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_let{vars=[H], arg=#c_apply{anno=Anno,
+ op=F,
+ args=[X]},
+ body=#c_cons{hd=H,
+ anno=[compiler_generated],
+ tl=#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs]}}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=[]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^map',1}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, flatmap, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^flatmap',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ H = #c_var{name='H'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_let{vars=[H],
+ arg=#c_apply{anno=Anno, op=F, args=[X]},
+ body=#c_call{anno=[compiler_generated|Anno],
+ module=#c_literal{val=erlang},
+ name=#c_literal{val='++'},
+ args=[H,
+ #c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs]}]}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=[]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^flatmap',1}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, filter, [Arg1,Arg2]) ->
+ Loop = #c_var{name={'lists^filter',1}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ B = #c_var{name='B'},
+ Err1 = #c_tuple{es=[#c_literal{val='case_clause'}, X]},
+ CC1 = #c_clause{pats=[#c_literal{val=true}], guard=#c_literal{val=true},
+ body=#c_cons{anno=[compiler_generated], hd=X, tl=Xs}},
+ CC2 = #c_clause{pats=[#c_literal{val=false}], guard=#c_literal{val=true},
+ body=Xs},
+ CC3 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err1)},
+ Case = #c_case{arg=B, clauses = [CC1, CC2, CC3]},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_let{vars=[B],
+ arg=#c_apply{anno=Anno, op=F, args=[X]},
+ body=#c_let{vars=[Xs],
+ arg=#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs]},
+ body=Case}}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=1}]},
+ body=#c_literal{val=[]}},
+ Err2 = #c_tuple{es=[#c_literal{val='function_clause'}, F, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^filter',1}}|Anno], Err2)},
+ Fun = #c_fun{vars=[Xs],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, L], arg=#c_values{es=[Arg1, Arg2]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L]}}};
+call(#c_call{anno=Anno}, lists, foldl, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^foldl',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ A = #c_var{name='A'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, #c_apply{anno=Anno,
+ op=F,
+ args=[X, A]}]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+ body=A},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^foldl',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, A],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}};
+call(#c_call{anno=Anno}, lists, foldr, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^foldr',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ A = #c_var{name='A'},
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=#c_apply{anno=Anno,
+ op=F,
+ args=[X, #c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, A]}]}},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+ body=A},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, A, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^foldr',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, A],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, A, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+ body=#c_apply{anno=Anno, op=Loop, args=[L, A]}}};
+call(#c_call{anno=Anno}, lists, mapfoldl, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^mapfoldl',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Avar = #c_var{name='A'},
+ Match =
+ fun (A, P, E) ->
+ C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
+ Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
+ C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err)},
+ #c_case{arg=A, clauses=[C1, C2]}
+ end,
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+ body=Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
+ #c_tuple{es=[X, Avar]},
+%%% Tuple passing version
+ Match(#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, Avar]},
+ #c_tuple{es=[Xs, Avar]},
+ #c_tuple{anno=[compiler_generated],
+ es=[#c_cons{anno=[compiler_generated],
+ hd=X, tl=Xs},
+ Avar]})
+%%% Multiple-value version
+%%% #c_let{vars=[Xs,A],
+%%% %% The tuple here will be optimised
+%%% %% away later; no worries.
+%%% arg=#c_apply{op=Loop, args=[Xs, A]},
+%%% body=#c_values{es=[#c_cons{hd=X, tl=Xs},
+%%% A]}}
+ )},
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+%%% Tuple passing version
+ body=#c_tuple{anno=[compiler_generated],
+ es=[#c_literal{val=[]}, Avar]}},
+%%% Multiple-value version
+%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^mapfoldl',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, Avar],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+%%% Tuple passing version
+ body=#c_apply{anno=Anno,
+ op=Loop,
+ args=[L, Avar]}}};
+%%% Multiple-value version
+%%% body=#c_let{vars=[Xs, A],
+%%% arg=#c_apply{op=Loop,
+%%% args=[L, A]},
+%%% body=#c_tuple{es=[Xs, A]}}}};
+call(#c_call{anno=Anno}, lists, mapfoldr, [Arg1,Arg2,Arg3]) ->
+ Loop = #c_var{name={'lists^mapfoldr',2}},
+ F = #c_var{name='F'},
+ Xs = #c_var{name='Xs'},
+ X = #c_var{name='X'},
+ Avar = #c_var{name='A'},
+ Match =
+ fun (A, P, E) ->
+ C1 = #c_clause{pats=[P], guard=#c_literal{val=true}, body=E},
+ Err = #c_tuple{es=[#c_literal{val='badmatch'}, X]},
+ C2 = #c_clause{pats=[X], guard=#c_literal{val=true},
+ body=match_fail(Anno, Err)},
+ #c_case{arg=A, clauses=[C1, C2]}
+ end,
+ C1 = #c_clause{pats=[#c_cons{hd=X, tl=Xs}], guard=#c_literal{val=true},
+%%% Tuple passing version
+ body=Match(#c_apply{anno=Anno,
+ op=Loop,
+ args=[Xs, Avar]},
+ #c_tuple{es=[Xs, Avar]},
+ Match(#c_apply{anno=Anno, op=F, args=[X, Avar]},
+ #c_tuple{es=[X, Avar]},
+ #c_tuple{anno=[compiler_generated],
+ es=[#c_cons{anno=[compiler_generated],
+ hd=X, tl=Xs}, Avar]}))
+%%% Multiple-value version
+%%% body=#c_let{vars=[Xs,A],
+%%% %% The tuple will be optimised away
+%%% arg=#c_apply{op=Loop, args=[Xs, A]},
+%%% body=Match(#c_apply{op=F, args=[X, A]},
+%%% #c_tuple{es=[X, A]},
+%%% #c_values{es=[#c_cons{hd=X, tl=Xs},
+%%% A]})}
+ },
+ C2 = #c_clause{pats=[#c_literal{val=[]}],
+ guard=#c_call{module=#c_literal{val=erlang},
+ name=#c_literal{val=is_function},
+ args=[F, #c_literal{val=2}]},
+%%% Tuple passing version
+ body=#c_tuple{anno=[compiler_generated],
+ es=[#c_literal{val=[]}, Avar]}},
+%%% Multiple-value version
+%%% body=#c_values{es=[#c_literal{val=[]}, A]}},
+ Err = #c_tuple{es=[#c_literal{val='function_clause'}, F, Avar, Xs]},
+ C3 = #c_clause{pats=[Xs], guard=#c_literal{val=true},
+ body=match_fail([{function_name,{'lists^mapfoldr',2}}|Anno], Err)},
+ Fun = #c_fun{vars=[Xs, Avar],
+ body=#c_case{arg=Xs, clauses=[C1, C2, C3]}},
+ L = #c_var{name='L'},
+ #c_let{vars=[F, Avar, L], arg=#c_values{es=[Arg1, Arg2, Arg3]},
+ body=#c_letrec{defs=[{Loop,Fun}],
+%%% Tuple passing version
+ body=#c_apply{anno=Anno,
+ op=Loop,
+ args=[L, Avar]}}};
+%%% Multiple-value version
+%%% body=#c_let{vars=[Xs, A],
+%%% arg=#c_apply{op=Loop,
+%%% args=[L, A]},
+%%% body=#c_tuple{es=[Xs, A]}}}};
+call(_, _, _, _) ->
+ none.
+
+match_fail(Ann, Arg) ->
+ Name = cerl:abstract(match_fail),
+ Args = [Arg],
+ cerl:ann_c_primop(Ann, Name, Args).
diff --git a/lib/compiler/src/sys_core_inline.erl b/lib/compiler/src/sys_core_inline.erl
index 9f93acb666..1e3a735e9b 100644
--- a/lib/compiler/src/sys_core_inline.erl
+++ b/lib/compiler/src/sys_core_inline.erl
@@ -195,10 +195,10 @@ kill_id_anns(Body) ->
A = kill_id_anns_1(A0),
CFun#c_fun{anno=A};
(Expr) ->
- %% Mark everything as compiler generated to suppress
- %% bogus warnings.
- A = compiler_generated(core_lib:get_anno(Expr)),
- core_lib:set_anno(Expr, A)
+ %% Mark everything as compiler generated to
+ %% suppress bogus warnings.
+ A = compiler_generated(cerl:get_ann(Expr)),
+ cerl:set_ann(Expr, A)
end, Body).
kill_id_anns_1([{'id',_}|As]) ->
diff --git a/lib/compiler/src/sys_pre_expand.erl b/lib/compiler/src/sys_pre_expand.erl
index f99307c865..4c4628d580 100644
--- a/lib/compiler/src/sys_pre_expand.erl
+++ b/lib/compiler/src/sys_pre_expand.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,7 +38,6 @@
-record(expand, {module=[], %Module name
exports=[], %Exports
imports=[], %Imports
- compile=[], %Compile flags
attributes=[], %Attributes
callbacks=[], %Callbacks
optional_callbacks=[] :: [fa()], %Optional callbacks
@@ -46,9 +45,7 @@
vcount=0, %Variable counter
func=[], %Current function
arity=[], %Arity for current function
- fcount=0, %Local fun count
- bitdefault,
- bittypes
+ fcount=0 %Local fun count
}).
%% module(Forms, CompileOptions)
@@ -69,15 +66,12 @@ module(Fs0, Opts0) ->
%% Build initial expand record.
St0 = #expand{exports=PreExp,
- compile=Opts,
- defined=PreExp,
- bitdefault = erl_bits:system_bitdefault(),
- bittypes = erl_bits:system_bittypes()
+ defined=PreExp
},
%% Expand the functions.
{Tfs,St1} = forms(Fs, define_functions(Fs, St0)),
%% Get the correct list of exported functions.
- Exports = case member(export_all, St1#expand.compile) of
+ Exports = case member(export_all, Opts) of
true -> gb_sets:to_list(St1#expand.defined);
false -> St1#expand.exports
end,
@@ -85,7 +79,7 @@ module(Fs0, Opts0) ->
{Ats,St3} = module_attrs(St1#expand{exports = Exports}),
{Mfs,St4} = module_predef_funcs(St3),
{St4#expand.module, St4#expand.exports, Ats ++ Tfs ++ Mfs,
- St4#expand.compile}.
+ Opts}.
compiler_options(Forms) ->
lists:flatten([C || {attribute,_,compile,C} <- Forms]).
@@ -121,7 +115,8 @@ is_fa_list(_) -> false.
module_predef_funcs(St) ->
{Mpf1,St1}=module_predef_func_beh_info(St),
{Mpf2,St2}=module_predef_funcs_mod_info(St1),
- {Mpf1++Mpf2,St2}.
+ Mpf = [erl_parse:new_anno(F) || F <- Mpf1++Mpf2],
+ {Mpf,St2}.
module_predef_func_beh_info(#expand{callbacks=[]}=St) ->
{[], St};
diff --git a/lib/compiler/src/v3_codegen.erl b/lib/compiler/src/v3_codegen.erl
index 8c1a0c08ac..c9b1a45cfc 100644
--- a/lib/compiler/src/v3_codegen.erl
+++ b/lib/compiler/src/v3_codegen.erl
@@ -43,7 +43,7 @@
-export([module/2]).
-import(lists, [member/2,keymember/3,keysort/2,keydelete/3,
- append/1,map/2,flatmap/2,filter/2,foldl/3,foldr/3,mapfoldl/3,
+ append/1,flatmap/2,filter/2,foldl/3,foldr/3,mapfoldl/3,
sort/1,reverse/1,reverse/2]).
-import(v3_life, [vdb_find/2]).
@@ -57,8 +57,7 @@
break, %Break label
recv, %Receive label
is_top_block, %Boolean: top block or not
- functable=gb_trees:empty(), %Gb tree of local functions:
- % {{Name,Arity},Label}
+ functable=#{}, %Map of local functions: {Name,Arity}=>Label
in_catch=false, %Inside a catch or not.
need_frame, %Need a stack frame.
ultimate_failure %Label for ultimate match failure.
@@ -69,10 +68,8 @@
stk=[], %Stack table
res=[]}). %Reserved regs: [{reserved,I,V}]
-module({Mod,Exp,Attr,Forms}, Options) ->
- put(?MODULE, Options),
+module({Mod,Exp,Attr,Forms}, _Options) ->
{Fs,St} = functions(Forms, {atom,Mod}),
- erase(?MODULE),
{ok,{Mod,Exp,Attr,Fs,St#cg.lcount}}.
functions(Forms, AtomMod) ->
@@ -123,24 +120,15 @@ cg_fun(Les, Hvs, Vdb, AtomMod, NameArity, Anno, St0) ->
put_reg(V, Reg)
end, [], Hvs),
stk=[]}, 0, Vdb),
- {B0,_Aft,St} = cg_list(Les, 0, Vdb, Bef,
+ {B,_Aft,St} = cg_list(Les, 0, Vdb, Bef,
St3#cg{bfail=0,
ultimate_failure=UltimateMatchFail,
is_top_block=true}),
- B = fix_bs_match_strings(B0),
{Name,Arity} = NameArity,
Asm = [{label,Fi},line(Anno),{func_info,AtomMod,{atom,Name},Arity},
{label,Fl}|B++[{label,UltimateMatchFail},if_end]],
{Asm,Fl,St}.
-fix_bs_match_strings([{test,bs_match_string,F,[Ctx,BinList]}|Is])
- when is_list(BinList) ->
- I = {test,bs_match_string,F,[Ctx,list_to_bitstring(BinList)]},
- [I|fix_bs_match_strings(Is)];
-fix_bs_match_strings([I|Is]) ->
- [I|fix_bs_match_strings(Is)];
-fix_bs_match_strings([]) -> [].
-
%% cg(Lkexpr, Vdb, StackReg, State) -> {[Ainstr],StackReg,State}.
%% Generate code for a kexpr.
%% Split function into two steps for clarity, not efficiency.
@@ -586,7 +574,7 @@ top_level_block(Keis, Bef, MaxRegs, _St) ->
(return) ->
[{deallocate,FrameSz},return];
(Tuple) when is_tuple(Tuple) ->
- [turn_yregs(tuple_size(Tuple), Tuple, MaxY)];
+ [turn_yregs(Tuple, MaxY)];
(Other) ->
[Other]
end, Keis),
@@ -598,14 +586,49 @@ top_level_block(Keis, Bef, MaxRegs, _St) ->
%% catches work. The code generation algorithm gives a lower register
%% number to the outer catch, which is wrong.
-turn_yregs(0, Tp, _) -> Tp;
-turn_yregs(El, Tp, MaxY) ->
- turn_yregs(El-1,setelement(El,Tp,turn_yreg(element(El,Tp),MaxY)),MaxY).
-
-turn_yreg({yy,YY},MaxY) -> {y,MaxY-YY};
-turn_yreg({list,Ls},MaxY) -> {list, turn_yreg(Ls,MaxY)};
-turn_yreg(Ts,MaxY) when is_list(Ts) -> [turn_yreg(T,MaxY)||T<-Ts];
-turn_yreg(Other,_MaxY) -> Other.
+turn_yregs({call,_,_}=I, _MaxY) -> I;
+turn_yregs({call_ext,_,_}=I, _MaxY) -> I;
+turn_yregs({jump,_}=I, _MaxY) -> I;
+turn_yregs({label,_}=I, _MaxY) -> I;
+turn_yregs({line,_}=I, _MaxY) -> I;
+turn_yregs({test_heap,_,_}=I, _MaxY) -> I;
+turn_yregs({bif,Op,F,A,B}, MaxY) ->
+ {bif,Op,F,turn_yreg(A, MaxY),turn_yreg(B, MaxY)};
+turn_yregs({gc_bif,Op,F,Live,A,B}, MaxY) when is_integer(Live) ->
+ {gc_bif,Op,F,Live,turn_yreg(A, MaxY),turn_yreg(B, MaxY)};
+turn_yregs({get_tuple_element,S,N,D}, MaxY) ->
+ {get_tuple_element,turn_yreg(S, MaxY),N,turn_yreg(D, MaxY)};
+turn_yregs({put_tuple,Arity,D}, MaxY) ->
+ {put_tuple,Arity,turn_yreg(D, MaxY)};
+turn_yregs({select_val,R,F,L}, MaxY) ->
+ {select_val,turn_yreg(R, MaxY),F,L};
+turn_yregs({test,Op,F,L}, MaxY) ->
+ {test,Op,F,turn_yreg(L, MaxY)};
+turn_yregs({test,Op,F,Live,A,B}, MaxY) when is_integer(Live) ->
+ {test,Op,F,Live,turn_yreg(A, MaxY),turn_yreg(B, MaxY)};
+turn_yregs({Op,A}, MaxY) ->
+ {Op,turn_yreg(A, MaxY)};
+turn_yregs({Op,A,B}, MaxY) ->
+ {Op,turn_yreg(A, MaxY),turn_yreg(B, MaxY)};
+turn_yregs({Op,A,B,C}, MaxY) ->
+ {Op,turn_yreg(A, MaxY),turn_yreg(B, MaxY),turn_yreg(C, MaxY)};
+turn_yregs(Tuple, MaxY) ->
+ turn_yregs(tuple_size(Tuple), Tuple, MaxY).
+
+turn_yregs(1, Tp, _) ->
+ Tp;
+turn_yregs(N, Tp, MaxY) ->
+ E = turn_yreg(element(N, Tp), MaxY),
+ turn_yregs(N-1, setelement(N, Tp, E), MaxY).
+
+turn_yreg({yy,YY}, MaxY) ->
+ {y,MaxY-YY};
+turn_yreg({list,Ls},MaxY) ->
+ {list,turn_yreg(Ls, MaxY)};
+turn_yreg([_|_]=Ts, MaxY) ->
+ [turn_yreg(T, MaxY) || T <- Ts];
+turn_yreg(Other, _MaxY) ->
+ Other.
%% select_cg(Sclause, V, TypeFail, ValueFail, StackReg, State) ->
%% {Is,StackReg,State}.
@@ -649,9 +672,7 @@ select_val_cg(Type, R, [Val, {f,Lbl}], Tf, Vf, [{label,Lbl}|Sis]) ->
[{test,select_type_test(Type),{f,Tf},[R]},
{test,is_eq_exact,{f,Vf},[R,{Type,Val}]}|Sis];
select_val_cg(Type, R, Vls0, Tf, Vf, Sis) ->
- Vls1 = map(fun ({f,_Lbl} = F) -> F;
- (Value) -> {Type,Value}
- end, Vls0),
+ Vls1 = [case Value of {f,_Lbl} -> Value; _ -> {Type,Value} end || Value <- Vls0],
[{test,select_type_test(Type),{f,Tf},[R]}, {select_val,R,{f,Vf},{list,Vls1}}|Sis].
select_type_test(integer) -> is_integer;
@@ -684,22 +705,37 @@ select_nil(#l{ke={val_clause,nil,B}}, V, Tf, Vf, Bef, St0) ->
select_binary(#l{ke={val_clause,{binary,{var,V}},B},i=I,vdb=Vdb},
V, Tf, Vf, Bef, St0) ->
Int0 = clear_dead(Bef#sr{reg=Bef#sr.reg}, I, Vdb),
- {Bis,Aft,St1} = match_cg(B, Vf, Int0, St0),
+ {Bis0,Aft,St1} = match_cg(B, Vf, Int0, St0),
CtxReg = fetch_var(V, Int0),
Live = max_reg(Bef#sr.reg),
- {[{test,bs_start_match2,{f,Tf},Live,[CtxReg,V],CtxReg},
- {bs_save2,CtxReg,{V,V}}|Bis],
- Aft,St1};
+ Bis1 = [{test,bs_start_match2,{f,Tf},Live,[CtxReg,V],CtxReg},
+ {bs_save2,CtxReg,{V,V}}|Bis0],
+ Bis = finish_select_binary(Bis1),
+ {Bis,Aft,St1};
select_binary(#l{ke={val_clause,{binary,{var,Ivar}},B},i=I,vdb=Vdb},
V, Tf, Vf, Bef, St0) ->
Regs = put_reg(Ivar, Bef#sr.reg),
Int0 = clear_dead(Bef#sr{reg=Regs}, I, Vdb),
- {Bis,Aft,St1} = match_cg(B, Vf, Int0, St0),
+ {Bis0,Aft,St1} = match_cg(B, Vf, Int0, St0),
CtxReg = fetch_var(Ivar, Int0),
Live = max_reg(Bef#sr.reg),
- {[{test,bs_start_match2,{f,Tf},Live,[fetch_var(V, Bef),Ivar],CtxReg},
- {bs_save2,CtxReg,{Ivar,Ivar}}|Bis],
- Aft,St1}.
+ Bis1 = [{test,bs_start_match2,{f,Tf},Live,[fetch_var(V, Bef),Ivar],CtxReg},
+ {bs_save2,CtxReg,{Ivar,Ivar}}|Bis0],
+ Bis = finish_select_binary(Bis1),
+ {Bis,Aft,St1}.
+
+finish_select_binary([{bs_save2,R,Point}=I,{bs_restore2,R,Point}|Is]) ->
+ [I|finish_select_binary(Is)];
+finish_select_binary([{bs_save2,R,Point}=I,{test,is_eq_exact,_,_}=Test,
+ {bs_restore2,R,Point}|Is]) ->
+ [I,Test|finish_select_binary(Is)];
+finish_select_binary([{test,bs_match_string,F,[Ctx,BinList]}|Is])
+ when is_list(BinList) ->
+ I = {test,bs_match_string,F,[Ctx,list_to_bitstring(BinList)]},
+ [I|finish_select_binary(Is)];
+finish_select_binary([I|Is]) ->
+ [I|finish_select_binary(Is)];
+finish_select_binary([]) -> [].
%% New instructions for selection of binary segments.
@@ -924,7 +960,7 @@ select_extract_tuple(Src, Vs, I, Vdb, Bef, St) ->
select_map(Scs, V, Tf, Vf, Bef, St0) ->
Reg = fetch_var(V, Bef),
{Is,Aft,St1} =
- match_fmf(fun(#l{ke={val_clause,{map,_,Es},B},i=I,vdb=Vdb}, Fail, St1) ->
+ match_fmf(fun(#l{ke={val_clause,{map,exact,_,Es},B},i=I,vdb=Vdb}, Fail, St1) ->
select_map_val(V, Es, B, Fail, I, Vdb, Bef, St1)
end, Vf, St0, Scs),
{[{test,is_map,{f,Tf},[Reg]}|Is],Aft,St1}.
@@ -1041,7 +1077,7 @@ protected_cg(Ts, Rs, _Fail, I, Vdb, Bef, St0) ->
St2#cg{bfail=Pfail}),
%%ok = io:fwrite("cg ~w: ~p~n", [?LINE,{Rs,I,Vdb,Aft}]),
%% Set return values to false.
- Mis = map(fun ({var,V}) -> {move,{atom,false},fetch_var(V, Aft)} end, Rs),
+ Mis = [{move,{atom,false},fetch_var(V,Aft)}||{var,V} <- Rs],
{Tis ++ [{jump,{f,Psucc}},
{label,Pfail}] ++ Mis ++ [{label,Psucc}],
Aft,St3#cg{bfail=St0#cg.bfail}}.
@@ -1224,13 +1260,12 @@ enter_line(_, _, _) ->
local_func_label(Name, Arity, St) ->
local_func_label({Name,Arity}, St).
-local_func_label(Key, #cg{functable=Tab}=St0) ->
- case gb_trees:lookup(Key, Tab) of
- {value,Label} ->
- {Label,St0};
- none ->
+local_func_label(Key, #cg{functable=Map}=St0) ->
+ case Map of
+ #{Key := Label} -> {Label,St0};
+ _ ->
{Label,St} = new_label(St0),
- {Label,St#cg{functable=gb_trees:insert(Key, Label, Tab)}}
+ {Label,St#cg{functable=Map#{Key => Label}}}
end.
%% need_stack_frame(State) -> State'
@@ -1523,9 +1558,11 @@ set_cg([{var,R}], {map,Op,Map,[{map_pair,{var,_}=K,V}]}, Le, Vdb, Bef,
List = [cg_reg_arg(K,Int0),cg_reg_arg(V,Int0)],
Live = max_reg(Bef#sr.reg),
- Int1 = Int0#sr{reg=put_reg(R, Int0#sr.reg)},
- Aft = clear_dead(Int1, Le#l.i, Vdb),
- Target = fetch_reg(R, Int1#sr.reg),
+
+ %% The target register can reuse one of the source registers.
+ Aft0 = clear_dead(Int0, Le#l.i, Vdb),
+ Aft = Aft0#sr{reg=put_reg(R, Aft0#sr.reg)},
+ Target = fetch_reg(R, Aft#sr.reg),
I = case Op of
assoc -> put_map_assoc;
@@ -1549,17 +1586,16 @@ set_cg([{var,R}], {map,Op,Map,Es}, Le, Vdb, Bef,
SrcReg = cg_reg_arg(Map,Int0),
Line = line(Le#l.a),
- %% The instruction needs to store keys in term sorted order
- %% All keys has to be unique here
- Pairs = map_pair_strip_and_termsort(Es),
-
%% fetch registers for values to be put into the map
+ Pairs = [{K,V} || {_,K,V} <- Es],
List = flatmap(fun({K,V}) -> [K,cg_reg_arg(V,Int0)] end, Pairs),
Live = max_reg(Bef#sr.reg),
- Int1 = Int0#sr{reg=put_reg(R, Int0#sr.reg)},
- Aft = clear_dead(Int1, Le#l.i, Vdb),
- Target = fetch_reg(R, Int1#sr.reg),
+
+ %% The target register can reuse one of the source registers.
+ Aft0 = clear_dead(Int0, Le#l.i, Vdb),
+ Aft = Aft0#sr{reg=put_reg(R, Aft0#sr.reg)},
+ Target = fetch_reg(R, Aft#sr.reg),
I = case Op of
assoc -> put_map_assoc;
@@ -1578,16 +1614,6 @@ set_cg([{var,R}], Con, Le, Vdb, Bef, St) ->
end,
{Ais,clear_dead(Int, Le#l.i, Vdb),St}.
-map_pair_strip_and_termsort(Es) ->
- %% format in
- %% [{map_pair,K,V}]
- %% where K is for example {integer, 1} and we want to sort on 1.
- Ls = [{K,V}||{_,K,V}<-Es],
- lists:sort(fun ({{_,A},_}, {{_,B},_}) -> erts_internal:cmp_term(A,B) =< 0;
- ({nil,_}, {{_,B},_}) -> [] =< B;
- ({{_,A},_}, {nil,_}) -> A =< []
- end, Ls).
-
%%%
%%% Code generation for constructing binaries.
%%%
@@ -1962,25 +1988,28 @@ clear_dead(Sr, Until, Vdb) ->
stk=clear_dead_stk(Sr#sr.stk, Until, Vdb)}.
clear_dead_reg(Sr, Until, Vdb) ->
- Reg = map(fun ({_I,V} = IV) ->
- case vdb_find(V, Vdb) of
- {V,_,L} when L > Until -> IV;
- _ -> free %Remove anything else
- end;
- ({reserved,_I,_V} = Reserved) -> Reserved;
- (free) -> free
- end, Sr#sr.reg),
+ Reg = [case R of
+ {_I,V} = IV ->
+ case vdb_find(V, Vdb) of
+ {V,_,L} when L > Until -> IV;
+ _ -> free %Remove anything else
+ end;
+ {reserved,_I,_V} = Reserved -> Reserved;
+ free -> free
+ end || R <- Sr#sr.reg],
reserve(Sr#sr.res, Reg, Sr#sr.stk).
clear_dead_stk(Stk, Until, Vdb) ->
- map(fun ({V} = T) ->
- case vdb_find(V, Vdb) of
- {V,_,L} when L > Until -> T;
- _ -> dead %Remove anything else
- end;
- (free) -> free;
- (dead) -> dead
- end, Stk).
+ [case S of
+ {V} = T ->
+ case vdb_find(V, Vdb) of
+ {V,_,L} when L > Until -> T;
+ _ -> dead %Remove anything else
+ end;
+ free -> free;
+ dead -> dead
+ end || S <- Stk].
+
%% sr_merge(Sr1, Sr2) -> Sr.
%% Merge two stack/register states keeping the longest of both stack
@@ -2110,9 +2139,11 @@ put_stack(Val, [free|Stk]) -> [{Val}|Stk];
put_stack(Val, [NotFree|Stk]) -> [NotFree|put_stack(Val, Stk)].
put_stack_carefully(Val, Stk0) ->
- case catch put_stack_carefully1(Val, Stk0) of
- error -> error;
- Stk1 when is_list(Stk1) -> Stk1
+ try
+ put_stack_carefully1(Val, Stk0)
+ catch
+ throw:error ->
+ error
end.
put_stack_carefully1(_, []) -> throw(error);
diff --git a/lib/compiler/src/v3_core.erl b/lib/compiler/src/v3_core.erl
index 612660c2d6..ecaecb0ff6 100644
--- a/lib/compiler/src/v3_core.erl
+++ b/lib/compiler/src/v3_core.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -66,6 +66,7 @@
%% match arguments are novars
%% case arguments are novars
%% receive timeouts are novars
+%% binaries and maps are novars
%% let/set arguments are expressions
%% fun is not a safe
@@ -77,13 +78,11 @@
splitwith/2,keyfind/3,sort/1,foreach/2,droplast/1,last/1]).
-import(ordsets, [add_element/2,del_element/2,is_element/2,
union/1,union/2,intersection/2,subtract/2]).
--import(cerl, [ann_c_cons/3,ann_c_cons_skel/3,ann_c_tuple/2,c_tuple/1,
+-import(cerl, [ann_c_cons/3,ann_c_tuple/2,c_tuple/1,
ann_c_map/3]).
-include("core_parse.hrl").
--define(REC_OFFSET, 100000000). % Also in erl_expand_records.
-
%% Internal core expressions and help functions.
%% N.B. annotations fields in place as normal Core expressions.
@@ -105,7 +104,9 @@
-record(iset, {anno=#a{},var,arg}).
-record(itry, {anno=#a{},args,vars,body,evars,handler}).
-record(ifilter, {anno=#a{},arg}).
--record(igen, {anno=#a{},acc_pat,acc_guard,skip_pat,tail,tail_pat,arg}).
+-record(igen, {anno=#a{},ceps=[],acc_pat,acc_guard,
+ skip_pat,tail,tail_pat,arg}).
+-record(isimple, {anno=#a{},term :: cerl:cerl()}).
-type iapply() :: #iapply{}.
-type ibinary() :: #ibinary{}.
@@ -124,11 +125,12 @@
-type itry() :: #itry{}.
-type ifilter() :: #ifilter{}.
-type igen() :: #igen{}.
+-type isimple() :: #isimple{}.
-type i() :: iapply() | ibinary() | icall() | icase() | icatch()
| iclause() | ifun() | iletrec() | imatch() | iprimop()
| iprotect() | ireceive1() | ireceive2() | iset() | itry()
- | ifilter() | igen().
+ | ifilter() | igen() | isimple().
-type warning() :: {file:filename(), [{integer(), module(), term()}]}.
@@ -166,8 +168,10 @@ form({attribute,_,file,{File,_Line}}, {Fs,As,Ws,_}, _Opts) ->
form({attribute,_,_,_}=F, {Fs,As,Ws,File}, _Opts) ->
{Fs,[attribute(F)|As],Ws,File}.
-attribute({attribute,Line,Name,Val}) ->
- {#c_literal{val=Name, anno=[Line]}, #c_literal{val=Val, anno=[Line]}}.
+attribute(Attribute) ->
+ Fun = fun(A) -> [erl_anno:location(A)] end,
+ {attribute,Line,Name,Val} = erl_parse:map_anno(Fun, Attribute),
+ {#c_literal{val=Name, anno=Line}, #c_literal{val=Val, anno=Line}}.
%% function_dump(module_info,_,_,_) -> ok;
%% function_dump(Name,Arity,Format,Terms) ->
@@ -287,13 +291,15 @@ gexpr({protect,Line,Arg}, Bools0, St0) ->
{#iprotect{anno=#a{anno=Anno},body=Eps++[E]},[],Bools0,St}
end;
gexpr({op,L,'andalso',E1,E2}, Bools, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
False = {atom,L,false},
E = make_bool_switch_guard(L, E1, V, E2, False),
gexpr(E, Bools, St);
gexpr({op,L,'orelse',E1,E2}, Bools, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
True = {atom,L,true},
E = make_bool_switch_guard(L, E1, V, True, E2),
@@ -382,33 +388,30 @@ gexpr_test(E0, Bools0, St0) ->
Lanno = Anno#a.anno,
{New,St2} = new_var(Lanno, St1),
Bools = [New|Bools0],
- {#icall{anno=Anno, %Must have an #a{}
- module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val='=:='},
- args=[New,#c_literal{anno=Lanno,val=true}]},
+ {icall_eq_true(New),
Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools,St2}
end;
_ ->
- Anno = get_ianno(E1),
Lanno = get_lineno_anno(E1),
+ ACompGen = #a{anno=[compiler_generated]},
case is_simple(E1) of
true ->
Bools = [E1|Bools0],
- {#icall{anno=Anno, %Must have an #a{}
- module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val='=:='},
- args=[E1,#c_literal{anno=Lanno,val=true}]},Eps0,Bools,St1};
+ {icall_eq_true(E1),Eps0,Bools,St1};
false ->
{New,St2} = new_var(Lanno, St1),
Bools = [New|Bools0],
- {#icall{anno=Anno, %Must have an #a{}
- module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val='=:='},
- args=[New,#c_literal{anno=Lanno,val=true}]},
- Eps0 ++ [#iset{anno=Anno,var=New,arg=E1}],Bools,St2}
+ {icall_eq_true(New),
+ Eps0 ++ [#iset{anno=ACompGen,var=New,arg=E1}],Bools,St2}
end
end.
+icall_eq_true(Arg) ->
+ #icall{anno=#a{anno=[compiler_generated]},
+ module=#c_literal{val=erlang},
+ name=#c_literal{val='=:='},
+ args=[Arg,#c_literal{val=true}]}.
+
force_booleans(Vs0, E, Eps, St) ->
Vs1 = [set_anno(V, []) || V <- Vs0],
Vs = unforce(E, Eps, Vs1),
@@ -418,16 +421,15 @@ force_booleans_1([], E, Eps, St) ->
{E,Eps,St};
force_booleans_1([V|Vs], E0, Eps0, St0) ->
{E1,Eps1,St1} = force_safe(E0, St0),
- Lanno = element(2, V),
- Anno = #a{anno=Lanno},
- Call = #icall{anno=Anno,module=#c_literal{anno=Lanno,val=erlang},
- name=#c_literal{anno=Lanno,val=is_boolean},
+ ACompGen = #a{anno=[compiler_generated]},
+ Call = #icall{anno=ACompGen,module=#c_literal{val=erlang},
+ name=#c_literal{val=is_boolean},
args=[V]},
- {New,St} = new_var(Lanno, St1),
- Iset = #iset{anno=Anno,var=New,arg=Call},
+ {New,St} = new_var([], St1),
+ Iset = #iset{var=New,arg=Call},
Eps = Eps0 ++ Eps1 ++ [Iset],
- E = #icall{anno=Anno,
- module=#c_literal{anno=Lanno,val=erlang},name=#c_literal{anno=Lanno,val='and'},
+ E = #icall{anno=ACompGen,
+ module=#c_literal{val=erlang},name=#c_literal{val='and'},
args=[E1,New]},
force_booleans_1(Vs, E, Eps, St).
@@ -514,43 +516,32 @@ exprs([], St) -> {[],St}.
%% Generate an internal core expression.
expr({var,L,V}, St) -> {#c_var{anno=lineno_anno(L, St),name=V},[],St};
-expr({char,L,C}, St) -> {#c_literal{anno=lineno_anno(L, St),val=C},[],St};
-expr({integer,L,I}, St) -> {#c_literal{anno=lineno_anno(L, St),val=I},[],St};
-expr({float,L,F}, St) -> {#c_literal{anno=lineno_anno(L, St),val=F},[],St};
-expr({atom,L,A}, St) -> {#c_literal{anno=lineno_anno(L, St),val=A},[],St};
-expr({nil,L}, St) -> {#c_literal{anno=lineno_anno(L, St),val=[]},[],St};
-expr({string,L,S}, St) -> {#c_literal{anno=lineno_anno(L, St),val=S},[],St};
+expr({char,L,C}, St) -> {#c_literal{anno=full_anno(L, St),val=C},[],St};
+expr({integer,L,I}, St) -> {#c_literal{anno=full_anno(L, St),val=I},[],St};
+expr({float,L,F}, St) -> {#c_literal{anno=full_anno(L, St),val=F},[],St};
+expr({atom,L,A}, St) -> {#c_literal{anno=full_anno(L, St),val=A},[],St};
+expr({nil,L}, St) -> {#c_literal{anno=full_anno(L, St),val=[]},[],St};
+expr({string,L,S}, St) -> {#c_literal{anno=full_anno(L, St),val=S},[],St};
expr({cons,L,H0,T0}, St0) ->
{H1,Hps,St1} = safe(H0, St0),
{T1,Tps,St2} = safe(T0, St1),
- A = lineno_anno(L, St2),
+ A = full_anno(L, St2),
{annotate_cons(A, H1, T1, St2),Hps ++ Tps,St2};
expr({lc,L,E,Qs0}, St0) ->
{Qs1,St1} = preprocess_quals(L, Qs0, St0),
lc_tq(L, E, Qs1, #c_literal{anno=lineno_anno(L, St1),val=[]}, St1);
expr({bc,L,E,Qs}, St) ->
- bc_tq(L, E, Qs, {nil,L}, St);
+ bc_tq(L, E, Qs, St);
expr({tuple,L,Es0}, St0) ->
{Es1,Eps,St1} = safe_list(Es0, St0),
A = record_anno(L, St1),
{annotate_tuple(A, Es1, St1),Eps,St1};
expr({map,L,Es0}, St0) ->
- map_build_pair_chain(#c_literal{val=#{}},Es0,lineno_anno(L,St0),St0);
-expr({map,L,M0,Es0}, St0) ->
- try expr_map(M0,Es0,lineno_anno(L, St0),St0) of
- {_,_,_}=Res -> Res
- catch
- throw:{bad_map,Warning} ->
- St = add_warning(L, Warning, St0),
- LineAnno = lineno_anno(L, St),
- As = [#c_literal{anno=LineAnno,val=badarg}],
- {#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
- module=#c_literal{anno=LineAnno,val=erlang},
- name=#c_literal{anno=LineAnno,val=error},
- args=As},[],St}
- end;
+ map_build_pairs(#c_literal{val=#{}}, Es0, full_anno(L, St0), St0);
+expr({map,L,M,Es}, St) ->
+ expr_map(M, Es, L, St);
expr({bin,L,Es0}, St0) ->
- try expr_bin(Es0, lineno_anno(L, St0), St0) of
+ try expr_bin(Es0, full_anno(L, St0), St0) of
{_,_,_}=Res -> Res
catch
throw:bad_binary ->
@@ -640,11 +631,11 @@ expr({'catch',L,E0}, St0) ->
Lanno = lineno_anno(L, St1),
{#icatch{anno=#a{anno=Lanno},body=Eps ++ [E1]},[],St1};
expr({'fun',L,{function,F,A},{_,_,_}=Id}, St) ->
- Lanno = lineno_anno(L, St),
+ Lanno = full_anno(L, St),
{#c_var{anno=Lanno++[{id,Id}],name={F,A}},[],St};
expr({'fun',L,{function,M,F,A}}, St0) ->
{As,Aps,St1} = safe_list([M,F,A], St0),
- Lanno = lineno_anno(L, St1),
+ Lanno = full_anno(L, St1),
{#icall{anno=#a{anno=Lanno},
module=#c_literal{val=erlang},
name=#c_literal{val=make_fun},
@@ -655,13 +646,9 @@ expr({named_fun,L,'_',Cs,Id}, St) ->
fun_tq(Id, Cs, L, St, unnamed);
expr({named_fun,L,Name,Cs,Id}, St) ->
fun_tq(Id, Cs, L, St, {named,Name});
-expr({call,L,{remote,_,M,F},As0}, #core{wanted=Wanted}=St0) ->
+expr({call,L,{remote,_,M,F},As0}, St0) ->
{[M1,F1|As1],Aps,St1} = safe_list([M,F|As0], St0),
- Lanno = lineno_anno(L, St1),
- Anno = case Wanted of
- false -> [result_not_wanted|Lanno];
- true -> Lanno
- end,
+ Anno = full_anno(L, St1),
{#icall{anno=#a{anno=Anno},module=M1,name=F1,args=As1},Aps,St1};
expr({call,Lc,{atom,Lf,F},As0}, St0) ->
{As1,Aps,St1} = safe_list(As0, St0),
@@ -710,26 +697,28 @@ expr({op,_,'++',{lc,Llc,E,Qs0},More}, St0) ->
{Y,Yps,St} = lc_tq(Llc, E, Qs, Mc, St2),
{Y,Mps++Yps,St};
expr({op,L,'andalso',E1,E2}, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
False = {atom,L,false},
E = make_bool_switch(L, E1, V, E2, False, St0),
expr(E, St);
expr({op,L,'orelse',E1,E2}, St0) ->
- {#c_var{name=V0},St} = new_var(L, St0),
+ Anno = lineno_anno(L, St0),
+ {#c_var{name=V0},St} = new_var(Anno, St0),
V = {var,L,V0},
True = {atom,L,true},
E = make_bool_switch(L, E1, V, True, E2, St0),
expr(E, St);
expr({op,L,Op,A0}, St0) ->
{A1,Aps,St1} = safe(A0, St0),
- LineAnno = lineno_anno(L, St1),
+ LineAnno = full_anno(L, St1),
{#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
module=#c_literal{anno=LineAnno,val=erlang},
name=#c_literal{anno=LineAnno,val=Op},args=[A1]},Aps,St1};
expr({op,L,Op,L0,R0}, St0) ->
{As,Aps,St1} = safe_list([L0,R0], St0),
- LineAnno = lineno_anno(L, St1),
+ LineAnno = full_anno(L, St1),
{#icall{anno=#a{anno=LineAnno}, %Must have an #a{}
module=#c_literal{anno=LineAnno,val=erlang},
name=#c_literal{anno=LineAnno,val=Op},args=As},Aps,St1}.
@@ -740,7 +729,7 @@ make_bool_switch(L, E, V, T, F, #core{}) ->
make_bool_switch_body(L, E, V, T, F).
make_bool_switch_body(L, E, V, T, F) ->
- NegL = neg_line(abs_line(L)),
+ NegL = no_compiler_warning(L),
Error = {tuple,NegL,[{atom,NegL,badarg},V]},
{'case',NegL,E,
[{clause,NegL,[{atom,NegL,true}],[],[T]},
@@ -751,93 +740,67 @@ make_bool_switch_body(L, E, V, T, F) ->
make_bool_switch_guard(_, E, _, {atom,_,true}, {atom,_,false}) -> E;
make_bool_switch_guard(L, E, V, T, F) ->
- NegL = neg_line(abs_line(L)),
+ NegL = no_compiler_warning(L),
{'case',NegL,E,
[{clause,NegL,[{atom,NegL,true}],[],[T]},
{clause,NegL,[{atom,NegL,false}],[],[F]},
{clause,NegL,[V],[],[V]}
]}.
-expr_map(M0,Es0,A,St0) ->
- {M1,Mps,St1} = safe(M0, St0),
+expr_map(M0, Es0, L, St0) ->
+ {M1,Eps0,St1} = safe(M0, St0),
+ Badmap = badmap_term(M1, St1),
+ A = lineno_anno(L, St1),
+ Fc = fail_clause([], [{eval_failure,badmap}|A], Badmap),
case is_valid_map_src(M1) of
true ->
- case {M1,Es0} of
- {#c_var{}, []} ->
- %% transform M#{} to is_map(M)
- {Vpat,St2} = new_var(St1),
- {Fpat,St3} = new_var(St2),
- Cs = [#iclause{
- anno=A,
- pats=[Vpat],
- guard=[#icall{anno=#a{anno=A},
+ {M2,Eps1,St2} = map_build_pairs(M1, Es0, full_anno(L, St1), St1),
+ M3 = case Es0 of
+ [] -> M1;
+ [_|_] -> M2
+ end,
+ Cs = [#iclause{
+ anno=#a{anno=[compiler_generated|A]},
+ pats=[],
+ guard=[#icall{anno=#a{anno=A},
module=#c_literal{anno=A,val=erlang},
name=#c_literal{anno=A,val=is_map},
- args=[Vpat]}],
- body=[Vpat]}],
- Fc = fail_clause([Fpat], A, #c_literal{val=badarg}),
- {#icase{anno=#a{anno=A},args=[M1],clauses=Cs,fc=Fc},Mps,St3};
- {_,_} ->
- {M2,Eps,St2} = map_build_pair_chain(M1,Es0,A,St1),
- {M2,Mps++Eps,St2}
- end;
- false -> throw({bad_map,bad_map})
- end.
-
-%% Group continuous literal blocks and single variables, i.e.
-%% M0#{ a := 1, b := V1, K1 := V2, K2 := 42}
-%% becomes equivalent to
-%% M1 = M0#{ a := 1, b := V1 },
-%% M2 = M1#{ K1 := V1 },
-%% M3 = M2#{ K2 := 42 }
-
-map_build_pair_chain(M,Es,A,St) ->
- %% hack, remove iset if only literal
- case map_build_pair_chain(M,Es,A,St,[]) of
- {_,[#iset{arg=#c_literal{}=Val}],St1} -> {Val,[],St1};
- Normal -> Normal
- end.
-
-map_build_pair_chain(M0,[],_,St,Mps) ->
- {M0,Mps,St};
-map_build_pair_chain(M0,Es0,A,St0,Mps) ->
- % group continuous literal blocks
- % Anno = #a{anno=[compiler_generated]},
- % order is important, we need to reverse the literals
- case map_pair_block(Es0,[],[],St0) of
- {{CesL,EspL},{[],[]},Es1,St1} ->
- {MVar,St2} = new_var(St1),
- Pre = [#iset{var=MVar, arg=ann_c_map(A,M0,reverse(CesL))}],
- map_build_pair_chain(MVar,Es1,A,St2,Mps++EspL++Pre);
- {{[],[]},{CesV,EspV},Es1,St1} ->
- {MVar,St2} = new_var(St1),
- Pre = [#iset{var=MVar, arg=#c_map{arg=M0,es=CesV, anno=A}}],
- map_build_pair_chain(MVar,Es1,A,St2,Mps ++ EspV++Pre);
- {{CesL,EspL},{CesV,EspV},Es1,St1} ->
- {MVarL,St2} = new_var(St1),
- {MVarV,St3} = new_var(St2),
- Pre = [#iset{var=MVarL, arg=ann_c_map(A,M0,reverse(CesL))},
- #iset{var=MVarV, arg=#c_map{arg=MVarL,es=CesV,anno=A}}],
- map_build_pair_chain(MVarV,Es1,A,St3,Mps++EspL++EspV++Pre)
+ args=[M1]}],
+ body=[M3]}],
+ Eps = Eps0 ++ Eps1,
+ {#icase{anno=#a{anno=A},args=[],clauses=Cs,fc=Fc},Eps,St2};
+ false ->
+ %% Not a map source. The update will always fail.
+ St2 = add_warning(L, badmap, St1),
+ #iclause{body=[Fail]} = Fc,
+ {Fail,Eps0,St2}
end.
-map_pair_block([{Op,L,K0,V0}|Es],Ces,Esp,St0) ->
- {K,Ep0,St1} = safe(K0, St0),
- {V,Ep1,St2} = safe(V0, St1),
- A = lineno_anno(L, St2),
- Pair0 = map_op_to_c_map_pair(Op),
- Pair1 = Pair0#c_map_pair{anno=A,key=K,val=V},
- case cerl:is_literal(K) of
- true ->
- map_pair_block(Es,[Pair1|Ces],Ep0 ++ Ep1 ++ Esp,St2);
- false ->
- {{Ces,Esp},{[Pair1],Ep0++Ep1},Es,St2}
- end;
-map_pair_block([],Ces,Esp,St) ->
- {{Ces,Esp},{[],[]},[],St}.
+badmap_term(_Map, #core{in_guard=true}) ->
+ %% The code generator cannot handle complex error reasons
+ %% in guards. But the exact error reason does not matter anyway
+ %% since it is not user-visible.
+ #c_literal{val=badmap};
+badmap_term(Map, #core{in_guard=false}) ->
+ #c_tuple{es=[#c_literal{val=badmap},Map]}.
+
+map_build_pairs(Map, Es0, Ann, St0) ->
+ {Es,Pre,St1} = map_build_pairs_1(Es0, St0),
+ {ann_c_map(Ann, Map, Es),Pre,St1}.
+
+map_build_pairs_1([{Op0,L,K0,V0}|Es], St0) ->
+ {K,Pre0,St1} = safe(K0, St0),
+ {V,Pre1,St2} = safe(V0, St1),
+ {Pairs,Pre2,St3} = map_build_pairs_1(Es, St2),
+ As = lineno_anno(L, St3),
+ Op = map_op(Op0),
+ Pair = cerl:ann_c_map_pair(As, Op, K, V),
+ {[Pair|Pairs],Pre0++Pre1++Pre2,St3};
+map_build_pairs_1([], St) ->
+ {[],[],St}.
-map_op_to_c_map_pair(map_field_assoc) -> #c_map_pair{op=#c_literal{val=assoc}};
-map_op_to_c_map_pair(map_field_exact) -> #c_map_pair{op=#c_literal{val=exact}}.
+map_op(map_field_assoc) -> #c_literal{val=assoc};
+map_op(map_field_exact) -> #c_literal{val=exact}.
is_valid_map_src(#c_literal{val = M}) when is_map(M) -> true;
is_valid_map_src(#c_var{}) -> true;
@@ -906,10 +869,10 @@ constant_bin_1(Es) ->
({float,_,F}, B) -> {value,F,B};
({atom,_,undefined}, B) -> {value,undefined,B}
end,
- case catch eval_bits:expr_grp(Es, EmptyBindings, EvalFun) of
+ try eval_bits:expr_grp(Es, EmptyBindings, EvalFun) of
{value,Bin,EmptyBindings} ->
- Bin;
- _ ->
+ Bin
+ catch error:_ ->
error
end.
@@ -956,7 +919,7 @@ verify_suitable_fields([]) -> ok.
%% (We don't need an exact result for this purpose.)
count_bits(Int) ->
- count_bits_1(abs_line(Int), 64).
+ count_bits_1(abs(Int), 64).
count_bits_1(0, Bits) -> Bits;
count_bits_1(Int, Bits) -> count_bits_1(Int bsr 64, Bits+64).
@@ -1001,7 +964,7 @@ fun_tq({_,_,Name}=Id, Cs0, L, St0, NameInfo) ->
{Cs1,Ceps,St1} = clauses(Cs0, St0),
{Args,St2} = new_vars(Arity, St1),
{Ps,St3} = new_vars(Arity, St2), %Need new variables here
- Anno = lineno_anno(L, St3),
+ Anno = full_anno(L, St3),
Fc = function_clause(Ps, Anno, {Name,Arity}),
Fun = #ifun{anno=#a{anno=Anno},
id=[{id,Id}], %We KNOW!
@@ -1011,7 +974,8 @@ fun_tq({_,_,Name}=Id, Cs0, L, St0, NameInfo) ->
%% lc_tq(Line, Exp, [Qualifier], Mc, State) -> {LetRec,[PreExp],State}.
%% This TQ from Simon PJ pp 127-138.
-lc_tq(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
+lc_tq(Line, E, [#igen{anno=GAnno,ceps=Ceps,
+ acc_pat=AccPat,acc_guard=AccGuard,
skip_pat=SkipPat,tail=Tail,tail_pat=TailPat,
arg={Pre,Arg}}|Qs], Mc, St0) ->
{Name,St1} = new_fun_name("lc", St0),
@@ -1046,7 +1010,7 @@ lc_tq(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
Fun = #ifun{anno=LAnno,id=[],vars=[Var],clauses=Cs,fc=Fc},
{#iletrec{anno=LAnno#a{anno=[list_comprehension|LA]},defs=[{{Name,1},Fun}],
body=Pre ++ [#iapply{anno=LAnno,op=F,args=[Arg]}]},
- [],St4};
+ Ceps,St4};
lc_tq(Line, E, [#ifilter{}=Filter|Qs], Mc, St) ->
filter_tq(Line, E, Filter, Mc, St, Qs, fun lc_tq/5);
lc_tq(Line, E0, [], Mc0, St0) ->
@@ -1060,7 +1024,7 @@ lc_tq(Line, E0, [], Mc0, St0) ->
%% This TQ from Gustafsson ERLANG'05.
%% More could be transformed before calling bc_tq.
-bc_tq(Line, Exp, Qs0, _, St0) ->
+bc_tq(Line, Exp, Qs0, St0) ->
{BinVar,St1} = new_var(St0),
{Sz,SzPre,St2} = bc_initial_size(Exp, Qs0, St1),
{Qs,St3} = preprocess_quals(Line, Qs0, St2),
@@ -1071,7 +1035,8 @@ bc_tq(Line, Exp, Qs0, _, St0) ->
args=[Sz]}}] ++ BcPre,
{E,Pre,St}.
-bc_tq1(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
+bc_tq1(Line, E, [#igen{anno=GAnno,ceps=Ceps,
+ acc_pat=AccPat,acc_guard=AccGuard,
skip_pat=SkipPat,tail=Tail,tail_pat=TailPat,
arg={Pre,Arg}}|Qs], Mc, St0) ->
{Name,St1} = new_fun_name("lbc", St0),
@@ -1109,7 +1074,7 @@ bc_tq1(Line, E, [#igen{anno=GAnno,acc_pat=AccPat,acc_guard=AccGuard,
Fun = #ifun{anno=LAnno,id=[],vars=Vars,clauses=Cs,fc=Fc},
{#iletrec{anno=LAnno#a{anno=[list_comprehension|LA]},defs=[{{Name,2},Fun}],
body=Pre ++ [#iapply{anno=LAnno,op=F,args=[Arg,Mc]}]},
- [],St4};
+ Ceps,St4};
bc_tq1(Line, E, [#ifilter{}=Filter|Qs], Mc, St) ->
filter_tq(Line, E, Filter, Mc, St, Qs, fun bc_tq1/5);
bc_tq1(_, {bin,Bl,Elements}, [], AccVar, St0) ->
@@ -1245,8 +1210,9 @@ generator(Line, {generate,Lg,P0,E}, Gs, St0) ->
ann_c_cons(LA, Skip, Tail)}
end,
{Ce,Pre,St4} = safe(E, St3),
- Gen = #igen{anno=#a{anno=GA},acc_pat=AccPat,acc_guard=Cg,skip_pat=SkipPat,
- tail=Tail,tail_pat=#c_literal{anno=LA,val=[]},arg={Ceps++Pre,Ce}},
+ Gen = #igen{anno=#a{anno=GA},ceps=Ceps,
+ acc_pat=AccPat,acc_guard=Cg,skip_pat=SkipPat,
+ tail=Tail,tail_pat=#c_literal{anno=LA,val=[]},arg={Pre,Ce}},
{Gen,St4};
generator(Line, {b_generate,Lg,P,E}, Gs, St0) ->
LA = lineno_anno(Line, St0),
@@ -1515,6 +1481,7 @@ force_novars(#iapply{}=App, St) -> {App,[],St};
force_novars(#icall{}=Call, St) -> {Call,[],St};
force_novars(#ifun{}=Fun, St) -> {Fun,[],St}; %These are novars too
force_novars(#ibinary{}=Bin, St) -> {Bin,[],St};
+force_novars(#c_map{}=Bin, St) -> {Bin,[],St};
force_novars(Ce, St) ->
force_safe(Ce, St).
@@ -1634,7 +1601,7 @@ pattern({tuple,L,Ps}, St) ->
{annotate_tuple(record_anno(L, St), Ps1, St),Eps,St1};
pattern({map,L,Pairs}, St0) ->
{Ps,Eps,St1} = pattern_map_pairs(Pairs, St0),
- {#c_map{anno=lineno_anno(L, St1), es=Ps},Eps,St1};
+ {#c_map{anno=lineno_anno(L, St1),es=Ps,is_pat=true},Eps,St1};
pattern({bin,L,Ps}, St) ->
%% We don't create a #ibinary record here, since there is
%% no need to hold any used/new annotations in a pattern.
@@ -1656,49 +1623,30 @@ pattern_map_pairs(Ps, St) ->
{CMapPair,EpsP,Sti1} = pattern_map_pair(P,Sti0),
{CMapPair, {EpsM++EpsP,Sti1}}
end, {[],St}, Ps),
- {pat_alias_map_pairs(CMapPairs,[]),Eps,St1}.
-
-%% remove cluddering annotations
-pattern_map_clean_key(#c_literal{val=V}) -> {literal,V};
-pattern_map_clean_key(#c_var{name=V}) -> {var,V}.
-
-pat_alias_map_pairs(Ps1,Ps2) ->
- Ps = Ps1 ++ Ps2,
- F = fun(#c_map_pair{key=Ck,val=Cv},Dbi) ->
- K = pattern_map_clean_key(Ck),
- case dict:find(K,Dbi) of
- {ok,Cvs} -> dict:store(K,[Cv|Cvs],Dbi);
- _ -> dict:store(K,[Cv],Dbi)
- end
- end,
- Kdb = lists:foldl(F,dict:new(),Ps),
- pat_alias_map_pairs(Ps,Kdb,sets:new()).
-
-pat_alias_map_pairs([],_,_) -> [];
-pat_alias_map_pairs([#c_map_pair{key=Ck}=Pair|Pairs],Kdb,Set) ->
- K = pattern_map_clean_key(Ck),
- case sets:is_element(K,Set) of
- true ->
- pat_alias_map_pairs(Pairs,Kdb,Set);
- false ->
- Cvs = dict:fetch(K,Kdb),
- Cv = pat_alias_map_pair_values(Cvs),
- Set1 = sets:add_element(K,Set),
- [Pair#c_map_pair{val=Cv}|pat_alias_map_pairs(Pairs,Kdb,Set1)]
- end.
-
-pat_alias_map_pair_values([Cv]) -> Cv;
-pat_alias_map_pair_values([Cv1,Cv2|Cvs]) ->
- pat_alias_map_pair_values([pat_alias(Cv1,Cv2)|Cvs]).
+ {pat_alias_map_pairs(CMapPairs),Eps,St1}.
pattern_map_pair({map_field_exact,L,K,V}, St0) ->
- {Ck,EpsK,St1} = safe_pattern_expr(K,St0),
+ {Ck,EpsK,St1} = safe_pattern_expr(K, St0),
{Cv,EpsV,St2} = pattern(V, St1),
- {#c_map_pair{anno=lineno_anno(L,St2),
+ {#c_map_pair{anno=lineno_anno(L, St2),
op=#c_literal{val=exact},
key=Ck,
val=Cv},EpsK++EpsV,St2}.
+pat_alias_map_pairs(Ps) ->
+ D = foldl(fun(#c_map_pair{key=K0}=Pair, D0) ->
+ K = cerl:set_ann(K0, []),
+ dict:append(K, Pair, D0)
+ end, dict:new(), Ps),
+ pat_alias_map_pairs_1(dict:to_list(D)).
+
+pat_alias_map_pairs_1([{_,[#c_map_pair{val=V0}=Pair|Vs]}|T]) ->
+ V = foldl(fun(#c_map_pair{val=V}, Pat) ->
+ pat_alias(V, Pat)
+ end, V0, Vs),
+ [Pair#c_map_pair{val=V}|pat_alias_map_pairs_1(T)];
+pat_alias_map_pairs_1([]) -> [].
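%% For example, a pattern with a repeated key, such as
%%
%%     #{K := V, K := {a,W}} = Map
%%
%% is collapsed into a single pair whose value pattern is the alias
%% V = {a,W} in the internal Core Erlang representation.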
+
%% pat_bin([BinElement], State) -> [BinSeg].
pat_bin(Ps, St) -> [pat_segment(P, St) || P <- Ps].
@@ -1714,48 +1662,55 @@ pat_segment({bin_element,_,Val,Size,[Type,{unit,Unit}|Flags]}, St) ->
%% pat_alias(CorePat, CorePat) -> AliasPat.
%% Normalise aliases. Trap bad aliases by throwing 'nomatch'.
-pat_alias(#c_var{name=V1}, P2) -> #c_alias{var=#c_var{name=V1},pat=P2};
-pat_alias(P1, #c_var{name=V2}) -> #c_alias{var=#c_var{name=V2},pat=P1};
-
-%% alias cons
-pat_alias(#c_cons{}=Cons, #c_literal{anno=A,val=[H|T]}=S) ->
- pat_alias(Cons, ann_c_cons_skel(A, #c_literal{anno=A,val=H},
- S#c_literal{val=T}));
-pat_alias(#c_literal{anno=A,val=[H|T]}=S, #c_cons{}=Cons) ->
- pat_alias(ann_c_cons_skel(A, #c_literal{anno=A,val=H},
- S#c_literal{val=T}), Cons);
-pat_alias(#c_cons{anno=Anno,hd=H1,tl=T1}, #c_cons{hd=H2,tl=T2}) ->
- ann_c_cons(Anno, pat_alias(H1, H2), pat_alias(T1, T2));
-
-%% alias tuples
-pat_alias(#c_tuple{anno=Anno,es=Es1}, #c_literal{val=T}) when is_tuple(T) ->
- Es2 = [#c_literal{val=E} || E <- tuple_to_list(T)],
- ann_c_tuple(Anno, pat_alias_list(Es1, Es2));
-pat_alias(#c_literal{anno=Anno,val=T}, #c_tuple{es=Es2}) when is_tuple(T) ->
- Es1 = [#c_literal{val=E} || E <- tuple_to_list(T)],
- ann_c_tuple(Anno, pat_alias_list(Es1, Es2));
-pat_alias(#c_tuple{anno=Anno,es=Es1}, #c_tuple{es=Es2}) ->
- ann_c_tuple(Anno, pat_alias_list(Es1, Es2));
-
-%% alias maps
-%% There are no literals in maps patterns (patterns are always abstract)
-pat_alias(#c_map{es=Es1}=M,#c_map{es=Es2}) ->
- M#c_map{es=pat_alias_map_pairs(Es1,Es2)};
-
-pat_alias(#c_alias{var=V1,pat=P1},
- #c_alias{var=V2,pat=P2}) ->
- if V1 =:= V2 -> #c_alias{var=V1,pat=pat_alias(P1, P2)};
- true -> #c_alias{var=V1,pat=#c_alias{var=V2,pat=pat_alias(P1, P2)}}
+pat_alias(#c_var{name=V1}=P, #c_var{name=V1}) -> P;
+pat_alias(#c_var{name=V1}=Var,
+ #c_alias{var=#c_var{name=V2},pat=Pat}=Alias) ->
+ if
+ V1 =:= V2 ->
+ Alias;
+ true ->
+ Alias#c_alias{pat=pat_alias(Var, Pat)}
end;
-pat_alias(#c_alias{var=V1,pat=P1}, P2) ->
- #c_alias{var=V1,pat=pat_alias(P1, P2)};
-pat_alias(P1, #c_alias{var=V2,pat=P2}) ->
- #c_alias{var=V2,pat=pat_alias(P1, P2)};
+pat_alias(#c_var{}=P1, P2) -> #c_alias{var=P1,pat=P2};
+
+pat_alias(#c_alias{var=#c_var{name=V1}}=Alias, #c_var{name=V1}) ->
+ Alias;
+pat_alias(#c_alias{var=#c_var{name=V1}=Var1,pat=P1},
+ #c_alias{var=#c_var{name=V2}=Var2,pat=P2}) ->
+ Pat = pat_alias(P1, P2),
+ if
+ V1 =:= V2 ->
+ #c_alias{var=Var1,pat=Pat};
+ true ->
+ pat_alias(Var1, pat_alias(Var2, Pat))
+ end;
+pat_alias(#c_alias{var=#c_var{}=Var,pat=P1}, P2) ->
+ #c_alias{var=Var,pat=pat_alias(P1, P2)};
+
+pat_alias(#c_map{es=Es1}=M, #c_map{es=Es2}) ->
+ M#c_map{es=pat_alias_map_pairs(Es1 ++ Es2)};
+
+pat_alias(P1, #c_var{}=Var) ->
+ #c_alias{var=Var,pat=P1};
+pat_alias(P1, #c_alias{pat=P2}=Alias) ->
+ Alias#c_alias{pat=pat_alias(P1, P2)};
+
pat_alias(P1, P2) ->
- case {set_anno(P1, []),set_anno(P2, [])} of
- {P,P} -> P;
+ %% Aliases between binaries are not allowed, so the only
+ %% legal patterns that remain are data patterns.
+ case cerl:is_data(P1) andalso cerl:is_data(P2) of
+ false -> throw(nomatch);
+ true -> ok
+ end,
+ Type = cerl:data_type(P1),
+ case cerl:data_type(P2) of
+ Type -> ok;
_ -> throw(nomatch)
- end.
+ end,
+ Es1 = cerl:data_es(P1),
+ Es2 = cerl:data_es(P2),
+ Es = pat_alias_list(Es1, Es2),
+ cerl:make_data(Type, Es).
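%% A sketch of the normalisation for data patterns (invented names):
%%
%%     {a,X} = {Y,b}    becomes the alias pattern {Y = a, X = b}
%%     {a}   = {b}      throws 'nomatch' ("pattern cannot possibly match")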
%% pat_alias_list([A1], [A2]) -> [A].
@@ -1794,7 +1749,7 @@ new_var_name(#core{vcount=C}=St) ->
new_var(St) ->
new_var([], St).
-new_var(Anno, St0) ->
+new_var(Anno, St0) when is_list(Anno) ->
{New,St} = new_var_name(St0),
{#c_var{anno=Anno,name=New},St}.
@@ -1852,7 +1807,7 @@ uclauses(Lcs, Ks, St0) ->
uclause(Cl0, Ks, St0) ->
{Cl1,_Pvs,Used,New,St1} = uclause(Cl0, Ks, Ks, St0),
- A0 = get_ianno(Cl1),
+ A0 = get_anno(Cl1),
A = A0#a{us=Used,ns=New},
{Cl1#iclause{anno=A},St1}.
@@ -2021,11 +1976,11 @@ uexpr(#ibinary{anno=A,segments=Ss}, _, St) ->
uexpr(#c_literal{}=Lit, _, St) ->
Anno = get_anno(Lit),
{set_anno(Lit, #a{us=[],anno=Anno}),St};
-uexpr(Lit, _, St) ->
- true = is_simple(Lit), %Sanity check!
- Vs = lit_vars(Lit),
- Anno = get_anno(Lit),
- {set_anno(Lit, #a{us=Vs,anno=Anno}),St}.
+uexpr(Simple, _, St) ->
+ true = is_simple(Simple), %Sanity check!
+ Vs = lit_vars(Simple),
+ Anno = get_anno(Simple),
+ {#isimple{anno=#a{us=Vs,anno=Anno},term=Simple},St}.
uexpr_list(Les0, Ks, St0) ->
mapfoldl(fun (Le, St) -> uexpr(Le, Ks, St) end, St0, Les0).
@@ -2039,7 +1994,7 @@ ufun_clauses(Lcs, Ks, St0) ->
ufun_clause(Cl0, Ks, St0) ->
{Cl1,Pvs,Used,_,St1} = uclause(Cl0, [], Ks, St0),
- A0 = get_ianno(Cl1),
+ A0 = get_anno(Cl1),
A = A0#a{us=subtract(intersection(Used, Ks), Pvs),ns=[]},
{Cl1#iclause{anno=A},St1}.
@@ -2202,7 +2157,8 @@ cguard(Gs, St0) ->
cexprs([#iset{var=#c_var{name=Name}=Var}=Iset], As, St) ->
%% Make return value explicit, and make Var true top level.
- cexprs([Iset,Var#c_var{anno=#a{us=[Name]}}], As, St);
+ Isimple = #isimple{anno=#a{us=[Name]},term=Var},
+ cexprs([Iset,Isimple], As, St);
cexprs([Le], As, St0) ->
{Ce,Es,Us,St1} = cexpr(Le, As, St0),
Exp = make_vars(As), %The export variables
@@ -2317,12 +2273,9 @@ cexpr(#c_literal{}=Lit, _As, St) ->
Anno = get_anno(Lit),
Vs = Anno#a.us,
{set_anno(Lit, Anno#a.anno),[],Vs,St};
-cexpr(Lit, _As, St) ->
- true = is_simple(Lit), %Sanity check!
- Anno = get_anno(Lit),
- Vs = Anno#a.us,
- %%Vs = lit_vars(Lit),
- {set_anno(Lit, Anno#a.anno),[],Vs,St}.
+cexpr(#isimple{anno=#a{us=Vs},term=Simple}, _As, St) ->
+ true = is_simple(Simple), %Sanity check!
+ {Simple,[],Vs,St}.
cfun(#ifun{anno=A,id=Id,vars=Args,clauses=Lcs,fc=Lfc}, _As, St0) ->
{Ccs,St1} = cclauses(Lcs, [], St0), %NEVER export!
@@ -2345,11 +2298,6 @@ lit_vars(#c_map_pair{key=K,val=V}, Vs) -> lit_vars(K, lit_vars(V, Vs));
lit_vars(#c_var{name=V}, Vs) -> add_element(V, Vs);
lit_vars(_, Vs) -> Vs. %These are atomic
-% lit_bin_vars(Segs, Vs) ->
-% foldl(fun (#c_bitstr{val=V,size=S}, Vs0) ->
-% lit_vars(V, lit_vars(S, Vs0))
-% end, Vs, Segs).
-
lit_list_vars(Ls) -> lit_list_vars(Ls, []).
lit_list_vars(Ls, Vs) ->
@@ -2363,37 +2311,26 @@ bitstr_vars(Segs, Vs) ->
lit_vars(V, lit_vars(S, Vs0))
end, Vs, Segs).
-record_anno(L, St) when L >= ?REC_OFFSET ->
- case member(dialyzer, St#core.opts) of
- true ->
- [record | lineno_anno(L - ?REC_OFFSET, St)];
- false ->
- lineno_anno(L, St)
- end;
-record_anno(L, St) when L < -?REC_OFFSET ->
- case member(dialyzer, St#core.opts) of
+record_anno(L, St) ->
+ case
+ erl_anno:record(L) andalso member(dialyzer, St#core.opts)
+ of
true ->
- [record | lineno_anno(L + ?REC_OFFSET, St)];
+ [record | lineno_anno(L, St)];
false ->
- lineno_anno(L, St)
- end;
-record_anno(L, St) ->
+ full_anno(L, St)
+ end.
+
+full_anno(L, #core{wanted=false}=St) ->
+ [result_not_wanted|lineno_anno(L, St)];
+full_anno(L, #core{wanted=true}=St) ->
lineno_anno(L, St).
lineno_anno(L, St) ->
- {line, Line} = erl_parse:get_attribute(L, line),
- if
- Line < 0 ->
- [-Line] ++ St#core.file ++ [compiler_generated];
- true ->
- [Line] ++ St#core.file
- end.
-
-get_ianno(Ce) ->
- case get_anno(Ce) of
- #a{}=A -> A;
- A when is_list(A) -> #a{anno=A}
- end.
+ Line = erl_anno:line(L),
+ Generated = erl_anno:generated(L),
+ CompilerGenerated = [compiler_generated || Generated],
+ [Line] ++ St#core.file ++ CompilerGenerated.
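%% Note that the comprehension over the boolean yields either a
%% one-element list or an empty one:
%%
%%     [compiler_generated || true]  =:= [compiler_generated]
%%     [compiler_generated || false] =:= []
%%
%% so generated code simply gets the 'compiler_generated' marker
%% appended to the ordinary [Line] ++ File annotation.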
get_lineno_anno(Ce) ->
case get_anno(Ce) of
@@ -2401,15 +2338,8 @@ get_lineno_anno(Ce) ->
A when is_list(A) -> A
end.
-location(L) ->
- {location,Location} = erl_parse:get_attribute(L, location),
- Location.
-
-abs_line(L) ->
- erl_parse:set_line(L, fun(Line) -> abs(Line) end).
-
-neg_line(L) ->
- erl_parse:set_line(L, fun(Line) -> -abs(Line) end).
+no_compiler_warning(Anno) ->
+ erl_anno:set_generated(true, Anno).
%%
%% The following three functions are used both with cerl:cerl() and with i()'s
@@ -2450,9 +2380,13 @@ format_error(nomatch) ->
"pattern cannot possibly match";
format_error(bad_binary) ->
"binary construction will fail because of a type mismatch";
-format_error(bad_map) ->
+format_error(badmap) ->
"map construction will fail because of a type mismatch".
-add_warning(Line, Term, #core{ws=Ws,file=[{file,File}]}=St) when Line >= 0 ->
- St#core{ws=[{File,[{location(Line),?MODULE,Term}]}|Ws]};
-add_warning(_, _, St) -> St.
+add_warning(Anno, Term, #core{ws=Ws,file=[{file,File}]}=St) ->
+ case erl_anno:generated(Anno) of
+ false ->
+ St#core{ws=[{File,[{erl_anno:location(Anno),?MODULE,Term}]}|Ws]};
+ true ->
+ St
+ end.
diff --git a/lib/compiler/src/v3_kernel.erl b/lib/compiler/src/v3_kernel.erl
index 6504351c02..c21b2a1505 100644
--- a/lib/compiler/src/v3_kernel.erl
+++ b/lib/compiler/src/v3_kernel.erl
@@ -114,7 +114,7 @@ copy_anno(Kdst, Ksrc) ->
ff, %Current function
vcount=0, %Variable counter
fcount=0, %Fun counter
- ds=[], %Defined variables
+ ds=cerl_sets:new() :: cerl_sets:set(), %Defined variables
funs=[], %Fun functions
free=[], %Free variables
ws=[] :: [warning()], %Warnings.
@@ -131,12 +131,12 @@ module(#c_module{anno=A,name=M,exports=Es,attrs=As,defs=Fs}, _Options) ->
{ok,#k_mdef{anno=A,name=M#c_literal.val,exports=Kes,attributes=Kas,
body=Kfs ++ St#kern.funs},lists:sort(St#kern.ws)}.
-attributes([{#c_literal{val=Name},Val}|As]) ->
+attributes([{#c_literal{val=Name},#c_literal{val=Val}}|As]) ->
case include_attribute(Name) of
false ->
attributes(As);
true ->
- [{Name,core_lib:literal_value(Val)}|attributes(As)]
+ [{Name,Val}|attributes(As)]
end;
attributes([]) -> [].
@@ -148,7 +148,7 @@ include_attribute(_) -> true.
function({#c_var{name={F,Arity}=FA},Body}, St0) ->
try
- St1 = St0#kern{func=FA,ff=undefined,vcount=0,fcount=0,ds=sets:new()},
+ St1 = St0#kern{func=FA,ff=undefined,vcount=0,fcount=0,ds=cerl_sets:new()},
{#ifun{anno=Ab,vars=Kvs,body=B0},[],St2} = expr(Body, new_sub(), St1),
{B1,_,St3} = ubody(B0, return, St2),
%%B1 = B0, St3 = St2, %Null second pass
@@ -273,17 +273,7 @@ expr(#c_tuple{anno=A,es=Ces}, Sub, St0) ->
{Kes,Ep,St1} = atomic_list(Ces, Sub, St0),
{#k_tuple{anno=A,es=Kes},Ep,St1};
expr(#c_map{anno=A,arg=Var,es=Ces}, Sub, St0) ->
- try expr_map(A,Var,Ces,Sub,St0) of
- {_,_,_}=Res -> Res
- catch
- throw:bad_map ->
- St1 = add_warning(get_line(A), bad_map, A, St0),
- Erl = #c_literal{val=erlang},
- Name = #c_literal{val=error},
- Args = [#c_literal{val=badarg}],
- Error = #c_call{anno=A,module=Erl,name=Name,args=Args},
- expr(Error, Sub, St1)
- end;
+ expr_map(A, Var, Ces, Sub, St0);
expr(#c_binary{anno=A,segments=Cv}, Sub, St0) ->
try atomic_bin(Cv, Sub, St0) of
{Kv,Ep,St1} ->
@@ -506,77 +496,81 @@ translate_fc(Args) ->
[#c_literal{val=function_clause},make_list(Args)].
expr_map(A,Var0,Ces,Sub,St0) ->
- %% An extra pass of validation of Map src because of inlining
{Var,Mps,St1} = expr(Var0, Sub, St0),
- case is_valid_map_src(Var) of
- true ->
- {Km,Eps,St2} = map_split_pairs(A, Var, Ces, Sub, St1),
- {Km,Eps++Mps,St2};
- false -> throw(bad_map)
- end.
-
-is_valid_map_src(#k_map{}) -> true;
-is_valid_map_src(#k_literal{val=M}) when is_map(M) -> true;
-is_valid_map_src(#k_var{}) -> true;
-is_valid_map_src(_) -> false.
+ {Km,Eps,St2} = map_split_pairs(A, Var, Ces, Sub, St1),
+ {Km,Eps++Mps,St2}.
map_split_pairs(A, Var, Ces, Sub, St0) ->
- %% two steps
- %% 1. force variables
- %% 2. remove multiples
- Pairs0 = [{Op,K,V} || #c_map_pair{op=#c_literal{val=Op},key=K,val=V} <- Ces],
+ %% 1. Force variables.
+ %% 2. Group adjacent pairs with literal keys.
+ %% 3. Within each such group, remove multiple assignments to the same key.
+ %% 4. Partition each group according to operator ('=>' and ':=').
+ Pairs0 = [{Op,K,V} ||
+ #c_map_pair{op=#c_literal{val=Op},key=K,val=V} <- Ces],
{Pairs,Esp,St1} = foldr(fun
({Op,K0,V0}, {Ops,Espi,Sti0}) when Op =:= assoc; Op =:= exact ->
{K,Eps1,Sti1} = atomic(K0, Sub, Sti0),
{V,Eps2,Sti2} = atomic(V0, Sub, Sti1),
{[{Op,K,V}|Ops],Eps1 ++ Eps2 ++ Espi,Sti2}
end, {[],[],St0}, Pairs0),
-
- case map_group_pairs(Pairs) of
- {Assoc,[]} ->
- Kes = [#k_map_pair{key=K,val=V}||{_,{assoc,K,V}} <- Assoc],
- {#k_map{anno=A,op=assoc,var=Var,es=Kes},Esp,St1};
- {[],Exact} ->
- Kes = [#k_map_pair{key=K,val=V}||{_,{exact,K,V}} <- Exact],
- {#k_map{anno=A,op=exact,var=Var,es=Kes},Esp,St1};
- {Assoc,Exact} ->
- Kes1 = [#k_map_pair{key=K,val=V}||{_,{assoc,K,V}} <- Assoc],
- {Mvar,Em,St2} = force_atomic(#k_map{anno=A,op=assoc,var=Var,es=Kes1},St1),
- Kes2 = [#k_map_pair{key=K,val=V}||{_,{exact,K,V}} <- Exact],
- {#k_map{anno=A,op=exact,var=Mvar,es=Kes2},Esp ++ Em,St2}
-
+ map_split_pairs_1(A, Var, Pairs, Esp, St1).
+
+map_split_pairs_1(A, Map0, [{Op,Key,Val}|Pairs1]=Pairs0, Esp0, St0) ->
+ {Map1,Em,St1} = force_atomic(Map0, St0),
+ case Key of
+ #k_var{} ->
+ %% Don't combine variable keys with other keys.
+ Kes = [#k_map_pair{key=Key,val=Val}],
+ Map = #k_map{anno=A,op=Op,var=Map1,es=Kes},
+ map_split_pairs_1(A, Map, Pairs1, Esp0 ++ Em, St1);
+ _ ->
+ %% Literal key. Split off all literal keys.
+ {L,Pairs} = splitwith(fun({_,#k_var{},_}) -> false;
+ ({_,_,_}) -> true
+ end, Pairs0),
+ {Map,Esp,St2} = map_group_pairs(A, Map1, L, Esp0 ++ Em, St1),
+ map_split_pairs_1(A, Map, Pairs, Esp, St2)
+ end;
+map_split_pairs_1(_, Map, [], Esp, St0) ->
+ {Map,Esp,St0}.
+
+map_group_pairs(A, Var, Pairs0, Esp, St0) ->
+ Pairs = map_remove_dup_keys(Pairs0),
+ Assoc = [#k_map_pair{key=K,val=V} || {_,{assoc,K,V}} <- Pairs],
+ Exact = [#k_map_pair{key=K,val=V} || {_,{exact,K,V}} <- Pairs],
+ case {Assoc,Exact} of
+ {[_|_],[]} ->
+ {#k_map{anno=A,op=assoc,var=Var,es=Assoc},Esp,St0};
+ {[],[_|_]} ->
+ {#k_map{anno=A,op=exact,var=Var,es=Exact},Esp,St0};
+ {[_|_],[_|_]} ->
+ Map = #k_map{anno=A,op=assoc,var=Var,es=Assoc},
+ {Mvar,Em,St1} = force_atomic(Map, St0),
+ {#k_map{anno=A,op=exact,var=Mvar,es=Exact},Esp ++ Em,St1}
end.
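%% A rough example of the splitting and grouping (invented names):
%%
%%     Map#{1 => a, K := b, 2 := c}
%%
%% ends up as the chain
%%
%%     M1 = Map#{1 => a},       %literal key, '=>' group
%%     M2 = M1#{K := b},        %variable key kept on its own
%%     M2#{2 := c}              %literal key, ':=' group
%%
%% where adjacent literal keys share one k_map and a variable key
%% always starts a new one.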
-%% Group map by Assoc operations and Exact operations
+map_remove_dup_keys(Es) ->
+ dict:to_list(map_remove_dup_keys(Es, dict:new())).
-map_group_pairs(Es) ->
- Groups = dict:to_list(map_group_pairs(Es,dict:new())),
- partition(fun({_,{Op,_,_}}) -> Op =:= assoc end, Groups).
-
-map_group_pairs([{assoc,K,V}|Es0],Used0) ->
- Used1 = case map_key_is_used(K,Used0) of
- {ok, {assoc,_,_}} -> map_key_set_used(K,{assoc,K,V},Used0);
- {ok, {exact,_,_}} -> map_key_set_used(K,{exact,K,V},Used0);
- _ -> map_key_set_used(K,{assoc,K,V},Used0)
- end,
- map_group_pairs(Es0,Used1);
-map_group_pairs([{exact,K,V}|Es0],Used0) ->
- Used1 = case map_key_is_used(K,Used0) of
- {ok, {assoc,_,_}} -> map_key_set_used(K,{assoc,K,V},Used0);
- {ok, {exact,_,_}} -> map_key_set_used(K,{exact,K,V},Used0);
- _ -> map_key_set_used(K,{exact,K,V},Used0)
- end,
- map_group_pairs(Es0,Used1);
-map_group_pairs([],Used) ->
- Used.
-
-map_key_set_used(K,How,Used) ->
- dict:store(map_key_clean(K),How,Used).
-
-map_key_is_used(K,Used) ->
- dict:find(map_key_clean(K),Used).
+map_remove_dup_keys([{assoc,K0,V}|Es0],Used0) ->
+ K = map_key_clean(K0),
+ Op = case dict:find(K, Used0) of
+ {ok,{exact,_,_}} -> exact;
+ _ -> assoc
+ end,
+ Used1 = dict:store(K, {Op,K0,V}, Used0),
+ map_remove_dup_keys(Es0, Used1);
+map_remove_dup_keys([{exact,K0,V}|Es0],Used0) ->
+ K = map_key_clean(K0),
+ Op = case dict:find(K, Used0) of
+ {ok,{assoc,_,_}} -> assoc;
+ _ -> exact
+ end,
+ Used1 = dict:store(K, {Op,K0,V}, Used0),
+ map_remove_dup_keys(Es0, Used1);
+map_remove_dup_keys([], Used) -> Used.
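%% For example, within one group of literal keys:
%%
%%     M#{a => 1, a := 2}    keeps only    a => 2
%%     M#{a := 1, a => 2}    keeps only    a := 2
%%
%% The last value wins, while the operator of the first occurrence
%% decides whether the key must already be present (':=') or not ('=>').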
-%% Be explicit instead of using set_kanno(K,[])
+%% Be explicit instead of using set_kanno(K, []).
map_key_clean(#k_var{name=V}) -> {var,V};
map_key_clean(#k_literal{val=V}) -> {lit,V};
map_key_clean(#k_int{val=V}) -> {lit,V};
@@ -661,12 +655,12 @@ atomic_bin([#c_bitstr{anno=A,val=E0,size=S0,unit=U0,type=T,flags=Fs0}|Es0],
{E,Ap1,St1} = atomic(E0, Sub, St0),
{S1,Ap2,St2} = atomic(S0, Sub, St1),
validate_bin_element_size(S1),
- U1 = core_lib:literal_value(U0),
- Fs1 = core_lib:literal_value(Fs0),
+ U1 = cerl:concrete(U0),
+ Fs1 = cerl:concrete(Fs0),
{Es,Ap3,St3} = atomic_bin(Es0, Sub, St2),
{#k_bin_seg{anno=A,size=S1,
unit=U1,
- type=core_lib:literal_value(T),
+ type=cerl:concrete(T),
flags=Fs1,
seg=E,next=Es},
Ap1++Ap2++Ap3,St3};
@@ -721,15 +715,15 @@ force_variable(Ke, St0) ->
%% handling.
pattern(#c_var{anno=A,name=V}, _Isub, Osub, St0) ->
- case sets:is_element(V, St0#kern.ds) of
+ case cerl_sets:is_element(V, St0#kern.ds) of
true ->
{New,St1} = new_var_name(St0),
{#k_var{anno=A,name=New},
set_vsub(V, New, Osub),
- St1#kern{ds=sets:add_element(New, St1#kern.ds)}};
+ St1#kern{ds=cerl_sets:add_element(New, St1#kern.ds)}};
false ->
{#k_var{anno=A,name=V},Osub,
- St0#kern{ds=sets:add_element(V, St0#kern.ds)}}
+ St0#kern{ds=cerl_sets:add_element(V, St0#kern.ds)}}
end;
pattern(#c_literal{anno=A,val=Val}, _Isub, Osub, St) ->
{#k_literal{anno=A,val=Val},Osub,St};
@@ -793,8 +787,8 @@ pattern_bin_1([#c_bitstr{anno=A,val=E0,size=S0,unit=U,type=T,flags=Fs}|Es0],
%% problems.
#k_atom{val=bad_size}
end,
- U0 = core_lib:literal_value(U),
- Fs0 = core_lib:literal_value(Fs),
+ U0 = cerl:concrete(U),
+ Fs0 = cerl:concrete(Fs),
%%ok= io:fwrite("~w: ~p~n", [?LINE,{B0,S,U0,Fs0}]),
{E,Osub1,St2} = pattern(E0, Isub0, Osub0, St1),
Isub1 = case E0 of
@@ -805,7 +799,7 @@ pattern_bin_1([#c_bitstr{anno=A,val=E0,size=S0,unit=U,type=T,flags=Fs}|Es0],
{Es,{Isub,Osub},St3} = pattern_bin_1(Es0, Isub1, Osub1, St2),
{#k_bin_seg{anno=A,size=S,
unit=U0,
- type=core_lib:literal_value(T),
+ type=cerl:concrete(T),
flags=Fs0,
seg=E,next=Es},
{Isub,Osub},St3};
@@ -842,12 +836,23 @@ get_vsub(V, Vsub) ->
set_vsub(V, S, Vsub) ->
orddict:store(V, S, Vsub).
-subst_vsub(V, S, Vsub0) ->
- %% Fold chained substitutions.
- Vsub1 = orddict:map(fun (_, V1) when V1 =:= V -> S;
- (_, V1) -> V1
- end, Vsub0),
- orddict:store(V, S, Vsub1).
+subst_vsub(Key, New, [{K,Key}|Dict]) ->
+ %% Fold chained substitution.
+ [{K,New}|subst_vsub(Key, New, Dict)];
+subst_vsub(Key, New, [{K,_}|_]=Dict) when Key < K ->
+ %% Insert the new substitution here, and continue
+ %% looking for chained substitutions.
+ [{Key,New}|subst_vsub_1(Key, New, Dict)];
+subst_vsub(Key, New, [{K,_}=E|Dict]) when Key > K ->
+ [E|subst_vsub(Key, New, Dict)];
+subst_vsub(Key, New, []) -> [{Key,New}].
+
+subst_vsub_1(V, S, [{K,V}|Dict]) ->
+ %% Fold chained substitution.
+ [{K,S}|subst_vsub_1(V, S, Dict)];
+subst_vsub_1(V, S, [E|Dict]) ->
+ [E|subst_vsub_1(V, S, Dict)];
+subst_vsub_1(_, _, []) -> [].
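%% A small illustration of the chain folding (Vsub is an orddict):
%%
%%     subst_vsub(b, c, [{a,b}]) -> [{a,c},{b,c}]
%%
%% An existing substitution a -> b is folded into a -> c when b -> c
%% is added, so lookups never have to follow chains.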
get_fsub(F, A, Fsub) ->
case orddict:find({F,A}, Fsub) of
@@ -892,7 +897,7 @@ new_vars(0, St, Vs) -> {Vs,St}.
make_vars(Vs) -> [ #k_var{name=V} || V <- Vs ].
add_var_def(V, St) ->
- St#kern{ds=sets:add_element(V#k_var.name, St#kern.ds)}.
+ St#kern{ds=cerl_sets:add_element(V#k_var.name, St#kern.ds)}.
%%add_vars_def(Vs, St) ->
%% Ds = foldl(fun (#k_var{name=V}, Ds) -> add_element(V, Ds) end,
@@ -2010,9 +2015,7 @@ format_error(nomatch_shadow) ->
format_error(bad_call) ->
"invalid module and/or function name; this call will always fail";
format_error(bad_segment_size) ->
- "binary construction will fail because of a type mismatch";
-format_error(bad_map) ->
- "map construction will fail because of a type mismatch".
+ "binary construction will fail because of a type mismatch".
add_warning(none, Term, Anno, #kern{ws=Ws}=St) ->
File = get_file(Anno),
diff --git a/lib/compiler/src/v3_life.erl b/lib/compiler/src/v3_life.erl
index cd4b5fd674..ee0565efb6 100644
--- a/lib/compiler/src/v3_life.erl
+++ b/lib/compiler/src/v3_life.erl
@@ -45,7 +45,7 @@
-export([vdb_find/2]).
--import(lists, [member/2,map/2,foldl/3,reverse/1,sort/1]).
+-import(lists, [member/2,map/2,reverse/1,sort/1]).
-import(ordsets, [add_element/2,intersection/2,union/2]).
-include("v3_kernel.hrl").
@@ -68,7 +68,7 @@ functions([], Acc) -> reverse(Acc).
function(#k_fdef{anno=#k{a=Anno},func=F,arity=Ar,vars=Vs,body=Kb}) ->
try
As = var_list(Vs),
- Vdb0 = foldl(fun ({var,N}, Vdb) -> new_var(N, 0, Vdb) end, [], As),
+ Vdb0 = init_vars(As),
%% Force a top-level match!
B0 = case Kb of
#k_match{} -> Kb;
@@ -94,14 +94,14 @@ function(#k_fdef{anno=#k{a=Anno},func=F,arity=Ar,vars=Vs,body=Kb}) ->
body(#k_seq{arg=Ke,body=Kb}, I, Vdb0) ->
%%ok = io:fwrite("life ~w:~p~n", [?LINE,{Ke,I,Vdb0}]),
A = get_kanno(Ke),
- Vdb1 = use_vars(A#k.us, I, new_vars(A#k.ns, I, Vdb0)),
+ Vdb1 = use_vars(union(A#k.us, A#k.ns), I, Vdb0),
{Es,MaxI,Vdb2} = body(Kb, I+1, Vdb1),
E = expr(Ke, I, Vdb2),
{[E|Es],MaxI,Vdb2};
body(Ke, I, Vdb0) ->
%%ok = io:fwrite("life ~w:~p~n", [?LINE,{Ke,I,Vdb0}]),
A = get_kanno(Ke),
- Vdb1 = use_vars(A#k.us, I, new_vars(A#k.ns, I, Vdb0)),
+ Vdb1 = use_vars(union(A#k.us, A#k.ns), I, Vdb0),
E = expr(Ke, I, Vdb1),
{[E],I,Vdb1}.
@@ -150,12 +150,12 @@ expr(#k_try_enter{anno=A,arg=Ka,vars=Vs,body=Kb,evars=Evs,handler=Kh}, I, Vdb) -
%% the body and handler. Add try tag 'variable'.
Ab = get_kanno(Kb),
Ah = get_kanno(Kh),
- Tdb1 = use_vars(Ab#k.us, I+3, use_vars(Ah#k.us, I+3, Tdb0)),
+ Tdb1 = use_vars(union(Ab#k.us, Ah#k.us), I+3, Tdb0),
Tdb2 = vdb_sub(I, I+2, Tdb1),
Vnames = fun (Kvar) -> Kvar#k_var.name end, %Get the variable names
{Aes,_,Adb} = body(Ka, I+2, add_var({catch_tag,I+1}, I+1, 1000000, Tdb2)),
- {Bes,_,Bdb} = body(Kb, I+4, new_vars(map(Vnames, Vs), I+3, Tdb2)),
- {Hes,_,Hdb} = body(Kh, I+4, new_vars(map(Vnames, Evs), I+3, Tdb2)),
+ {Bes,_,Bdb} = body(Kb, I+4, new_vars(sort(map(Vnames, Vs)), I+3, Tdb2)),
+ {Hes,_,Hdb} = body(Kh, I+4, new_vars(sort(map(Vnames, Evs)), I+3, Tdb2)),
#l{ke={try_enter,#l{ke={block,Aes},i=I+1,vdb=Adb,a=[]},
var_list(Vs),#l{ke={block,Bes},i=I+3,vdb=Bdb,a=[]},
var_list(Evs),#l{ke={block,Hes},i=I+3,vdb=Hdb,a=[]}},
@@ -171,7 +171,7 @@ expr(#k_receive{anno=A,var=V,body=Kb,timeout=T,action=Ka,ret=Rs}, I, Vdb) ->
%% Work out imported variables which need to be locked.
Rdb = vdb_sub(I, I+1, Vdb),
M = match(Kb, add_element(V#k_var.name, A#k.us), I+1, [],
- new_var(V#k_var.name, I, Rdb)),
+ new_vars([V#k_var.name], I, Rdb)),
{Tes,_,Adb} = body(Ka, I+1, Rdb),
#l{ke={receive_loop,atomic(T),variable(V),M,
#l{ke=Tes,i=I+1,vdb=Adb,a=[]},var_list(Rs)},
@@ -199,12 +199,12 @@ body_try(#k_try{anno=A,arg=Ka,vars=Vs,body=Kb,evars=Evs,handler=Kh,ret=Rs},
%% the body and handler. Add try tag 'variable'.
Ab = get_kanno(Kb),
Ah = get_kanno(Kh),
- Tdb1 = use_vars(Ab#k.us, I+3, use_vars(Ah#k.us, I+3, Tdb0)),
+ Tdb1 = use_vars(union(Ab#k.us, Ah#k.us), I+3, Tdb0),
Tdb2 = vdb_sub(I, I+2, Tdb1),
Vnames = fun (Kvar) -> Kvar#k_var.name end, %Get the variable names
{Aes,_,Adb} = body(Ka, I+2, add_var({catch_tag,I+1}, I+1, locked, Tdb2)),
- {Bes,_,Bdb} = body(Kb, I+4, new_vars(map(Vnames, Vs), I+3, Tdb2)),
- {Hes,_,Hdb} = body(Kh, I+4, new_vars(map(Vnames, Evs), I+3, Tdb2)),
+ {Bes,_,Bdb} = body(Kb, I+4, new_vars(sort(map(Vnames, Vs)), I+3, Tdb2)),
+ {Hes,_,Hdb} = body(Kh, I+4, new_vars(sort(map(Vnames, Evs)), I+3, Tdb2)),
#l{ke={'try',#l{ke={block,Aes},i=I+1,vdb=Adb,a=[]},
var_list(Vs),#l{ke={block,Bes},i=I+3,vdb=Bdb,a=[]},
var_list(Evs),#l{ke={block,Hes},i=I+3,vdb=Hdb,a=[]},
@@ -270,7 +270,7 @@ match(#k_select{anno=A,var=V,types=Kts}, Ls0, I, Ctxt, Vdb0) ->
end,
Vdb1 = use_vars(union(A#k.us, Ls1), I, Vdb0),
Ts = [type_clause(Tc, Ls1, I+1, Ctxt, Vdb1) || Tc <- Kts],
- #l{ke={select,literal2(V, Ctxt),Ts},i=I,vdb=Vdb1,a=Anno};
+ #l{ke={select,literal(V, Ctxt),Ts},i=I,vdb=Vdb1,a=Anno};
match(#k_guard{anno=A,clauses=Kcs}, Ls, I, Ctxt, Vdb0) ->
Vdb1 = use_vars(union(A#k.us, Ls), I, Vdb0),
Cs = [guard_clause(G, Ls, I+1, Ctxt, Vdb1) || G <- Kcs],
@@ -297,7 +297,7 @@ val_clause(#k_val_clause{anno=A,val=V,body=Kb}, Ls0, I, Ctxt0, Vdb0) ->
_ -> Ctxt0
end,
B = match(Kb, Ls1, I+1, Ctxt, Vdb1),
- #l{ke={val_clause,literal2(V, Ctxt),B},i=I,vdb=use_vars(Bus, I+1, Vdb1),a=A#k.a}.
+ #l{ke={val_clause,literal(V, Ctxt),B},i=I,vdb=use_vars(Bus, I+1, Vdb1),a=A#k.a}.
guard_clause(#k_guard_clause{anno=A,guard=Kg,body=Kb}, Ls, I, Ctxt, Vdb0) ->
Vdb1 = use_vars(union(A#k.us, Ls), I+2, Vdb0),
@@ -350,6 +350,7 @@ atomic_list(Ks) -> [atomic(K) || K <- Ks].
%% literal_list([Klit]) -> [Lit].
literal(#k_var{name=N}, _) -> {var,N};
+literal(#k_literal{val=I}, _) -> {literal,I};
literal(#k_int{val=I}, _) -> {integer,I};
literal(#k_float{val=F}, _) -> {float,F};
literal(#k_atom{val=N}, _) -> {atom,N};
@@ -358,58 +359,29 @@ literal(#k_nil{}, _) -> nil;
literal(#k_cons{hd=H,tl=T}, Ctxt) ->
{cons,[literal(H, Ctxt),literal(T, Ctxt)]};
literal(#k_binary{segs=V}, Ctxt) ->
- {binary,literal(V, Ctxt)};
+ {binary,literal(V, Ctxt)};
+literal(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg,next=[]}, Ctxt) ->
+ %% Only occurs in patterns.
+ {bin_seg,Ctxt,literal(S, Ctxt),U,T,Fs,[literal(Seg, Ctxt)]};
literal(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg,next=N}, Ctxt) ->
{bin_seg,Ctxt,literal(S, Ctxt),U,T,Fs,
[literal(Seg, Ctxt),literal(N, Ctxt)]};
+literal(#k_bin_int{size=S,unit=U,flags=Fs,val=Int,next=N}, Ctxt) ->
+ %% Only occurs in patterns.
+ {bin_int,Ctxt,literal(S, Ctxt),U,Fs,Int,
+ [literal(N, Ctxt)]};
literal(#k_bin_end{}, Ctxt) ->
{bin_end,Ctxt};
literal(#k_tuple{es=Es}, Ctxt) ->
{tuple,literal_list(Es, Ctxt)};
-literal(#k_map{op=Op,var=Var,es=Es}, Ctxt) ->
- {map,Op,literal(Var, Ctxt),literal_list(Es, Ctxt)};
+literal(#k_map{op=Op,var=Var,es=Es0}, Ctxt) ->
+ {map,Op,literal(Var, Ctxt),literal_list(Es0, Ctxt)};
literal(#k_map_pair{key=K,val=V}, Ctxt) ->
- {map_pair,literal(K, Ctxt),literal(V, Ctxt)};
-literal(#k_literal{val=V}, _Ctxt) ->
- {literal,V}.
+ {map_pair,literal(K, Ctxt),literal(V, Ctxt)}.
literal_list(Ks, Ctxt) ->
[literal(K, Ctxt) || K <- Ks].
-literal2(#k_var{name=N}, _) -> {var,N};
-literal2(#k_literal{val=I}, _) -> {literal,I};
-literal2(#k_int{val=I}, _) -> {integer,I};
-literal2(#k_float{val=F}, _) -> {float,F};
-literal2(#k_atom{val=N}, _) -> {atom,N};
-%%literal2(#k_char{val=C}, _) -> {char,C};
-literal2(#k_nil{}, _) -> nil;
-literal2(#k_cons{hd=H,tl=T}, Ctxt) ->
- {cons,[literal2(H, Ctxt),literal2(T, Ctxt)]};
-literal2(#k_binary{segs=V}, Ctxt) ->
- {binary,literal2(V, Ctxt)};
-literal2(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg,next=[]}, Ctxt) ->
- {bin_seg,Ctxt,literal2(S, Ctxt),U,T,Fs,[literal2(Seg, Ctxt)]};
-literal2(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg,next=N}, Ctxt) ->
- {bin_seg,Ctxt,literal2(S, Ctxt),U,T,Fs,
- [literal2(Seg, Ctxt),literal2(N, Ctxt)]};
-literal2(#k_bin_int{size=S,unit=U,flags=Fs,val=Int,next=N}, Ctxt) ->
- {bin_int,Ctxt,literal2(S, Ctxt),U,Fs,Int,
- [literal2(N, Ctxt)]};
-literal2(#k_bin_end{}, Ctxt) ->
- {bin_end,Ctxt};
-literal2(#k_tuple{es=Es}, Ctxt) ->
- {tuple,literal_list2(Es, Ctxt)};
-literal2(#k_map{op=Op,es=Es}, Ctxt) ->
- {map,Op,literal_list2(Es, Ctxt)};
-literal2(#k_map_pair{key=K,val=V}, Ctxt) ->
- {map_pair,literal2(K, Ctxt),literal2(V, Ctxt)}.
-
-literal_list2(Ks, Ctxt) ->
- [literal2(K, Ctxt) || K <- Ks].
-
-%% literal_bin(#k_bin_seg{size=S,unit=U,type=T,flags=Fs,seg=Seg,next=N}) ->
-%% {bin_seg,literal(S),U,T,Fs,[literal(Seg),literal(N)]}
-
%% is_gc_bif(Name, Arity) -> true|false
%% Determines whether the BIF Name/Arity might do a GC.
@@ -428,79 +400,78 @@ is_gc_bif(Bif, Arity) ->
erl_internal:new_type_test(Bif, Arity) orelse
erl_internal:comp_op(Bif, Arity)).
-%% new_var(VarName, I, Vdb) -> Vdb.
+%% Keep track of the lifetime of variables.
+%%
+%% init_vars([{var,VarName}]) -> Vdb.
%% new_vars([VarName], I, Vdb) -> Vdb.
-%% use_var(VarName, I, Vdb) -> Vdb.
%% use_vars([VarName], I, Vdb) -> Vdb.
%% add_var(VarName, F, L, Vdb) -> Vdb.
+%%
+%% The list of variable names for new_vars/3 and use_vars/3
+%% must be sorted.
-new_var(V, I, Vdb) ->
- vdb_store_new(V, I, I, Vdb).
+init_vars(Vs) ->
+ vdb_new(Vs).
-new_vars(Vs, I, Vdb0) ->
- foldl(fun (V, Vdb) -> new_var(V, I, Vdb) end, Vdb0, Vs).
+new_vars([], _, Vdb) -> Vdb;
+new_vars([V], I, Vdb) -> vdb_store_new(V, {V,I,I}, Vdb);
+new_vars(Vs, I, Vdb) -> vdb_update_vars(Vs, Vdb, I).
-use_var(V, I, Vdb) ->
+use_vars([], _, Vdb) ->
+ Vdb;
+use_vars([V], I, Vdb) ->
case vdb_find(V, Vdb) of
- {V,F,L} when I > L -> vdb_update(V, F, I, Vdb);
+ {V,F,L} when I > L -> vdb_update(V, {V,F,I}, Vdb);
{V,_,_} -> Vdb;
- error -> vdb_store_new(V, I, I, Vdb)
- end.
+ error -> vdb_store_new(V, {V,I,I}, Vdb)
+ end;
+use_vars(Vs, I, Vdb) -> vdb_update_vars(Vs, Vdb, I).
-use_vars([], _, Vdb) -> Vdb;
-use_vars([V], I, Vdb) -> use_var(V, I, Vdb);
-use_vars(Vs, I, Vdb) ->
- Res = use_vars_1(sort(Vs), Vdb, I),
- %% The following line can be used as an assertion.
- %% Res = foldl(fun (V, Vdb) -> use_var(V, I, Vdb) end, Vdb, Vs),
- Res.
+add_var(V, F, L, Vdb) ->
+ vdb_store_new(V, {V,F,L}, Vdb).
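%% Each entry in Vdb is a {Var,FirstUse,LastUse} triple, kept sorted
%% on Var. A rough example:
%%
%%     Vdb0 = init_vars([{var,x}]),     %[{x,0,0}]
%%     Vdb1 = new_vars([y], 3, Vdb0),   %[{x,0,0},{y,3,3}]
%%     use_vars([x,y], 7, Vdb1)         %[{x,0,7},{y,3,7}]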
-%% Measurements show that it is worthwhile having this special
-%% function that updates/inserts several variables at once.
+%% is_in_guard() -> true|false.
-use_vars_1([V|_]=Vs, [{V1,_,_}=Vd|Vdb], I) when V > V1 ->
- [Vd|use_vars_1(Vs, Vdb, I)];
-use_vars_1([V|Vs], [{V1,_,_}|_]=Vdb, I) when V < V1 ->
- %% New variable.
- [{V,I,I}|use_vars_1(Vs, Vdb, I)];
-use_vars_1([V|Vs], [{_,F,L}=Vd|Vdb], I) ->
- %% Existing variable.
- if
- I > L ->[{V,F,I}|use_vars_1(Vs, Vdb, I)];
- true -> [Vd|use_vars_1(Vs, Vdb, I)]
- end;
-use_vars_1([V|Vs], [], I) ->
- %% New variable.
- [{V,I,I}|use_vars_1(Vs, [], I)];
-use_vars_1([], Vdb, _) -> Vdb.
+is_in_guard() ->
+ get(guard_refc) > 0.
-add_var(V, F, L, Vdb) ->
- vdb_store_new(V, F, L, Vdb).
+%% vdb
+
+vdb_new(Vs) ->
+ sort([{V,0,0} || {var,V} <- Vs]).
vdb_find(V, Vdb) ->
- %% Performance note: Profiling shows that this function accounts for
- %% a lot of the execution time when huge constant terms are built.
- %% Using the BIF lists:keyfind/3 is a lot faster than the
- %% original Erlang version.
case lists:keyfind(V, 1, Vdb) of
false -> error;
Vd -> Vd
end.
-%vdb_find(V, [{V1,F,L}=Vd|Vdb]) when V < V1 -> error;
-%vdb_find(V, [{V1,F,L}=Vd|Vdb]) when V == V1 -> Vd;
-%vdb_find(V, [{V1,F,L}=Vd|Vdb]) when V > V1 -> vdb_find(V, Vdb);
-%vdb_find(V, []) -> error.
+vdb_update(V, Update, [{V,_,_}|Vdb]) ->
+ [Update|Vdb];
+vdb_update(V, Update, [Vd|Vdb]) ->
+ [Vd|vdb_update(V, Update, Vdb)].
-vdb_update(V, F, L, [{V1,_,_}=Vd|Vdb]) when V > V1 ->
- [Vd|vdb_update(V, F, L, Vdb)];
-vdb_update(V, F, L, [{V1,_,_}|Vdb]) when V == V1 ->
- [{V,F,L}|Vdb].
+vdb_store_new(V, New, [{V1,_,_}=Vd|Vdb]) when V > V1 ->
+ [Vd|vdb_store_new(V, New, Vdb)];
+vdb_store_new(V, New, [{V1,_,_}|_]=Vdb) when V < V1 ->
+ [New|Vdb];
+vdb_store_new(_, New, []) -> [New].
-vdb_store_new(V, F, L, [{V1,_,_}=Vd|Vdb]) when V > V1 ->
- [Vd|vdb_store_new(V, F, L, Vdb)];
-vdb_store_new(V, F, L, [{V1,_,_}|_]=Vdb) when V < V1 -> [{V,F,L}|Vdb];
-vdb_store_new(V, F, L, []) -> [{V,F,L}].
+vdb_update_vars([V|_]=Vs, [{V1,_,_}=Vd|Vdb], I) when V > V1 ->
+ [Vd|vdb_update_vars(Vs, Vdb, I)];
+vdb_update_vars([V|Vs], [{V1,_,_}|_]=Vdb, I) when V < V1 ->
+ %% New variable.
+ [{V,I,I}|vdb_update_vars(Vs, Vdb, I)];
+vdb_update_vars([V|Vs], [{_,F,L}=Vd|Vdb], I) ->
+ %% Existing variable.
+ if
+ I > L -> [{V,F,I}|vdb_update_vars(Vs, Vdb, I)];
+ true -> [Vd|vdb_update_vars(Vs, Vdb, I)]
+ end;
+vdb_update_vars([V|Vs], [], I) ->
+ %% New variable.
+ [{V,I,I}|vdb_update_vars(Vs, [], I)];
+vdb_update_vars([], Vdb, _) -> Vdb.
%% vdb_sub(Min, Max, Vdb) -> Vdb.
%% Extract variables which are used before and after Min. Lock
@@ -510,8 +481,3 @@ vdb_sub(Min, Max, Vdb) ->
[ if L >= Max -> {V,F,locked};
true -> Vd
end || {V,F,L}=Vd <- Vdb, F < Min, L >= Min ].
-
-%% is_in_guard() -> true|false.
-
-is_in_guard() ->
- get(guard_refc) > 0.
diff --git a/lib/compiler/test/Makefile b/lib/compiler/test/Makefile
index 892a401c75..98125fc84e 100644
--- a/lib/compiler/test/Makefile
+++ b/lib/compiler/test/Makefile
@@ -11,6 +11,7 @@ MODULES= \
beam_validator_SUITE \
beam_disasm_SUITE \
beam_except_SUITE \
+ beam_utils_SUITE \
bs_bincomp_SUITE \
bs_bit_binaries_SUITE \
bs_construct_SUITE \
@@ -34,12 +35,14 @@ MODULES= \
record_SUITE \
trycatch_SUITE \
warnings_SUITE \
+ z_SUITE \
test_lib
NO_OPT= \
andor \
apply \
beam_except \
+ beam_utils \
bs_construct \
bs_match \
bs_utf \
@@ -59,6 +62,7 @@ NO_OPT= \
INLINE= \
andor \
apply \
+ beam_utils \
bs_bincomp \
bs_bit_binaries \
bs_construct \
@@ -76,12 +80,6 @@ INLINE= \
receive \
record
-CORE_MODULES = \
- bs_shadowed_size_var \
- unused_multiple_values_error \
- nested_call_in_case
-
-
NO_OPT_MODULES= $(NO_OPT:%=%_no_opt_SUITE)
NO_OPT_ERL_FILES= $(NO_OPT_MODULES:%=%.erl)
POST_OPT_MODULES= $(NO_OPT:%=%_post_opt_SUITE)
@@ -91,8 +89,6 @@ INLINE_ERL_FILES= $(INLINE_MODULES:%=%.erl)
ERL_FILES= $(MODULES:%=%.erl)
-CORE_FILES= $(CORE_MODULES:%=%.core)
-
##TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR))
##INSTALL_PROGS= $(TARGET_FILES)
@@ -159,7 +155,7 @@ release_spec: opt
release_tests_spec: make_emakefile
$(INSTALL_DIR) "$(RELSYSDIR)"
$(INSTALL_DATA) compiler.spec compiler.cover \
- $(EMAKEFILE) $(ERL_FILES) $(CORE_FILES) "$(RELSYSDIR)"
+ $(EMAKEFILE) $(ERL_FILES) "$(RELSYSDIR)"
$(INSTALL_DATA) $(NO_OPT_ERL_FILES) $(POST_OPT_ERL_FILES) \
$(INLINE_ERL_FILES) "$(RELSYSDIR)"
chmod -R u+w "$(RELSYSDIR)"
diff --git a/lib/compiler/test/andor_SUITE.erl b/lib/compiler/test/andor_SUITE.erl
index b5408ecd8f..4d7f444c4f 100644
--- a/lib/compiler/test/andor_SUITE.erl
+++ b/lib/compiler/test/andor_SUITE.erl
@@ -33,7 +33,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[t_case,t_and_or,t_andalso,t_orelse,inside,overlap,
combined,in_case,before_and_inside_if]}].
@@ -173,7 +173,13 @@ t_and_or(Config) when is_list(Config) ->
true = (fun (X = true) when X or true or X -> true end)(True),
- ok.
+ Tuple = id({a,b}),
+ case Tuple of
+ {_,_} ->
+ {'EXIT',{badarg,_}} = (catch true and Tuple)
+ end,
+
+ ok.
t_andalso(Config) when is_list(Config) ->
Bs = [true,false],
@@ -364,6 +370,11 @@ combined(Config) when is_list(Config) ->
?line true = ?COMB(false, blurf, true),
?line true = ?COMB(true, true, blurf),
+ false = simple_comb(false, false),
+ false = simple_comb(false, true),
+ false = simple_comb(true, false),
+ true = simple_comb(true, true),
+
ok.
-undef(COMB).
@@ -390,6 +401,13 @@ comb(A, B, C) ->
end,
id(Res).
+simple_comb(A, B) ->
+ %% Use Res twice, to ensure that a careless optimization of 'not'
+ %% doesn't leave Res as a free variable.
+ Res = A andalso B,
+ _ = id(not Res),
+ Res.
+
%% Test that a boolean expression in a case expression is properly
%% optimized (in particular, that the error behaviour is correct).
in_case(Config) when is_list(Config) ->
diff --git a/lib/compiler/test/beam_utils_SUITE.erl b/lib/compiler/test/beam_utils_SUITE.erl
new file mode 100644
index 0000000000..d2e24cb5ae
--- /dev/null
+++ b/lib/compiler/test/beam_utils_SUITE.erl
@@ -0,0 +1,236 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(beam_utils_SUITE).
+
+-export([all/0,suite/0,groups/0,init_per_suite/1,end_per_suite/1,
+ init_per_group/2,end_per_group/2,
+ apply_fun/1,apply_mf/1,bs_init/1,bs_save/1,
+ is_not_killed/1,is_not_used_at/1,
+ select/1,y_catch/1]).
+-export([id/1]).
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ test_lib:recompile(?MODULE),
+ [{group,p}].
+
+groups() ->
+ [{p,[parallel],
+ [apply_fun,
+ apply_mf,
+ bs_init,
+ bs_save,
+ is_not_killed,
+ is_not_used_at,
+ select,
+ y_catch
+ ]}].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, Config) ->
+ Config.
+
+apply_fun(_Config) ->
+ 3 = do_apply_fun(false, false),
+ 3 = do_apply_fun(false, true),
+ 3 = do_apply_fun(true, false),
+ 2 = do_apply_fun(true, true),
+ ok.
+
+do_apply_fun(X, Y) ->
+ F = fun(I) -> I+1 end,
+ Arg = case X andalso id(Y) of
+ true -> 1;
+ false -> 2
+ end,
+ F(Arg).
+
+apply_mf(_Config) ->
+ ok = do_apply_mf_used({a,b}, ?MODULE, id),
+ error = do_apply_mf_used([a], ?MODULE, id),
+ {'EXIT',{{case_clause,{[],b}},_}} = (catch do_apply_mf_used({[],b}, ?MODULE, id)),
+
+ error = do_apply_mf_killed({error,[a]}, ?MODULE, id),
+ ok = do_apply_mf_killed([b], ?MODULE, id),
+ {'EXIT',{{case_clause,{a,[b]}},_}} = (catch do_apply_mf_killed({a,[b]}, ?MODULE, id)),
+ {'EXIT',{{case_clause,{error,[]}},_}} = (catch do_apply_mf_killed({error,[]}, ?MODULE, id)),
+
+ ok.
+
+do_apply_mf_used(Arg, Mod, Func) ->
+ Res = case id(Arg) of
+ {Decoded,_} when Decoded =/= [] ->
+ ok;
+ List when is_list(List) ->
+ error
+ end,
+ Mod:Func(Res).
+
+do_apply_mf_killed(Arg, Mod, Func) ->
+ Res = case id(Arg) of
+ {Tag,Decoded} when Decoded =/= [], Tag =:= error ->
+ error;
+ List when is_list(List) ->
+ ok
+ end,
+ Mod:Func(Res).
+
+bs_init(_Config) ->
+ <<7>> = do_bs_init_1([?MODULE], 7),
+ error = do_bs_init_1([?MODULE], 0.0),
+ error = do_bs_init_1([?MODULE], -43),
+ error = do_bs_init_1([?MODULE], 42),
+
+ <<>> = do_bs_init_2([]),
+ <<0:32,((1 bsl 32)-1):32>> = do_bs_init_2([0,(1 bsl 32)-1]),
+ {'EXIT',{badarg,_}} = (catch do_bs_init_2([0.5])),
+ {'EXIT',{badarg,_}} = (catch do_bs_init_2([-1])),
+ {'EXIT',{badarg,_}} = (catch do_bs_init_2([1 bsl 32])),
+ ok.
+
+do_bs_init_1([?MODULE], Sz) ->
+ if
+ is_integer(Sz), Sz >= -42, Sz < 42 ->
+ id(<<Sz:8>>);
+ true ->
+ error
+ end.
+
+do_bs_init_2(SigNos) ->
+ << <<SigNo:32>> ||
+ SigNo <- SigNos,
+ (is_integer(SigNo) andalso SigNo >= 0 andalso SigNo < (1 bsl 32)) orelse
+ erlang:error(badarg)
+ >>.
+
+
+bs_save(_Config) ->
+ {a,30,<<>>} = do_bs_save(<<1:1,30:5>>),
+ {b,127,<<>>} = do_bs_save(<<1:1,31:5,0:1,127:7>>),
+ {c,127,<<>>} = do_bs_save(<<1:1,31:5,1:1,127:7>>),
+ {c,127,<<>>} = do_bs_save(<<0:1,31:5,1:1,127:7>>),
+ {d,1024,<<>>} = do_bs_save(<<0:1,31:5>>),
+ ok.
+
+do_bs_save(<<_:1, Tag:5, T/binary>>) when Tag < 31 ->
+ {a,Tag,T};
+do_bs_save(<<1:1, 31:5, 0:1, Tag:7, T/binary>>) ->
+ {b,Tag,T};
+do_bs_save(<<_:1, 31:5, 1:1, Tag:7, T/binary>>) ->
+ {c,Tag,T};
+do_bs_save(<<_:1, 31:5, T/binary>>) ->
+ {d,1024,T}.
+
+is_not_killed(_Config) ->
+ {Pid,Ref} = spawn_monitor(fun() -> exit(banan) end),
+ receive
+ {'DOWN', Ref, process, Pid, banan} ->
+ ok
+ end,
+ receive after 0 -> ok end.
+
+is_not_used_at(_Config) ->
+ {a,b} = do_is_not_used_at(a, [{a,b}]),
+ {a,b} = do_is_not_used_at(a, [x,{a,b}]),
+ {a,b} = do_is_not_used_at(a, [{x,y},{a,b}]),
+ none = do_is_not_used_at(z, [{a,b}]),
+ none = do_is_not_used_at(a, [x]),
+ none = do_is_not_used_at(a, [{x,y}]),
+ ok.
+
+do_is_not_used_at(Key, [P|Ps]) ->
+ if
+ tuple_size(P) >= 1, element(1, P) =:= Key ->
+ P;
+ true ->
+ do_is_not_used_at(Key, Ps)
+ end;
+do_is_not_used_at(_Key, []) -> none.
+
+-record(select, {fixed=false}).
+
+select(_Config) ->
+ a = do_select(#select{}, 0, 0),
+ b = do_select(#select{}, 0, 1),
+ c = do_select(#select{fixed=true}, 0, 0),
+ c = do_select(#select{fixed=true}, 0, 1),
+ ok.
+
+do_select(Head, OldSize, BSize) ->
+ Overwrite0 =
+ if
+ OldSize =:= BSize -> same;
+ true -> true
+ end,
+ Overwrite =
+ if
+ Head#select.fixed =/= false ->
+ false;
+ true ->
+ Overwrite0
+ end,
+ if
+ Overwrite =:= same ->
+ a;
+ Overwrite ->
+ b;
+ true ->
+ c
+ end.
+
+y_catch(_Config) ->
+ ok = try
+ do_y_catch(<<"<?xmlX">>, {state}),
+ failed
+ catch
+ throw:{<<"<?xmlX">>,{state}} ->
+ ok
+ end.
+
+do_y_catch(<<"<?xml",Rest0/binary>> = Bytes, State0) ->
+ {Rest1,State1} =
+ case do_y_catch_1(Rest0, State0) of
+ false ->
+ {Bytes,State0};
+ true ->
+ {_XmlAttributes, R, S} = do_y_catch_2(Rest0),
+ {R,S}
+ end,
+ case catch id({Rest1,State1}) of
+ Other ->
+ throw(Other)
+ end.
+
+do_y_catch_1(<<_,_/binary>>, _) ->
+ false.
+
+do_y_catch_2(_) -> {a,b,c}.
+
+
+%% The identity function.
+id(I) -> I.
diff --git a/lib/compiler/test/beam_validator_SUITE.erl b/lib/compiler/test/beam_validator_SUITE.erl
index 626f89ba7a..e64dd6b9c3 100644
--- a/lib/compiler/test/beam_validator_SUITE.erl
+++ b/lib/compiler/test/beam_validator_SUITE.erl
@@ -21,16 +21,17 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
- beam_files/1,compiler_bug/1,stupid_but_valid/1,
+ compiler_bug/1,stupid_but_valid/1,
xrange/1,yrange/1,stack/1,call_last/1,merge_undefined/1,
uninit/1,unsafe_catch/1,
- dead_code/1,mult_labels/1,
+ dead_code/1,
overwrite_catchtag/1,overwrite_trytag/1,accessing_tags/1,bad_catch_try/1,
cons_guard/1,
freg_range/1,freg_uninit/1,freg_state/1,
- bin_match/1,bad_bin_match/1,bin_aligned/1,bad_dsetel/1,
+ bad_bin_match/1,bad_dsetel/1,
state_after_fault_in_catch/1,no_exception_in_catch/1,
- undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1]).
+ undef_label/1,illegal_instruction/1,failing_gc_guard_bif/1,
+ map_field_lists/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -47,18 +48,19 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
test_lib:recompile(?MODULE),
- [beam_files,{group,p}].
+ [{group,p}].
groups() ->
[{p,test_lib:parallel(),
[compiler_bug,stupid_but_valid,xrange,
yrange,stack,call_last,merge_undefined,uninit,
- unsafe_catch,dead_code,mult_labels,
+ unsafe_catch,dead_code,
overwrite_catchtag,overwrite_trytag,accessing_tags,
bad_catch_try,cons_guard,freg_range,freg_uninit,
- freg_state,bin_match,bad_bin_match,bin_aligned,bad_dsetel,
+ freg_state,bad_bin_match,bad_dsetel,
state_after_fault_in_catch,no_exception_in_catch,
- undef_label,illegal_instruction,failing_gc_guard_bif]}].
+ undef_label,illegal_instruction,failing_gc_guard_bif,
+ map_field_lists]}].
init_per_suite(Config) ->
Config.
@@ -72,33 +74,19 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
-
-beam_files(Config) when is_list(Config) ->
- ?line DataDir = proplists:get_value(data_dir, Config),
- ?line Wc = filename:join([DataDir,"..","..","*","*.beam"]),
- %% Must have at least two files here, or there will be
- %% a grammatical error in the output of the io:format/2 call below. ;-)
- ?line [_,_|_] = Fs = filelib:wildcard(Wc),
- ?line io:format("~p files\n", [length(Fs)]),
- test_lib:p_run(fun do_beam_file/1, Fs).
-
-
-do_beam_file(F) ->
- case beam_validator:file(F) of
- ok ->
- ok;
- {error,Es} ->
- io:format("File: ~s", [F]),
- io:format("Error: ~p\n", [Es]),
- error
- end.
-
compiler_bug(Config) when is_list(Config) ->
%% Check that the compiler returns an error if we try to
%% assemble one of the bad '.S' files.
- ?line Data = ?config(data_dir, Config),
- ?line File = filename:join(Data, "stack"),
- ?line error = compile:file(File, [asm,report_errors,binary,time]),
+ Data = ?config(data_dir, Config),
+ File = filename:join(Data, "compiler_bug"),
+ error = compile:file(File, [from_asm,report_errors,time]),
+
+ %% Make sure that the error was reported by
+ %% the beam_validator module.
+ {error,
+ [{"compiler_bug",
+ [{beam_validator,_}]}],
+ []} = compile:file(File, [from_asm,return_errors,time]),
ok.
%% The following code is stupid but it should compile.
@@ -134,7 +122,7 @@ yrange(Config) when is_list(Config) ->
{{move,{x,1},{y,-1}},5,
{invalid_store,{y,-1},term}}},
{{t,sum_2,2},
- {{bif,'+',{f,0},[{x,0},{y,1024}],{x,0}},8,
+ {{bif,'+',{f,0},[{x,0},{y,1024}],{x,0}},7,
{uninitialized_reg,{y,1024}}}},
{{t,sum_3,2},
{{move,{x,1},{y,1024}},5,limit}},
@@ -145,31 +133,31 @@ yrange(Config) when is_list(Config) ->
stack(Config) when is_list(Config) ->
Errors = do_val(stack, Config),
- ?line [{{t,a,2},{return,11,{stack_frame,2}}},
- {{t,b,2},{{deallocate,2},4,{allocated,none}}},
- {{t,c,2},{{deallocate,2},12,{allocated,none}}},
- {{t,d,2},
- {{allocate,2,2},5,{existing_stack_frame,{size,2}}}},
- {{t,e,2},{{deallocate,5},6,{allocated,2}}},
- {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}},
- {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}}] = Errors,
+ [{{t,a,2},{return,9,{stack_frame,2}}},
+ {{t,b,2},{{deallocate,2},4,{allocated,none}}},
+ {{t,bad_1,0},{{allocate_zero,2,10},4,{{x,9},not_live}}},
+ {{t,bad_2,0},{{move,{y,0},{x,0}},5,{unassigned,{y,0}}}},
+ {{t,c,2},{{deallocate,2},10,{allocated,none}}},
+ {{t,d,2},
+ {{allocate,2,2},5,{existing_stack_frame,{size,2}}}},
+ {{t,e,2},{{deallocate,5},6,{allocated,2}}}] = Errors,
ok.
call_last(Config) when is_list(Config) ->
Errors = do_val(call_last, Config),
- ?line [{{t,a,1},{{call_last,1,{f,8},2},11,{allocated,1}}},
- {{t,b,1},
- {{call_ext_last,2,{extfunc,lists,seq,2},2},
- 11,
- {allocated,1}}}] = Errors,
+ [{{t,a,1},{{call_last,1,{f,8},2},9,{allocated,1}}},
+ {{t,b,1},
+ {{call_ext_last,2,{extfunc,lists,seq,2},2},
+ 10,
+ {allocated,1}}}] = Errors,
ok.
merge_undefined(Config) when is_list(Config) ->
Errors = do_val(merge_undefined, Config),
- ?line [{{t,handle_call,2},
- {{call_ext,2,{extfunc,debug,filter,2}},
- 22,
- {uninitialized_reg,{y,0}}}}] = Errors,
+ [{{t,handle_call,2},
+ {{call_ext,2,{extfunc,debug,filter,2}},
+ 22,
+ {uninitialized_reg,{y,0}}}}] = Errors,
ok.
uninit(Config) when is_list(Config) ->
@@ -178,10 +166,10 @@ uninit(Config) when is_list(Config) ->
[{{t,sum_1,2},
{{move,{y,0},{x,0}},5,{uninitialized_reg,{y,0}}}},
{{t,sum_2,2},
- {{call,1,{f,10}},6,{uninitialized_reg,{y,0}}}},
+ {{call,1,{f,8}},5,{uninitialized_reg,{y,0}}}},
{{t,sum_3,2},
{{bif,'+',{f,0},[{x,0},{y,0}],{x,0}},
- 7,
+ 6,
{unassigned,{y,0}}}}] = Errors,
ok.
@@ -190,7 +178,7 @@ unsafe_catch(Config) when is_list(Config) ->
?line
[{{t,small,2},
{{bs_put_integer,{f,0},{integer,16},1,
- {field_flags,[aligned,unsigned,big]},{y,0}},
+ {field_flags,[unsigned,big]},{y,0}},
20,
{unassigned,{y,0}}}}] = Errors,
ok.
@@ -199,10 +187,6 @@ dead_code(Config) when is_list(Config) ->
[] = do_val(dead_code, Config),
ok.
-mult_labels(Config) when is_list(Config) ->
- [] = do_val(erl_prim_loader, Config, ".beam"),
- ok.
-
overwrite_catchtag(Config) when is_list(Config) ->
Errors = do_val(overwrite_catchtag, Config),
?line
@@ -214,34 +198,34 @@ overwrite_trytag(Config) when is_list(Config) ->
Errors = do_val(overwrite_trytag, Config),
?line
[{{overwrite_trytag,foo,1},
- {{kill,{y,2}},9,{trytag,_}}}] = Errors,
+ {{kill,{y,2}},8,{trytag,_}}}] = Errors,
ok.
accessing_tags(Config) when is_list(Config) ->
Errors = do_val(accessing_tags, Config),
- ?line
- [{{accessing_tags,foo,1},
- {{move,{y,0},{x,0}},6,{catchtag,_}}},
- {{accessing_tags,bar,1},
- {{move,{y,0},{x,0}},6,{trytag,_}}}] = Errors,
+ [{{accessing_tags,bar,1},
+ {{move,{y,0},{x,0}},6,{trytag,_}}},
+ {{accessing_tags,foo,1},
+ {{move,{y,0},{x,0}},6,{catchtag,_}}}] = Errors,
ok.
bad_catch_try(Config) when is_list(Config) ->
Errors = do_val(bad_catch_try, Config),
- ?line [{{bad_catch_try,bad_1,1},
- {{'catch',{x,0},{f,3}},
- 5,{invalid_store,{x,0},{catchtag,[3]}}}},
- {{bad_catch_try,bad_2,1},
- {{catch_end,{x,9}},
- 8,{source_not_y_reg,{x,9}}}},
- {{bad_catch_try,bad_3,1},
- {{catch_end,{y,1}},9,{bad_type,{atom,kalle}}}},
- {{bad_catch_try,bad_4,1},
- {{'try',{x,0},{f,15}},5,{invalid_store,{x,0},{trytag,[15]}}}},
- {{bad_catch_try,bad_5,1},
- {{try_case,{y,1}},12,{bad_type,term}}},
- {{bad_catch_try,bad_6,1},
- {{try_end,{y,1}},8,{bad_type,{integer,1}}}}] = Errors,
+ [{{bad_catch_try,bad_1,1},
+ {{'catch',{x,0},{f,3}},
+ 5,{invalid_store,{x,0},{catchtag,[3]}}}},
+ {{bad_catch_try,bad_2,1},
+ {{catch_end,{x,9}},
+ 8,{source_not_y_reg,{x,9}}}},
+ {{bad_catch_try,bad_3,1},
+ {{catch_end,{y,1}},9,{bad_type,{atom,kalle}}}},
+ {{bad_catch_try,bad_4,1},
+ {{'try',{x,0},{f,15}},5,{invalid_store,{x,0},{trytag,[15]}}}},
+ {{bad_catch_try,bad_5,1},
+ {{try_case,{y,1}},12,{bad_type,term}}},
+ {{bad_catch_try,bad_6,1},
+ {{move,{integer,1},{y,1}},7,
+ {invalid_store,{y,1},{integer,1}}}}] = Errors,
ok.
cons_guard(Config) when is_list(Config) ->
@@ -310,66 +294,79 @@ freg_state(Config) when is_list(Config) ->
{fclearerror,5,{bad_floating_point_state,cleared}}}] = Errors,
ok.
-bin_match(Config) when is_list(Config) ->
- Errors = do_val(bin_match, Config),
- ?line
- [{{t,t,1},{{bs_save,0},4,no_bs_match_state}},
- {{t,x,1},{{bs_restore,1},16,{no_save_point,1}}}] = Errors,
- ok.
-
bad_bin_match(Config) when is_list(Config) ->
[{{t,t,1},{return,5,{match_context,{x,0}}}}] =
do_val(bad_bin_match, Config),
ok.
-bin_aligned(Config) when is_list(Config) ->
- Errors = do_val(bin_aligned, Config),
- ?line
- [{{t,decode,1},
- {{bs_put_integer,{f,0},
- {integer,5},
- 1,
- {field_flags,[unsigned,big,aligned]},
- {integer,0}},
- 10,
- {aligned_flag_set,{bits,3}}}}] = Errors,
- ok.
-
bad_dsetel(Config) when is_list(Config) ->
Errors = do_val(bad_dsetel, Config),
?line
[{{t,t,1},
{{set_tuple_element,{x,1},{x,0},1},
- 15,
+ 17,
illegal_context_for_set_tuple_element}}] = Errors,
ok.
state_after_fault_in_catch(Config) when is_list(Config) ->
Errors = do_val(state_after_fault_in_catch, Config),
- [{{t,foo,1},
- {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}},
- {{state_after_fault_in_catch,if_end,1},
+ [{{state_after_fault_in_catch,badmatch,1},
{{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}},
{{state_after_fault_in_catch,case_end,1},
{{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}},
- {{state_after_fault_in_catch,badmatch,1},
- {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}}] = Errors,
+ {{state_after_fault_in_catch,if_end,1},
+ {{move,{x,1},{x,0}},9,{uninitialized_reg,{x,1}}}},
+ {{t,foo,1},
+ {{move,{x,1},{x,0}},10,{uninitialized_reg,{x,1}}}}] = Errors,
ok.
no_exception_in_catch(Config) when is_list(Config) ->
Errors = do_val(no_exception_in_catch, Config),
[{{no_exception_in_catch,nested_of_1,4},
- {{move,{x,3},{x,0}},91,{uninitialized_reg,{x,3}}}}] = Errors,
+ {{move,{x,3},{x,0}},88,{uninitialized_reg,{x,3}}}}] = Errors,
ok.
undef_label(Config) when is_list(Config) ->
- Errors = do_val(undef_label, Config),
+ M = {undef_label,
+ [{t,1}],
+ [],
+ [{function,t,1,2,
+ [{label,1},
+ {func_info,{atom,undef_label},{atom,t},1},
+ {label,2},
+ {test,is_eq_exact,{f,42},[{x,0},{atom,x}]},
+ {move,{atom,ok},{x,0}},
+ return]},
+ {function,x,1,17,
+ [{label,3},
+ {func_info,{atom,undef_label},{atom,x},1},
+ {label,4},
+ return]}],
+ 5},
+ Errors = beam_val(M),
[{{undef_label,t,1},{undef_labels,[42]}},
{{undef_label,x,1},{return,4,no_entry_label}}] = Errors,
ok.
illegal_instruction(Config) when is_list(Config) ->
- Errors = do_val(illegal_instruction, Config),
+ M = {illegal_instruction,
+ [{t,1},{x,1},{y,0}],
+ [],
+ [{function,t,1,2,
+ [{label,1},
+ {func_info,{atom,illegal_instruction},{atom,t},1},
+ {label,2},
+ {my_illegal_instruction,{x,0}},
+ return]},
+ {function,x,1,4,
+ [{label,3},
+ bad_func_info,
+ {label,4},
+ {my_illegal_instruction,{x,0}},
+ return]},
+ {function,y,0,17,[]}],
+ 5},
+ Errors = beam_val(M),
[{{illegal_instruction,t,1},
{{my_illegal_instruction,{x,0}},4,unknown_instruction}},
{{'_',x,1},{bad_func_info,1,illegal_instruction}},
@@ -407,19 +404,40 @@ process_request_foo(_) ->
process_request_bar(Pid, [Response]) when is_pid(Pid) ->
Response.
+map_field_lists(Config) ->
+ Errors = do_val(map_field_lists, Config),
+ [{{map_field_lists,x,1},
+ {{test,has_map_fields,{f,1},{x,0},
+ {list,[{atom,a},{atom,a}]}},
+ 5,
+ keys_not_unique}},
+ {{map_field_lists,y,1},
+ {{test,has_map_fields,{f,3},{x,0},{list,[]}},
+ 5,
+ empty_field_list}}
+ ] = Errors.
%%%-------------------------------------------------------------------------
-do_val(Name, Config) ->
- do_val(Name, Config, ".S").
-
-do_val(Name, Config, Type) ->
- ?line Data = ?config(data_dir, Config),
- ?line File = filename:join(Data, atom_to_list(Name)++Type),
- ?line case beam_validator:file(File) of
- {error,Errors} ->
- ?line io:format("~p:~n~s",
- [File,beam_validator:format_error(Errors)]),
- Errors;
- ok -> []
- end.
+do_val(Mod, Config) ->
+ Data = ?config(data_dir, Config),
+ Base = atom_to_list(Mod),
+ File = filename:join(Data, Base),
+ case compile:file(File, [from_asm,no_postopt,return_errors]) of
+ {error,L,[]} ->
+ [{Base,Errors0}] = L,
+ Errors = [E || {beam_validator,E} <- Errors0],
+ _ = [io:put_chars(beam_validator:format_error(E)) ||
+ E <- Errors],
+ Errors;
+ {ok,Mod} ->
+ []
+ end.
+
+beam_val(M) ->
+ Name = atom_to_list(element(1, M)),
+ {error,[{Name,Errors0}]} = beam_validator:module(M, []),
+ Errors = [E || {beam_validator,E} <- Errors0],
+ _ = [io:put_chars(beam_validator:format_error(E)) ||
+ E <- Errors],
+ Errors.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/bad_catch_try.S b/lib/compiler/test/beam_validator_SUITE_data/bad_catch_try.S
index 2a53f0dd93..6035f23506 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/bad_catch_try.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/bad_catch_try.S
@@ -63,11 +63,11 @@
{label,9}.
{func_info,{atom,bad_catch_try},{atom,bad_3},1}.
{label,10}.
- {allocate,1,1}.
+ {allocate,2,1}.
+ {move,{atom,kalle},{y,1}}.
{'catch',{y,0},{f,11}}.
{call,1,{f,26}}.
{label,11}.
- {move,{atom,kalle},{y,1}}.
{catch_end,{y,1}}.
{test,is_tuple,{f,12},[{x,0}]}.
{test,test_arity,{f,12},[{x,0},2]}.
@@ -106,7 +106,7 @@
{label,17}.
{func_info,{atom,bad_catch_try},{atom,bad_5},1}.
{label,18}.
- {allocate_zero,1,1}.
+ {allocate_zero,2,1}.
{'try',{y,0},{f,19}}.
{call,1,{f,26}}.
{try_end,{y,0}}.
@@ -131,7 +131,7 @@
{'try',{y,0},{f,23}}.
{call,1,{f,26}}.
{move,{integer,1},{y,1}}.
- {try_end,{y,1}}.
+ {try_end,{y,0}}.
{move,{atom,ok},{x,0}}.
{jump,{f,24}}.
{label,23}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S
index 279b2fa97f..9630d73a93 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/bad_dsetel.S
@@ -1,4 +1,4 @@
-{module, t}. %% version = 0
+{module, bad_dsetel}. %% version = 0
{exports, [{module_info,0},{module_info,1},{t,1}]}.
@@ -21,7 +21,9 @@
{move,{integer,3},{x,0}}.
{call_ext,3,{extfunc,erlang,setelement,3}}.
{test_heap,6,1}.
- {put_string,3,{string,"abc"},{x,1}}.
+ {put_list,{integer,99},nil,{x,1}}.
+ {put_list,{integer,98},{x,1},{x,1}}.
+ {put_list,{integer,97},{x,1},{x,1}}.
{set_tuple_element,{x,1},{x,0},1}.
{'%live',1}.
{deallocate,0}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S b/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S
deleted file mode 100644
index 2f353fbd25..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/bin_aligned.S
+++ /dev/null
@@ -1,47 +0,0 @@
-{module, t}. %% version = 0
-
-{exports, [{decode,1},{module_info,0},{module_info,1}]}.
-
-{attributes, []}.
-
-{labels, 7}.
-
-
-{function, decode, 1, 2}.
- {label,1}.
- {func_info,{atom,t},{atom,decode},1}.
- {label,2}.
- {move,{integer,1},{x,1}}.
- {bif,size,{f,0},[{x,0}],{x,2}}.
- {bs_add,{f,0},[{x,1},{x,2},1],{x,1}}.
- {bs_init2,{f,0},{x,1},0,1,{field_flags,[]},{x,1}}.
- {bs_put_integer,{f,0},
- {integer,3},
- 1,
- {field_flags,[aligned,unsigned,big]},
- {integer,0}}.
- {bs_put_binary,{f,0},{atom,all},8,{field_flags,[unsigned,big]},{x,0}}.
- {bs_put_integer,{f,0},
- {integer,5},
- 1,
- {field_flags,[unsigned,big,aligned]},
- {integer,0}}.
- {move,{x,1},{x,0}}.
- return.
-
-
-{function, module_info, 0, 4}.
- {label,3}.
- {func_info,{atom,t},{atom,module_info},0}.
- {label,4}.
- {move,{atom,t},{x,0}}.
- {call_ext_only,1,{extfunc,erlang,get_module_info,1}}.
-
-
-{function, module_info, 1, 6}.
- {label,5}.
- {func_info,{atom,t},{atom,module_info},1}.
- {label,6}.
- {move,{x,0},{x,1}}.
- {move,{atom,t},{x,0}}.
- {call_ext_only,2,{extfunc,erlang,get_module_info,2}}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S b/lib/compiler/test/beam_validator_SUITE_data/bin_match.S
deleted file mode 100644
index 96df0f7933..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/bin_match.S
+++ /dev/null
@@ -1,64 +0,0 @@
-{module, bin_match}. %% version = 0
-
-{exports, [{t,1}]}.
-
-{attributes, []}.
-
-{labels, 8}.
-
-
-{function, t, 1, 2}.
- {label,1}.
- {func_info,{atom,t},{atom,t},1}.
- {label,2}.
-%% {test,bs_start_match,{f,1},[{x,0}]}.
- {bs_save,0}.
- {test,bs_get_integer,
- {f,3},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_get_integer,
- {f,3},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}.
- {test,bs_test_tail,{f,3},[0]}.
- {test_heap,3,3}.
- {put_tuple,2,{x,0}}.
- {put,{x,1}}.
- {put,{x,2}}.
- {'%live',1}.
- return.
- {label,3}.
- {bs_restore,0}.
- {test,bs_get_integer,
- {f,1},
- [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_test_tail,{f,1},[0]}.
- {move,{x,1},{x,0}}.
- return.
-
-{function, x, 1, 5}.
- {label,4}.
- {func_info,{atom,t},{atom,x},1}.
- {label,5}.
- {test,bs_start_match,{f,4},[{x,0}]}.
- {bs_save,0}.
- {test,bs_get_integer,
- {f,6},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_get_integer,
- {f,6},
- [{integer,8},1,{field_flags,[aligned,unsigned,big]},{x,2}]}.
- {test,bs_test_tail,{f,6},[0]}.
- {test_heap,3,3}.
- {put_tuple,2,{x,0}}.
- {put,{x,1}}.
- {put,{x,2}}.
- {'%live',1}.
- return.
- {label,6}.
- {bs_restore,1}.
- {test,bs_get_integer,
- {f,4},
- [{integer,32},1,{field_flags,[aligned,unsigned,big]},{x,1}]}.
- {test,bs_test_tail,{f,4},[0]}.
- {move,{x,1},{x,0}}.
- return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S b/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S
new file mode 100644
index 0000000000..ba27bf5c47
--- /dev/null
+++ b/lib/compiler/test/beam_validator_SUITE_data/compiler_bug.S
@@ -0,0 +1,38 @@
+{module, compiler_bug}. %% version = 0
+
+{exports, [{module_info,0},{module_info,1},{sum,2}]}.
+
+{attributes, []}.
+
+{labels, 7}.
+
+
+{function, sum, 2, 2}.
+ {label,1}.
+ {line,[{location,"compiler_bug.erl",4}]}.
+ {func_info,{atom,compiler_bug},{atom,sum},2}.
+ {label,2}.
+ {line,[{location,"compiler_bug.erl",5}]}.
+ {gc_bif,'+',{f,0},2,[{y,0},{y,1}],{x,0}}.
+ return.
+
+
+{function, module_info, 0, 4}.
+ {label,3}.
+ {line,[]}.
+ {func_info,{atom,compiler_bug},{atom,module_info},0}.
+ {label,4}.
+ {move,{atom,compiler_bug},{x,0}}.
+ {line,[]}.
+ {call_ext_only,1,{extfunc,erlang,get_module_info,1}}.
+
+
+{function, module_info, 1, 6}.
+ {label,5}.
+ {line,[]}.
+ {func_info,{atom,compiler_bug},{atom,module_info},1}.
+ {label,6}.
+ {move,{x,0},{x,1}}.
+ {move,{atom,compiler_bug},{x,0}}.
+ {line,[]}.
+ {call_ext_only,2,{extfunc,erlang,get_module_info,2}}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S
index f964f98fba..c114664ba0 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/dead_code.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/dead_code.S
@@ -1,10 +1,10 @@
{module, dead_code}. %% version = 0
-{exports, [{execute,0},{module_info,0},{module_info,1}]}.
+{exports, [{execute,0}]}.
{attributes, []}.
-{labels, 10}.
+{labels, 6}.
{function, execute, 0, 2}.
@@ -12,7 +12,6 @@
{func_info,{atom,dead_code},{atom,execute},0}.
{label,2}.
{allocate,0,0}.
- {'%live',0}.
{call_ext,0,{extfunc,foo,fie,0}}.
{test,is_ne,{f,4},[{x,0},{integer,0}]}.
{test,is_ne,{f,4},[{x,0},{integer,1}]}.
@@ -22,27 +21,7 @@
{case_end,{x,0}}.
{label,4}.
{move,{atom,ok},{x,0}}.
- {'%live',1}.
{deallocate,0}.
return.
- {'%','Moved code'}.
{label,5}.
{case_end,{x,0}}.
-
-
-{function, module_info, 0, 7}.
- {label,6}.
- {func_info,{atom,dead_code},{atom,module_info},0}.
- {label,7}.
- {move,nil,{x,0}}.
- {'%live',1}.
- return.
-
-
-{function, module_info, 1, 9}.
- {label,8}.
- {func_info,{atom,dead_code},{atom,module_info},1}.
- {label,9}.
- {move,nil,{x,0}}.
- {'%live',1}.
- return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam b/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam
deleted file mode 100644
index dd58a88e42..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/erl_prim_loader.beam
+++ /dev/null
Binary files differ
diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S
index ee583a923e..b3ebff3ade 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/freg_range.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/freg_range.S
@@ -1,10 +1,10 @@
{module, freg_range}. %% version = 0
-{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}.
{attributes, []}.
-{labels, 8}.
+{labels, 9}.
{function, sum_1, 2, 2}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S
index ff4d7548ae..7466763482 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/freg_state.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/freg_state.S
@@ -1,6 +1,6 @@
{module, freg_state}. %% version = 0
-{exports, []}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2},{sum_5,2}]}.
{attributes, []}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S
index f8d805d9ec..71e833446a 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/freg_uninit.S
@@ -1,10 +1,10 @@
{module, freg_uninit}. %% version = 0
-{exports, []}.
+{exports, [{sum_1,2},{sum_2,2}]}.
{attributes, []}.
-{labels, 8}.
+{labels, 7}.
{function, sum_1, 2, 2}.
@@ -14,7 +14,6 @@
{fconv,{x,0},{fr,0}}.
fclearerror.
{bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}.
- {'%live',1}.
return.
@@ -26,7 +25,12 @@
{fconv,{x,1},{fr,1}}.
fclearerror.
{fcheckerror,{f,0}}.
- {call,2,{f,8}}.
+ {call,2,{f,6}}.
{bif,fadd,{f,0},[{fr,0},{fr,1}],{fr,0}}.
- {'%live',1}.
+ return.
+
+{function, foo, 2, 6}.
+ {label,5}.
+ {func_info,{atom,t},{atom,foo},2}.
+ {label,6}.
return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S b/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S
deleted file mode 100644
index d6e92abc71..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/illegal_instruction.S
+++ /dev/null
@@ -1,26 +0,0 @@
-{module, illegal_instruction}. %% version = 0
-
-{exports, []}.
-
-{attributes, []}.
-
-{labels, 7}.
-
-
-{function, t, 1, 2}.
- {label,1}.
- {func_info,{atom,illegal_instruction},{atom,t},1}.
- {label,2}.
- {my_illegal_instruction,{x,0}}.
- return.
-
-
-{function, x, 1, 4}.
- {label,3}.
- bad_func_info.
- {label,4}.
- {my_illegal_instruction,{x,0}}.
- return.
-
-{function, y, 0, 17}.
- \ No newline at end of file
diff --git a/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S
new file mode 100644
index 0000000000..5e7ccc1e5d
--- /dev/null
+++ b/lib/compiler/test/beam_validator_SUITE_data/map_field_lists.S
@@ -0,0 +1,29 @@
+{module, map_field_lists}. %% version = 0
+
+{exports, [{x,1},{y,1}]}.
+
+{attributes, []}.
+
+{labels, 5}.
+
+
+{function, x, 1, 2}.
+ {label,1}.
+ {line,[{location,"map_field_lists.erl",4}]}.
+ {func_info,{atom,map_field_lists},{atom,x},1}.
+ {label,2}.
+ {test,is_map,{f,1},[{x,0}]}.
+ {test,has_map_fields,{f,1},{x,0},{list,[{atom,a},{atom,a}]}}.
+ {move,{atom,ok},{x,0}}.
+ return.
+
+
+{function, y, 1, 4}.
+ {label,3}.
+ {line,[{location,"map_field_lists.erl",7}]}.
+ {func_info,{atom,map_field_lists},{atom,y},1}.
+ {label,4}.
+ {test,is_map,{f,3},[{x,0}]}.
+ {test,has_map_fields,{f,3},{x,0},{list,[]}}.
+ {move,{atom,ok},{x,0}}.
+ return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S
index 3d76127824..481d55045d 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/merge_undefined.S
@@ -22,7 +22,8 @@
{label,4}.
{allocate_heap,1,6,2}.
{move,{x,1},{y,0}}.
- {put_string,2,{string,"~p"},{x,0}}.
+ {put_list,{integer,112},nil,{x,0}}.
+ {put_list,{integer,126},{x,0},{x,0}}.
{put_list,{y,0},nil,{x,1}}.
{'%live',2}.
{call_ext,2,{extfunc,io,format,2}}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S
index e08a718a39..1a5b417a5f 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/no_exception_in_catch.S
@@ -26,7 +26,7 @@
{call_ext,1,{extfunc,erlang,erase,1}}.
{move,{atom,nested},{x,0}}.
{call_ext,1,{extfunc,erlang,erase,1}}.
- {bif,self,nofail,[],{x,0}}.
+ {bif,self,{f,0},[],{x,0}}.
{'try',{y,8},{f,13}}.
{'try',{y,7},{f,11}}.
{'try',{y,6},{f,9}}.
@@ -34,7 +34,7 @@
%% Because the following instructions can't possibly throw an exception,
%% label 7 used to get no state. Now the try_end itself will save the state.
{move,{x,0},{y,4}}.
- {bif,self,nofail,[],{x,0}}.
+ {bif,self,{f,0},[],{x,0}}.
{'%live',1}.
{try_end,{y,5}}.
{test,is_eq_exact,{f,15},[{x,0},{y,4}]}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/stack.S b/lib/compiler/test/beam_validator_SUITE_data/stack.S
index 244c22a2f9..e4356a9d00 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/stack.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/stack.S
@@ -1,10 +1,10 @@
{module, stack}. %% version = 0
-{exports, [{a,2},{b,2},{c,2},{d,2},{e,2}]}.
+{exports, [{a,2},{b,2},{c,2},{d,2},{e,2},{bad_1,0},{bad_2,0},{foo,0}]}.
{attributes, []}.
-{labels, 21}.
+{labels, 17}.
{function, a, 2, 2}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/state_after_fault_in_catch.S b/lib/compiler/test/beam_validator_SUITE_data/state_after_fault_in_catch.S
index 8e27347ed5..c3656d6218 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/state_after_fault_in_catch.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/state_after_fault_in_catch.S
@@ -14,7 +14,7 @@
{allocate,1,0}.
{'catch',{y,0},{f,3}}.
{move,{atom,apa},{x,0}}.
- {call_ext,1,{extfunc,erlang,fault,1}}.
+ {call_ext,1,{extfunc,erlang,error,1}}.
{label,3}.
{catch_end,{y,0}}.
{move,{x,1},{x,0}}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S b/lib/compiler/test/beam_validator_SUITE_data/undef_label.S
deleted file mode 100644
index dd29066bf4..0000000000
--- a/lib/compiler/test/beam_validator_SUITE_data/undef_label.S
+++ /dev/null
@@ -1,22 +0,0 @@
-{module, undef_label}. %% version = 0
-
-{exports, []}.
-
-{attributes, []}.
-
-{labels, 7}.
-
-
-{function, t, 1, 2}.
- {label,1}.
- {func_info,{atom,undef_label},{atom,t},1}.
- {label,2}.
- {test,is_eq_exact,{f,42},[{x,0},{atom,x}]}.
- {move,{atom,ok},{x,0}}.
- return.
-
-{function, x, 1, 17}.
- {label,3}.
- {func_info,{atom,undef_label},{atom,x},1}.
- {label,4}.
- return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/uninit.S b/lib/compiler/test/beam_validator_SUITE_data/uninit.S
index 1a45c31411..9a66f4f7d6 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/uninit.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/uninit.S
@@ -1,9 +1,11 @@
{module, uninit}. %% version = 0
-{exports, []}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2}]}.
{attributes, []}.
+{labels, 9}.
+
{function, sum_1, 2, 2}.
{label,1}.
{func_info,{atom,t},{atom,sum_1},2}.
@@ -11,7 +13,7 @@
{allocate,1,2}.
{move,{y,0},{x,0}}.
{'%live',1}.
- {call,1,{f,10}}.
+ {call,1,{f,8}}.
{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}.
{'%live',1}.
{deallocate,1}.
@@ -23,7 +25,7 @@
{label,4}.
{allocate,1,2}.
{'%live',1}.
- {call,1,{f,10}}.
+ {call,1,{f,8}}.
{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}.
{'%live',1}.
{deallocate,1}.
@@ -35,14 +37,14 @@
{label,6}.
{allocate_zero,1,2}.
{'%live',1}.
- {call,1,{f,10}}.
+ {call,1,{f,8}}.
{bif,'+',{f,0},[{x,0},{y,0}],{x,0}}.
{'%live',1}.
{deallocate,1}.
return.
-{function, id, 1, 10}.
- {label,9}.
+{function, id, 1, 8}.
+ {label,7}.
{func_info,{atom,t},{atom,id},1}.
- {label,10}.
+ {label,8}.
return.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/unsafe_catch.S b/lib/compiler/test/beam_validator_SUITE_data/unsafe_catch.S
index 500ac11377..f7d3f805b3 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/unsafe_catch.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/unsafe_catch.S
@@ -17,7 +17,7 @@
{bs_put_integer,{f,0},
{integer,8},
1,
- {field_flags,[aligned,unsigned,big]},
+ {field_flags,[unsigned,big]},
{x,0}}.
{move,{x,1},{y,0}}.
{move,{x,2},{x,0}}.
@@ -34,7 +34,7 @@
{bs_put_integer,{f,0},
{integer,16},
1,
- {field_flags,[aligned,unsigned,big]},
+ {field_flags,[unsigned,big]},
{y,0}}.
{move,{x,0},{y,0}}.
{move,{x,1},{x,0}}.
@@ -55,12 +55,12 @@
{bs_put_binary,{f,0},
{atom,all},
8,
- {field_flags,[aligned,unsigned,big]},
+ {field_flags,[unsigned,big]},
{y,0}}.
{bs_put_binary,{f,0},
{atom,all},
8,
- {field_flags,[aligned,unsigned,big]},
+ {field_flags,[unsigned,big]},
{x,0}}.
{move,{x,1},{x,0}}.
{deallocate,2}.
diff --git a/lib/compiler/test/beam_validator_SUITE_data/xrange.S b/lib/compiler/test/beam_validator_SUITE_data/xrange.S
index 3abbdffbc2..c6f20288f7 100644
--- a/lib/compiler/test/beam_validator_SUITE_data/xrange.S
+++ b/lib/compiler/test/beam_validator_SUITE_data/xrange.S
@@ -1,10 +1,10 @@
{module, xrange}. %% version = 0
-{exports, [{module_info,0},{module_info,1},{prod,2},{sum,2},{sum_prod,3}]}.
+{exports, [{sum_1,2},{sum_2,2},{sum_3,2},{sum_4,2}]}.
{attributes, []}.
-{labels, 8}.
+{labels, 9}.
{function, sum_1, 2, 2}.
diff --git a/lib/compiler/test/bs_bit_binaries_SUITE.erl b/lib/compiler/test/bs_bit_binaries_SUITE.erl
index 8609a490f5..2433e7621e 100644
--- a/lib/compiler/test/bs_bit_binaries_SUITE.erl
+++ b/lib/compiler/test/bs_bit_binaries_SUITE.erl
@@ -37,7 +37,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[misc,horrid_match,test_bitstr,test_bit_size,
asymmetric_tests,big_asymmetric_tests,
binary_to_and_from_list,big_binary_to_and_from_list,
diff --git a/lib/compiler/test/bs_construct_SUITE.erl b/lib/compiler/test/bs_construct_SUITE.erl
index ce39de2a82..9df874c387 100644
--- a/lib/compiler/test/bs_construct_SUITE.erl
+++ b/lib/compiler/test/bs_construct_SUITE.erl
@@ -39,7 +39,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[two,test1,fail,float_bin,in_guard,in_catch,
nasty_literals,side_effect,opt,otp_7556,float_arith,
otp_8054]}].
diff --git a/lib/compiler/test/bs_match_SUITE.erl b/lib/compiler/test/bs_match_SUITE.erl
index 149b9bbb8f..b54db06339 100644
--- a/lib/compiler/test/bs_match_SUITE.erl
+++ b/lib/compiler/test/bs_match_SUITE.erl
@@ -24,7 +24,7 @@
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
fun_shadow/1,int_float/1,otp_5269/1,null_fields/1,wiger/1,
- bin_tail/1,save_restore/1,shadowed_size_var/1,
+ bin_tail/1,save_restore/1,
partitioned_bs_match/1,function_clause/1,
unit/1,shared_sub_bins/1,bin_and_float/1,
dec_subidentifiers/1,skip_optional_tag/1,
@@ -34,7 +34,8 @@
otp_7188/1,otp_7233/1,otp_7240/1,otp_7498/1,
match_string/1,zero_width/1,bad_size/1,haystack/1,
cover_beam_bool/1,matched_out_size/1,follow_fail_branch/1,
- no_partition/1,calling_a_binary/1]).
+ no_partition/1,calling_a_binary/1,binary_in_map/1,
+ match_string_opt/1]).
-export([coverage_id/1,coverage_external_ignore/2]).
@@ -48,9 +49,9 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[fun_shadow,int_float,otp_5269,null_fields,wiger,
- bin_tail,save_restore,shadowed_size_var,
+ bin_tail,save_restore,
partitioned_bs_match,function_clause,unit,
shared_sub_bins,bin_and_float,dec_subidentifiers,
skip_optional_tag,wfbm,degenerated_match,bs_sum,
@@ -59,7 +60,8 @@ groups() ->
matching_and_andalso,otp_7188,otp_7233,otp_7240,
otp_7498,match_string,zero_width,bad_size,haystack,
cover_beam_bool,matched_out_size,follow_fail_branch,
- no_partition,calling_a_binary]}].
+ no_partition,calling_a_binary,binary_in_map,
+ match_string_opt]}].
init_per_suite(Config) ->
@@ -322,16 +324,6 @@ bad_float_unpack_match(<<F:64/float>>) -> F;
bad_float_unpack_match(<<I:64/integer-signed>>) -> I.
-shadowed_size_var(Config) when is_list(Config) ->
- ?line PrivDir = ?config(priv_dir, Config),
- ?line Dir = filename:dirname(code:which(?MODULE)),
- ?line Core = filename:join(Dir, "bs_shadowed_size_var"),
- ?line Opts = [from_core,{outdir,PrivDir}|test_lib:opt_opts(?MODULE)],
- ?line io:format("~p", [Opts]),
- ?line {ok,Mod} = c:c(Core, Opts),
- ?line [42|<<"abcde">>] = Mod:filter_essentials([<<42:32>>|<<5:32,"abcde">>]),
- ok.
-
partitioned_bs_match(Config) when is_list(Config) ->
?line <<1,2,3>> = partitioned_bs_match(blurf, <<42,1,2,3>>),
?line error = partitioned_bs_match(10, <<7,8,15,13>>),
@@ -368,11 +360,20 @@ partitioned_bs_match_3(Var, <<_>>) -> Var;
partitioned_bs_match_3(1, 2) -> ok.
function_clause(Config) when is_list(Config) ->
- ?line ok = function_clause_1(<<0,7,0,7,42>>),
- ?line fc(function_clause_1, [<<0,1,2,3>>],
- catch function_clause_1(<<0,1,2,3>>)),
- ?line fc(function_clause_1, [<<0,1,2,3>>],
- catch function_clause_1(<<0,7,0,1,2,3>>)),
+ ok = function_clause_1(<<0,7,0,7,42>>),
+ fc(function_clause_1, [<<0,1,2,3>>],
+ catch function_clause_1(<<0,1,2,3>>)),
+ fc(function_clause_1, [<<0,1,2,3>>],
+ catch function_clause_1(<<0,7,0,1,2,3>>)),
+
+ ok = function_clause_2(<<0,7,0,7,42>>),
+ ok = function_clause_2(<<255>>),
+ ok = function_clause_2(<<13:4>>),
+ fc(function_clause_2, [<<0,1,2,3>>],
+ catch function_clause_2(<<0,1,2,3>>)),
+ fc(function_clause_2, [<<0,1,2,3>>],
+ catch function_clause_2(<<0,7,0,1,2,3>>)),
+
ok.
function_clause_1(<<0:8,7:8,T/binary>>) ->
@@ -380,6 +381,13 @@ function_clause_1(<<0:8,7:8,T/binary>>) ->
function_clause_1(<<_:8>>) ->
ok.
+function_clause_2(<<0:8,7:8,T/binary>>) ->
+ function_clause_2(T);
+function_clause_2(<<_:8>>) ->
+ ok;
+function_clause_2(<<_:4>>) ->
+ ok.
+
unit(Config) when is_list(Config) ->
?line 42 = peek1(<<42>>),
?line 43 = peek1(<<43,1,2>>),
@@ -1189,6 +1197,34 @@ call_binary(<<>>, Acc) ->
call_binary(<<H,T/bits>>, Acc) ->
T(<<Acc/binary,H>>).
+binary_in_map(Config) when is_list(Config) ->
+ ok = match_binary_in_map(#{key => <<42:8>>}),
+ {'EXIT',{{badmatch,#{key := 1}},_}} =
+ (catch match_binary_in_map(#{key => 1})),
+ {'EXIT',{{badmatch,#{key := <<1023:16>>}},_}} =
+ (catch match_binary_in_map(#{key => <<1023:16>>})),
+ {'EXIT',{{badmatch,#{key := <<1:8>>}},_}} =
+ (catch match_binary_in_map(#{key => <<1:8>>})),
+ {'EXIT',{{badmatch,not_a_map},_}} =
+ (catch match_binary_in_map(not_a_map)),
+ ok.
+
+match_binary_in_map(Map) ->
+ case 8 of
+ N ->
+ #{key := <<42:N>>} = Map,
+ ok
+ end.
+
+match_string_opt(Config) when is_list(Config) ->
+ {x,<<1,2,3>>,{<<1>>,{v,<<1,2,3>>}}} =
+ do_match_string_opt({<<1>>,{v,<<1,2,3>>}}),
+ ok.
+
+do_match_string_opt({<<1>>,{v,V}}=T) ->
+ {x,V,T}.
+
+
check(F, R) ->
R = F().
diff --git a/lib/compiler/test/bs_shadowed_size_var.core b/lib/compiler/test/bs_shadowed_size_var.core
deleted file mode 100644
index d1d5ebba6d..0000000000
--- a/lib/compiler/test/bs_shadowed_size_var.core
+++ /dev/null
@@ -1,25 +0,0 @@
-module 'bs_shadowed_size_var' ['filter_essentials'/1]
- attributes []
-
-%% Reduced code from beam_asm inlined using the old inliner.
-
-'filter_essentials'/1 =
- fun (_cor0) ->
- case _cor0 of
- <[#{#<Sz>(32,1,'integer',['unsigned','big']) }#|T]> when 'true' ->
- let <_cor4> =
- case T of
- %% Variable 'Sz' repeated here. Should work.
- <#{#<Sz>(32,1,'integer',['unsigned','big']),
- #<Data>(Sz,8,'binary',['unsigned','big'])}#> when 'true' ->
- Data
- <_cor5> when 'true' ->
- primop 'match_fail'
- ({'case_clause',{_cor5}})
- end
- in [Sz|_cor4]
- <_cor5> when 'true' ->
- primop 'match_fail'
- ({'function_clause',_cor5})
- end
-end
diff --git a/lib/compiler/test/compilation_SUITE.erl b/lib/compiler/test/compilation_SUITE.erl
index 8711f35e8e..f570d94f7d 100644
--- a/lib/compiler/test/compilation_SUITE.erl
+++ b/lib/compiler/test/compilation_SUITE.erl
@@ -309,8 +309,8 @@ load_and_call(Out, Module) ->
%% Smoke-test of beam disassembler.
?line test_lib:smoke_disasm(Module),
- ?line true = erlang:delete_module(Module),
- ?line true = erlang:purge_module(Module),
+ _ = code:delete(Module),
+ _ = code:purge(Module),
%% Restore state of trap_exit just in case. (Since the compiler
%% uses a temporary process, we will get {'EXIT',Pid,normal} messages
@@ -428,41 +428,35 @@ self_compile_old_inliner(Config) when is_list(Config) ->
self_compile_1(Config, "old", [verbose,{inline,500}]).
self_compile_1(Config, Prefix, Opts) ->
- ?line Dog = test_server:timetrap(test_server:minutes(40)),
+ Dog = test_server:timetrap(test_server:minutes(40)),
- ?line Priv = ?config(priv_dir,Config),
- ?line Version = compiler_version(),
+ Priv = ?config(priv_dir,Config),
+ Version = compiler_version(),
%% Compile the compiler. (In this node to get better coverage.)
- ?line CompA = make_compiler_dir(Priv, Prefix++"compiler_a"),
- ?line VsnA = Version ++ ".0",
+ CompA = make_compiler_dir(Priv, Prefix++"compiler_a"),
+ VsnA = Version ++ ".0",
compile_compiler(compiler_src(), CompA, VsnA, [clint0,clint|Opts]),
%% Compile the compiler again using the newly compiled compiler.
%% (In another node because reloading the compiler would disturb cover.)
CompilerB = Prefix++"compiler_b",
CompB = make_compiler_dir(Priv, CompilerB),
- ?line VsnB = VsnA ++ ".0",
+ VsnB = VsnA ++ ".0",
self_compile_node(CompA, CompB, VsnB, Opts),
- %% Compare compiler directories.
- ?line compare_compilers(CompA, CompB),
+ %% Compare compiler directories. The compiler directories should
+ %% be equal (except for beam_asm that contains the compiler version).
+ compare_compilers(CompA, CompB),
- %% Compile and compare compiler C.
- ?line CompilerC = Prefix++"compiler_c",
- ?line CompC = make_compiler_dir(Priv, CompilerC),
- ?line VsnC = VsnB ++ ".0",
- self_compile_node(CompB, CompC, VsnC, Opts),
- ?line compare_compilers(CompB, CompC),
-
- ?line test_server:timetrap_cancel(Dog),
+ test_server:timetrap_cancel(Dog),
ok.
self_compile_node(CompilerDir, OutDir, Version, Opts) ->
- ?line Dog = test_server:timetrap(test_server:minutes(15)),
- ?line Pa = "-pa " ++ filename:dirname(code:which(?MODULE)) ++
+ Dog = test_server:timetrap(test_server:minutes(15)),
+ Pa = "-pa " ++ filename:dirname(code:which(?MODULE)) ++
" -pa " ++ CompilerDir,
- ?line Files = compiler_src(),
+ Files = compiler_src(),
%% We don't want the cover server started on the other node,
%% because it will load the same cover-compiled code as on this
@@ -472,7 +466,7 @@ self_compile_node(CompilerDir, OutDir, Version, Opts) ->
fun() ->
compile_compiler(Files, OutDir, Version, Opts)
end, Pa),
- ?line test_server:timetrap_cancel(Dog),
+ test_server:timetrap_cancel(Dog),
ok.
compile_compiler(Files, OutDir, Version, InlineOpts) ->
@@ -499,27 +493,22 @@ compiler_modules(Dir) ->
[list_to_atom(filename:rootname(filename:basename(F))) || F <- Files].
make_compiler_dir(Priv, Dir0) ->
- ?line Dir = filename:join(Priv, Dir0),
- ?line ok = file:make_dir(Dir),
+ Dir = filename:join(Priv, Dir0),
+ ok = file:make_dir(Dir),
Dir.
-make_current(Dir) ->
- true = code:add_patha(Dir),
- lists:foreach(fun(File) ->
- c:l(File)
- end, compiler_modules(Dir)),
- io:format("~p\n", [code:which(compile)]).
-
compiler_version() ->
- {value,{version,Version}} = lists:keysearch(version, 1,
- compile:module_info(compile)),
+ {version,Version} = lists:keyfind(version, 1,
+ compile:module_info(compile)),
Version.
compare_compilers(ADir, BDir) ->
{[],[],D} = beam_lib:cmp_dirs(ADir, BDir),
- [] = [T || {A,_}=T <- D,
- filename:basename(A) =/= "beam_asm.beam"]. %Contains compiler version.
+ %% beam_asm.beam contains the compiler version and therefore it *must*
+ %% compare unequal.
+ ["beam_asm.beam"] = [filename:basename(A) || {A,_} <- D],
+ ok.
%%%
%%% The only test of the following code is that it compiles.
@@ -611,12 +600,10 @@ otp_7345(Config) when is_list(Config) ->
otp_7345(ObjRef, _RdEnv, Args) ->
Cid = ObjRef#contextId.cid,
- _DpRef =
- #dpRef{cid = Cid,
+ _ = #dpRef{cid = Cid,
ms_device_context_id = cid_id,
tlli = #ptmsi{value = 0}},
- _QosProfile =
- #qosProfileBssgp{peak_bit_rate_msb = 0,
+ _ = #qosProfileBssgp{peak_bit_rate_msb = 0,
peak_bit_rate_lsb = 80,
t_a_precedence = 49},
[Cpdu|_] = Args,
diff --git a/lib/compiler/test/compile_SUITE.erl b/lib/compiler/test/compile_SUITE.erl
index 128291dc67..6b0369bf98 100644
--- a/lib/compiler/test/compile_SUITE.erl
+++ b/lib/compiler/test/compile_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -30,7 +30,7 @@
other_output/1, encrypted_abstr/1,
bad_record_use1/1, bad_record_use2/1, strict_record/1,
missing_testheap/1, cover/1, env/1, core/1, asm/1,
- sys_pre_attributes/1]).
+ sys_pre_attributes/1, dialyzer/1]).
-export([init/3]).
@@ -47,7 +47,7 @@ all() ->
other_output, encrypted_abstr,
{group, bad_record_use}, strict_record,
missing_testheap, cover, env, core, asm,
- sys_pre_attributes].
+ sys_pre_attributes, dialyzer].
groups() ->
[{bad_record_use, [],
@@ -102,6 +102,8 @@ file_1(Config) when is_list(Config) ->
?line compile_and_verify(Simple, Target, [debug_info]),
?line {ok,simple} = compile:file(Simple, [no_line_info]), %Coverage
+ {ok,simple} = compile:file(Simple, [{eprof,beam_z}]), %Coverage
+
?line ok = file:set_cwd(Cwd),
?line true = exists(Target),
?line passed = run(Target, test, []),
@@ -124,7 +126,8 @@ file_1(Config) when is_list(Config) ->
forms_2(Config) when is_list(Config) ->
Src = "/foo/bar",
AbsSrc = filename:absname(Src),
- {ok,simple,Binary} = compile:forms([{attribute,1,module,simple}],
+ Anno = erl_anno:new(1),
+ {ok,simple,Binary} = compile:forms([{attribute,Anno,module,simple}],
[binary,{source,Src}]),
code:load_binary(simple, Src, Binary),
Info = simple:module_info(compile),
@@ -748,42 +751,65 @@ env_1(Simple, Target) ->
%% compile the generated Core Erlang files.
core(Config) when is_list(Config) ->
- ?line Dog = test_server:timetrap(test_server:minutes(5)),
- ?line PrivDir = ?config(priv_dir, Config),
- ?line Outdir = filename:join(PrivDir, "core"),
- ?line ok = file:make_dir(Outdir),
+ PrivDir = ?config(priv_dir, Config),
+ Outdir = filename:join(PrivDir, "core"),
+ ok = file:make_dir(Outdir),
- ?line Wc = filename:join(filename:dirname(code:which(?MODULE)), "*.beam"),
- ?line TestBeams = filelib:wildcard(Wc),
- ?line Abstr = [begin {ok,{Mod,[{abstract_code,
+ Wc = filename:join(filename:dirname(code:which(?MODULE)), "*.beam"),
+ TestBeams = filelib:wildcard(Wc),
+ Abstr = [begin {ok,{Mod,[{abstract_code,
{raw_abstract_v1,Abstr}}]}} =
beam_lib:chunks(Beam, [abstract_code]),
{Mod,Abstr} end || Beam <- TestBeams],
- ?line Res = test_lib:p_run(fun(F) -> do_core(F, Outdir) end, Abstr),
- ?line test_server:timetrap_cancel(Dog),
- Res.
-
+ test_lib:p_run(fun(F) -> do_core(F, Outdir) end, Abstr).
do_core({M,A}, Outdir) ->
try
- {ok,M,Core} = compile:forms(A, [to_core,report]),
- CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
- CorePP = core_pp:format(Core),
- ok = file:write_file(CoreFile, CorePP),
- case compile:file(CoreFile, [clint,from_core,binary]) of
- {ok,M,_} ->
- ok = file:delete(CoreFile);
- Other ->
- io:format("*** core_lint failure '~p' for ~s\n",
- [Other,CoreFile]),
- error
- end
- catch Class:Error ->
+ do_core_1(M, A, Outdir)
+ catch
+ throw:{error,Error} ->
+ io:format("*** compilation failure '~p' for module ~s\n",
+ [Error,M]),
+ error;
+ Class:Error ->
io:format("~p: ~p ~p\n~p\n",
[M,Class,Error,erlang:get_stacktrace()]),
error
end.
+do_core_1(M, A, Outdir) ->
+ {ok,M,Core0} = compile:forms(A, [to_core]),
+ CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
+ CorePP = core_pp:format(Core0),
+ ok = file:write_file(CoreFile, CorePP),
+
+ %% Parse the .core file and return the result as Core Erlang Terms.
+ Core = case compile:file(CoreFile, [report_errors,from_core,no_copt,to_core,binary]) of
+ {ok,M,Core1} -> Core1;
+ Other -> throw({error,Other})
+ end,
+ ok = file:delete(CoreFile),
+
+ %% Compile as usual (including optimizations).
+ compile_forms(Core, [clint,from_core,binary]),
+
+ %% Don't optimize to test that we are not dependent
+ %% on the Core Erlang optimization passes.
+ %% (Example of a previous bug: The core_parse pass
+ %% would not turn map literals into #c_literal{}
+ %% records; if sys_core_fold was run it would fix
+ %% that; if sys_core_fold was not run v3_kernel would
+ %% crash.)
+ compile_forms(Core, [clint,from_core,no_copt,binary]),
+
+ ok.
+
+compile_forms(Forms, Opts) ->
+ case compile:forms(Forms, [report_errors|Opts]) of
+ {ok,[],_} -> ok;
+ Other -> throw({error,Other})
+ end.
+
%% Compile to Beam assembly language (.S) and then try to
%% run .S through the compiler again.
@@ -854,6 +880,20 @@ sys_pre_attributes(Config) ->
[report,verbose]),
ok.
+%% Test the dialyzer option to cover more code.
+dialyzer(Config) ->
+ Priv = ?config(priv_dir, Config),
+ file:set_cwd(?config(data_dir, Config)),
+ Opts = [{outdir,Priv},report_errors],
+ M = dialyzer_test,
+ {ok,M} = c:c(M, [dialyzer|Opts]),
+ [{a,b,c}] = M:M(),
+
+ %% Cover huge line numbers without the 'dialyzer' option.
+ {ok,M} = c:c(M, Opts),
+ [{a,b,c}] = M:M(),
+ ok.
+
%%%
%%% Utilities.
%%%
diff --git a/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl b/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl
new file mode 100644
index 0000000000..ed65ff9c43
--- /dev/null
+++ b/lib/compiler/test/compile_SUITE_data/dialyzer_test.erl
@@ -0,0 +1,39 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(dialyzer_test).
+-export([?MODULE/0,turtle/0,test/1,huge/1]).
+
+-record(turtle, {a,b,c}).
+-record(tortoise, {a,b,c}).
+
+?MODULE() ->
+ [{a,b,c}].
+
+turtle() ->
+ #turtle{a=1,b=2,c=3}.
+
+test(T) ->
+ {T#tortoise.a,T#tortoise.b}.
+
+-file("dialyzer_test", 100000000).
+
+huge(X) ->
+ #turtle{a=42,b=100,c=511},
+ X#tortoise.a.
diff --git a/lib/compiler/test/core_SUITE.erl b/lib/compiler/test/core_SUITE.erl
index 428ad65364..c4a7efbfc4 100644
--- a/lib/compiler/test/core_SUITE.erl
+++ b/lib/compiler/test/core_SUITE.erl
@@ -24,7 +24,9 @@
dehydrated_itracer/1,nested_tries/1,
seq_in_guard/1,make_effect_seq/1,eval_is_boolean/1,
unsafe_case/1,nomatch_shadow/1,reversed_annos/1,
- map_core_test/1,eval_case/1,bad_boolean_guard/1]).
+ map_core_test/1,eval_case/1,bad_boolean_guard/1,
+ bs_shadowed_size_var/1
+ ]).
-include_lib("test_server/include/test_server.hrl").
@@ -50,7 +52,8 @@ groups() ->
[{p,test_lib:parallel(),
[dehydrated_itracer,nested_tries,seq_in_guard,make_effect_seq,
eval_is_boolean,unsafe_case,nomatch_shadow,reversed_annos,
- map_core_test,eval_case,bad_boolean_guard
+ map_core_test,eval_case,bad_boolean_guard,
+ bs_shadowed_size_var
]}].
@@ -78,6 +81,8 @@ end_per_group(_GroupName, Config) ->
?comp(map_core_test).
?comp(eval_case).
?comp(bad_boolean_guard).
+?comp(bs_shadowed_size_var).
+
try_it(Mod, Conf) ->
Src = filename:join(?config(data_dir, Conf), atom_to_list(Mod)),
@@ -87,4 +92,7 @@ try_it(Mod, Conf) ->
compile_and_load(Src, Opts) ->
{ok,Mod,Bin} = compile:file(Src, [from_core,report,time,binary|Opts]),
{module,Mod} = code:load_binary(Mod, Mod, Bin),
- ok = Mod:Mod().
+ ok = Mod:Mod(),
+ _ = code:delete(Mod),
+ _ = code:purge(Mod),
+ ok.
diff --git a/lib/compiler/test/core_SUITE_data/bs_shadowed_size_var.core b/lib/compiler/test/core_SUITE_data/bs_shadowed_size_var.core
new file mode 100644
index 0000000000..0ade037e05
--- /dev/null
+++ b/lib/compiler/test/core_SUITE_data/bs_shadowed_size_var.core
@@ -0,0 +1,66 @@
+module 'bs_shadowed_size_var'
+ ['filter_essentials'/1,
+ 'bs_shadowed_size_var'/0]
+ attributes []
+
+%% bs_shadowed_size_var() ->
+%% [42|<<"abcde">>] = Mod:filter_essentials([<<42:32>>|<<5:32,"abcde">>]),
+%% ok.
+
+'bs_shadowed_size_var'/0 =
+ fun () ->
+ case <> of
+ <> when 'true' ->
+ case apply 'filter_essentials'/1
+ ([#{#<0>(8,1,'integer',['unsigned'|['big']]),
+ #<0>(8,1,'integer',['unsigned'|['big']]),
+ #<0>(8,1,'integer',['unsigned'|['big']]),
+ #<42>(8,1,'integer',['unsigned'|['big']])}#|#{#<0>(8,1,'integer',['unsigned'|['big']]),
+ #<0>(8,1,'integer',['unsigned'|['big']]),
+ #<0>(8,1,'integer',['unsigned'|['big']]),
+ #<5>(8,1,'integer',['unsigned'|['big']]),
+ #<97>(8,1,'integer',['unsigned'|['big']]),
+ #<98>(8,1,'integer',['unsigned'|['big']]),
+ #<99>(8,1,'integer',['unsigned'|['big']]),
+ #<100>(8,1,'integer',['unsigned'|['big']]),
+ #<101>(8,1,'integer',['unsigned'|['big']])}#]) of
+ <[42|#{#<97>(8,1,'integer',['unsigned'|['big']]),
+ #<98>(8,1,'integer',['unsigned'|['big']]),
+ #<99>(8,1,'integer',['unsigned'|['big']]),
+ #<100>(8,1,'integer',['unsigned'|['big']]),
+ #<101>(8,1,'integer',['unsigned'|['big']])}#]> when 'true' ->
+ 'ok'
+ ( <_cor0> when 'true' ->
+ primop 'match_fail'
+ ({'badmatch',_cor0})
+ -| ['compiler_generated'] )
+ end
+ ( <> when 'true' ->
+ ( primop 'match_fail'
+ ({'function_clause'})
+ -| [{'function_name',{'bs_shadowed_size_var',0}}] )
+ -| ['compiler_generated'] )
+ end
+
+%% Reduced code from beam_asm inlined using the old inliner.
+
+'filter_essentials'/1 =
+ fun (_cor0) ->
+ case _cor0 of
+ <[#{#<Sz>(32,1,'integer',['unsigned','big']) }#|T]> when 'true' ->
+ let <_cor4> =
+ case T of
+ %% Variable 'Sz' repeated here. Should work.
+ <#{#<Sz>(32,1,'integer',['unsigned','big']),
+ #<Data>(Sz,8,'binary',['unsigned','big'])}#> when 'true' ->
+ Data
+ <_cor5> when 'true' ->
+ primop 'match_fail'
+ ({'case_clause',{_cor5}})
+ end
+ in [Sz|_cor4]
+ <_cor5> when 'true' ->
+ primop 'match_fail'
+ ({'function_clause',_cor5})
+ end
+end
diff --git a/lib/compiler/test/core_SUITE_data/map_core_test.core b/lib/compiler/test/core_SUITE_data/map_core_test.core
index 2aa853d450..a75f6cf24f 100644
--- a/lib/compiler/test/core_SUITE_data/map_core_test.core
+++ b/lib/compiler/test/core_SUITE_data/map_core_test.core
@@ -7,11 +7,11 @@ module 'map_core_test' ['map_core_test'/0,
fun () ->
let <_cor0> =
%% Line 15
- ~{::<'check','ok'>,::<1337,#{#<104>(8,1,'integer',['unsigned'|['big']]),
+ ~{'check'=>'ok',1337=>#{#<104>(8,1,'integer',['unsigned'|['big']]),
#<101>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
- #<111>(8,1,'integer',['unsigned'|['big']])}#>,::<'val',0>}~
+ #<111>(8,1,'integer',['unsigned'|['big']])}#,'val'=>0}~
in let <M> =
%% Line 15
apply 'id'/1
@@ -23,7 +23,7 @@ module 'map_core_test' ['map_core_test'/0,
in %% Line 16
case apply 'call'/2
(M, _cor2) of
- <~{~<1337,#{#<104>(8,1,'integer',['unsigned'|['big']]),
+ <~{1337:=#{#<104>(8,1,'integer',['unsigned'|['big']]),
#<101>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
#<108>(8,1,'integer',['unsigned'|['big']]),
@@ -39,7 +39,7 @@ module 'map_core_test' ['map_core_test'/0,
#<32>(8,1,'integer',['unsigned'|['big']]),
#<53>(8,1,'integer',['unsigned'|['big']]),
#<32>(8,1,'integer',['unsigned'|['big']]),
- #<54>(8,1,'integer',['unsigned'|['big']])}#>,~<'check','ok'>,~<'val',21>}~> when 'true' ->
+ #<54>(8,1,'integer',['unsigned'|['big']])}#,'check':='ok','val':=21}~> when 'true' ->
%% Line 17
'ok'
( <_cor3> when 'true' ->
@@ -51,7 +51,7 @@ module 'map_core_test' ['map_core_test'/0,
%% Line 20
fun (_cor1,_cor0) ->
case <_cor1,_cor0> of
- <M = ~{~<1337,Bin>,~<'check',_cor8>,~<'val',Val>}~,[V|Vs]> when 'true' ->
+ <M = ~{1337:=Bin,'check':=_cor8,'val':=Val}~,[V|Vs]> when 'true' ->
let <_cor3> =
%% Line 21
call 'erlang':'+'
@@ -67,7 +67,7 @@ module 'map_core_test' ['map_core_test'/0,
(Val, V)
in let <_cor5> =
%% Line 21
- ~{~<1337,_cor4>,~<'val',_cor2>|M}~
+ ~{1337:=_cor4,'val':=_cor2|M}~
in %% Line 21
apply 'call'/2
(_cor5, Vs)
diff --git a/lib/compiler/test/core_fold_SUITE.erl b/lib/compiler/test/core_fold_SUITE.erl
index 6a7036d728..bff9806bdd 100644
--- a/lib/compiler/test/core_fold_SUITE.erl
+++ b/lib/compiler/test/core_fold_SUITE.erl
@@ -23,7 +23,8 @@
t_element/1,setelement/1,t_length/1,append/1,t_apply/1,bifs/1,
eq/1,nested_call_in_case/1,guard_try_catch/1,coverage/1,
unused_multiple_values_error/1,unused_multiple_values/1,
- multiple_aliases/1,redundant_boolean_clauses/1,mixed_matching_clauses/1]).
+ multiple_aliases/1,redundant_boolean_clauses/1,
+ mixed_matching_clauses/1,unnecessary_building/1]).
-export([foo/0,foo/1,foo/2,foo/3]).
@@ -36,11 +37,12 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[t_element,setelement,t_length,append,t_apply,bifs,
eq,nested_call_in_case,guard_try_catch,coverage,
unused_multiple_values_error,unused_multiple_values,
- multiple_aliases,redundant_boolean_clauses,mixed_matching_clauses]}].
+ multiple_aliases,redundant_boolean_clauses,
+ mixed_matching_clauses,unnecessary_building]}].
init_per_suite(Config) ->
@@ -60,6 +62,12 @@ t_element(Config) when is_list(Config) ->
X = make_ref(),
?line X = id(element(1, {X,y,z})),
?line b = id(element(2, {a,b,c,d})),
+ (fun() ->
+ case {a,#{k=>X}} of
+ {a,#{k:=X}}=Tuple ->
+ #{k:=X} = id(element(2, Tuple))
+ end
+ end)(),
%% No optimization, but should work.
Tuple = id({x,y,z}),
@@ -80,6 +88,7 @@ t_element(Config) when is_list(Config) ->
{_,_,_}=Tup ->
?line {'EXIT',{badarg,_}} = (catch element(4, Tup))
end,
+ {'EXIT',{badarg,_}} = (catch element(1, tuple_size(Tuple))),
ok.
@@ -98,6 +107,7 @@ setelement(Config) when is_list(Config) ->
?line error = setelement_crash_2({a,b,c,d,e,f}, <<42>>),
{'EXIT',{badarg,_}} = (catch setelement(1, not_a_tuple, New)),
+ {'EXIT',{badarg,_}} = (catch setelement(3, {a,b}, New)),
ok.
@@ -189,7 +199,10 @@ foo(A, B, C) ->
A + B + C.
bifs(Config) when is_list(Config) ->
- ?line <<1,2,3,4>> = id(list_to_binary([1,2,3,4])),
+ <<1,2,3,4>> = id(list_to_binary([1,2,3,4])),
+ K = {a,key},
+ V = {a,value},
+ {ok,#{K:=V}} = id(list_to_tuple([ok,#{K=>V}])),
ok.
-define(CMP_SAME(A0, B), (fun(A) -> true = A == B, false = A /= B end)(id(A0))).
@@ -204,19 +217,31 @@ eq(Config) when is_list(Config) ->
?line ?CMP_DIFF(a, [a]),
?line ?CMP_DIFF(a, {1,2,3}),
+ ?CMP_SAME(#{a=>1.0,b=>2}, #{b=>2.0,a=>1}),
+ ?CMP_SAME(#{a=>[1.0],b=>[2]}, #{b=>[2.0],a=>[1]}),
+
+ %% The rules for comparing keys are different in 17.x and 18.x.
+ %% Just test that the results are consistent.
+ Bool = id(#{1=>a}) == id(#{1.0=>a}), %Unoptimizable.
+ Bool = id(#{1=>a}) == #{1.0=>a}, %Optimizable.
+ Bool = #{1=>a} == #{1.0=>a}, %Optimizable.
+ io:format("Bool = ~p\n", [Bool]),
+
ok.
%% OTP-7117.
nested_call_in_case(Config) when is_list(Config) ->
- ?line PrivDir = ?config(priv_dir, Config),
- ?line Dir = filename:dirname(code:which(?MODULE)),
- ?line Core = filename:join(Dir, "nested_call_in_case"),
- ?line Opts = [from_core,{outdir,PrivDir}|test_lib:opt_opts(?MODULE)],
- ?line io:format("~p", [Opts]),
- ?line {ok,Mod} = c:c(Core, Opts),
- ?line yes = Mod:a([1,2,3], 2),
- ?line no = Mod:a([1,2,3], 4),
- ?line {'EXIT',_} = (catch Mod:a(not_a_list, 42)),
+ PrivDir = ?config(priv_dir, Config),
+ Dir = test_lib:get_data_dir(Config),
+ Core = filename:join(Dir, "nested_call_in_case"),
+ Opts = [from_core,{outdir,PrivDir}|test_lib:opt_opts(?MODULE)],
+ io:format("~p", [Opts]),
+ {ok,Mod} = c:c(Core, Opts),
+ yes = Mod:a([1,2,3], 2),
+ no = Mod:a([1,2,3], 4),
+ {'EXIT',_} = (catch Mod:a(not_a_list, 42)),
+ _ = code:delete(Mod),
+ _ = code:purge(Mod),
ok.
guard_try_catch(_Config) ->
@@ -236,6 +261,8 @@ do_guard_try_catch(K, V) ->
false
end.
+-record(cover_opt_guard_try, {list=[]}).
+
coverage(Config) when is_list(Config) ->
?line {'EXIT',{{case_clause,{a,b,c}},_}} =
(catch cover_will_match_list_type({a,b,c})),
@@ -245,6 +272,9 @@ coverage(Config) when is_list(Config) ->
?line error = cover_will_match_lit_list(),
{ok,[a]} = cover_is_safe_bool_expr(a),
+ ok = cover_opt_guard_try(#cover_opt_guard_try{list=[a]}),
+ error = cover_opt_guard_try(#cover_opt_guard_try{list=[]}),
+
%% Make sure that we don't attempt to make literals
%% out of pids. (Putting a pid into a #c_literal{}
%% would crash later compiler passes.)
@@ -257,6 +287,12 @@ coverage(Config) when is_list(Config) ->
error = bsm_an_inlined(<<1,2,3>>, Config),
error = bsm_an_inlined([], Config),
+ %% Cover eval_rel_op/4.
+ Tuple = id({a,b}),
+ false = case Tuple of
+ {_,_} ->
+ Tuple =:= true
+ end,
ok.
cover_will_match_list_type(A) ->
@@ -298,12 +334,20 @@ cover_is_safe_bool_expr(X) ->
false
end.
+cover_opt_guard_try(Msg) ->
+ if
+ length(Msg#cover_opt_guard_try.list) =/= 1 ->
+ error;
+ true ->
+ ok
+ end.
+
bsm_an_inlined(<<_:8>>, _) -> ok;
bsm_an_inlined(_, _) -> error.
unused_multiple_values_error(Config) when is_list(Config) ->
PrivDir = ?config(priv_dir, Config),
- Dir = filename:dirname(code:which(?MODULE)),
+ Dir = test_lib:get_data_dir(Config),
Core = filename:join(Dir, "unused_multiple_values_error"),
Opts = [no_copt,clint,return,from_core,{outdir,PrivDir}
|test_lib:opt_opts(?MODULE)],
@@ -384,4 +428,29 @@ mixed_matching_clauses(Config) when is_list(Config) ->
end,
ok.
+unnecessary_building(Config) when is_list(Config) ->
+ Term1 = do_unnecessary_building_1(test_lib:id(a)),
+ [{a,a},{a,a}] = Term1,
+ 7 = erts_debug:size(Term1),
+
+ %% The Input term should not be rebuilt (thus, it should
+ %% only be counted once in the size of the combined term).
+ Input = test_lib:id({a,b,c}),
+ Term2 = test_lib:id(do_unnecessary_building_2(Input)),
+ {b,[{a,b,c},none],x} = Term2,
+ 4+4+4+2 = erts_debug:size([Term2|Input]),
+
+ ok.
+
+do_unnecessary_building_1(S) ->
+ %% The tuple must only be built once.
+ F0 = F1 = {S,S},
+ [F0,F1].
+
+do_unnecessary_building_2({a,_,_}=T) ->
+ %% The T term should not be rebuilt.
+ {b,
+ [_,_] = [T,none],
+ x}.
+
id(I) -> I.
diff --git a/lib/compiler/test/nested_call_in_case.core b/lib/compiler/test/core_fold_SUITE_data/nested_call_in_case.core
index 5c6b6909bd..c46906b2ed 100644
--- a/lib/compiler/test/nested_call_in_case.core
+++ b/lib/compiler/test/core_fold_SUITE_data/nested_call_in_case.core
@@ -16,6 +16,3 @@ module 'nested_call_in_case' ['a'/2]
-| ['compiler_generated'] )
end
end
-
-
-
diff --git a/lib/compiler/test/unused_multiple_values_error.core b/lib/compiler/test/core_fold_SUITE_data/unused_multiple_values_error.core
index e06587c936..e06587c936 100644
--- a/lib/compiler/test/unused_multiple_values_error.core
+++ b/lib/compiler/test/core_fold_SUITE_data/unused_multiple_values_error.core
diff --git a/lib/compiler/test/error_SUITE.erl b/lib/compiler/test/error_SUITE.erl
index 0d23f12fb5..acd785cc5a 100644
--- a/lib/compiler/test/error_SUITE.erl
+++ b/lib/compiler/test/error_SUITE.erl
@@ -235,10 +235,18 @@ transforms(Config) ->
">>,
{error,[{none,compile,{parse_transform,?MODULE,{too_bad,_}}}],[]} =
run_test(Ts2, test_filename(Config), [], dont_write_beam),
+ Ts3 = <<"
+ -compile({parse_transform,",?MODULE_STRING,"}).
+ ">>,
+ {error,[{none,compile,{parse_transform,?MODULE,{undef,_}}}],[]} =
+ run_test(Ts3, test_filename(Config), [call_undef], dont_write_beam),
ok.
-parse_transform(_, _) ->
- error(too_bad).
+parse_transform(_, Opts) ->
+ case lists:member(call_undef, Opts) of
+ false -> error(too_bad);
+ true -> camembert:délicieux()
+ end.
maps_warnings(Config) when is_list(Config) ->
diff --git a/lib/compiler/test/float_SUITE.erl b/lib/compiler/test/float_SUITE.erl
index afc04fd440..fb8da37f4f 100644
--- a/lib/compiler/test/float_SUITE.erl
+++ b/lib/compiler/test/float_SUITE.erl
@@ -118,6 +118,7 @@ math_functions(Config) when is_list(Config) ->
?line 0.0 = math:sinh(0),
?line 1.0 = math:cosh(0),
?line 0.0 = math:tanh(0),
+ 1.0 = math:log2(2),
?line 1.0 = math:log10(10),
?line -1.0 = math:cos(math:pi()),
?line 1.0 = math:exp(0),
@@ -136,6 +137,7 @@ math_functions(Config) when is_list(Config) ->
?line 0.0 = math:sinh(id(0)),
?line 1.0 = math:cosh(id(0)),
?line 0.0 = math:tanh(id(0)),
+ 1.0 = math:log2(id(2)),
?line 1.0 = math:log10(id(10)),
?line 1.0 = math:exp(id(0)),
?line 0.0 = math:log(id(1)),
diff --git a/lib/compiler/test/guard_SUITE.erl b/lib/compiler/test/guard_SUITE.erl
index eb205d09a7..512207898e 100644
--- a/lib/compiler/test/guard_SUITE.erl
+++ b/lib/compiler/test/guard_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -30,7 +30,7 @@
old_guard_tests/1,
build_in_guard/1,gbif/1,
t_is_boolean/1,is_function_2/1,
- tricky/1,rel_ops/1,literal_type_tests/1,
+ tricky/1,rel_ops/1,rel_op_combinations/1,literal_type_tests/1,
basic_andalso_orelse/1,traverse_dcd/1,
check_qlc_hrl/1,andalso_semi/1,t_tuple_size/1,binary_part/1,
bad_constants/1,bad_guards/1]).
@@ -42,12 +42,13 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[misc,const_cond,basic_not,complex_not,nested_nots,
semicolon,complex_semicolon,comma,or_guard,
more_or_guards,complex_or_guards,and_guard,xor_guard,
more_xor_guards,build_in_guard,old_guard_tests,gbif,
- t_is_boolean,is_function_2,tricky,rel_ops,
+ t_is_boolean,is_function_2,tricky,
+ rel_ops,rel_op_combinations,
literal_type_tests,basic_andalso_orelse,traverse_dcd,
check_qlc_hrl,andalso_semi,t_tuple_size,binary_part,
bad_constants,bad_guards]}].
@@ -330,7 +331,15 @@ complex_semicolon(Config) when is_list(Config) ->
?line ok = csemi6({a,b}, 0),
?line ok = csemi6({}, 3),
?line ok = csemi6({a,b,c}, 3),
-
+
+ %% 7
+ error = csemi7(#{a=>1}, 1, 0),
+ error = csemi7(<<>>, 1, 0),
+ ok = csemi7(#{a=>1}, 3, 0),
+ ok = csemi7(#{a=>1}, 0, 3),
+ ok = csemi7(#{a=>1}, 3, 3),
+ ok = csemi7(#{a=>1, b=>3}, 0, 0),
+
ok.
csemi1(Type, Val) when is_list(Val), Type == float;
@@ -442,6 +451,9 @@ csemi5(_, _) -> error.
csemi6(A, B) when hd([tuple_size(A)]) > 1; abs(B) > 2 -> ok;
csemi6(_, _) -> error.
+csemi7(A, B, C) when A#{a:=B} > #{a=>1}; abs(C) > 2 -> ok;
+csemi7(_, _, _) -> error.
+
comma(Config) when is_list(Config) ->
%% ',' combinations of literal true/false.
@@ -1122,6 +1134,231 @@ rel_ops(Config) when is_list(Config) ->
-undef(TestOp).
+rel_op_combinations(Config) when is_list(Config) ->
+ Digits0 = lists:seq(16#0030, 16#0039) ++
+ lists:seq(16#0660, 16#0669) ++
+ lists:seq(16#06F0, 16#06F9),
+ Digits = gb_sets:from_list(Digits0),
+ rel_op_combinations_1(16#0700, Digits),
+
+ BrokenRange0 = lists:seq(3, 5) ++
+ lists:seq(10, 12) ++ lists:seq(14, 20),
+ BrokenRange = gb_sets:from_list(BrokenRange0),
+ rel_op_combinations_2(30, BrokenRange),
+
+ Red0 = [{I,2*I} || I <- lists:seq(0, 50)] ++
+ [{I,5*I} || I <- lists:seq(51, 80)],
+ Red = gb_trees:from_orddict(Red0),
+ rel_op_combinations_3(100, Red).
+
+rel_op_combinations_1(0, _) ->
+ ok;
+rel_op_combinations_1(N, Digits) ->
+ Bool = gb_sets:is_member(N, Digits),
+ Bool = is_digit_1(N),
+ Bool = is_digit_2(N),
+ Bool = is_digit_3(N),
+ Bool = is_digit_4(N),
+ Bool = is_digit_5(N),
+ Bool = is_digit_6(N),
+ Bool = is_digit_7(N),
+ Bool = is_digit_8(N),
+ rel_op_combinations_1(N-1, Digits).
+
+is_digit_1(X) when 16#0660 =< X, X =< 16#0669 -> true;
+is_digit_1(X) when 16#0030 =< X, X =< 16#0039 -> true;
+is_digit_1(X) when 16#06F0 =< X, X =< 16#06F9 -> true;
+is_digit_1(_) -> false.
+
+is_digit_2(X) when (16#0030-1) < X, X =< 16#0039 -> true;
+is_digit_2(X) when (16#0660-1) < X, X =< 16#0669 -> true;
+is_digit_2(X) when (16#06F0-1) < X, X =< 16#06F9 -> true;
+is_digit_2(_) -> false.
+
+is_digit_3(X) when 16#0660 =< X, X < (16#0669+1) -> true;
+is_digit_3(X) when 16#0030 =< X, X < (16#0039+1) -> true;
+is_digit_3(X) when 16#06F0 =< X, X < (16#06F9+1) -> true;
+is_digit_3(_) -> false.
+
+is_digit_4(X) when (16#0660-1) < X, X < (16#0669+1) -> true;
+is_digit_4(X) when (16#0030-1) < X, X < (16#0039+1) -> true;
+is_digit_4(X) when (16#06F0-1) < X, X < (16#06F9+1) -> true;
+is_digit_4(_) -> false.
+
+is_digit_5(X) when X >= 16#0660, X =< 16#0669 -> true;
+is_digit_5(X) when X >= 16#0030, X =< 16#0039 -> true;
+is_digit_5(X) when X >= 16#06F0, X =< 16#06F9 -> true;
+is_digit_5(_) -> false.
+
+is_digit_6(X) when X > (16#0660-1), X =< 16#0669 -> true;
+is_digit_6(X) when X > (16#0030-1), X =< 16#0039 -> true;
+is_digit_6(X) when X > (16#06F0-1), X =< 16#06F9 -> true;
+is_digit_6(_) -> false.
+
+is_digit_7(X) when 16#0660 =< X, X =< 16#0669 -> true;
+is_digit_7(X) when 16#0030 =< X, X =< 16#003A, X =/= 16#003A -> true;
+is_digit_7(X) when 16#06F0 =< X, X =< 16#06F9 -> true;
+is_digit_7(_) -> false.
+
+is_digit_8(X) when X =< 16#0039, X > (16#0030-1) -> true;
+is_digit_8(X) when X =< 16#06F9, X > (16#06F0-1) -> true;
+is_digit_8(X) when X =< 16#0669, X > (16#0660-1) -> true;
+is_digit_8(16#0670) -> false;
+is_digit_8(_) -> false.
+
+rel_op_combinations_2(0, _) ->
+ ok;
+rel_op_combinations_2(N, Range) ->
+ Bool = gb_sets:is_member(N, Range),
+ Bool = broken_range_1(N),
+ Bool = broken_range_2(N),
+ Bool = broken_range_3(N),
+ Bool = broken_range_4(N),
+ Bool = broken_range_5(N),
+ Bool = broken_range_6(N),
+ Bool = broken_range_7(N),
+ Bool = broken_range_8(N),
+ Bool = broken_range_9(N),
+ Bool = broken_range_10(N),
+ Bool = broken_range_11(N),
+ Bool = broken_range_12(N),
+ Bool = broken_range_13(N),
+ rel_op_combinations_2(N-1, Range).
+
+broken_range_1(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_1(X) when X >= 3, X =< 5 -> true;
+broken_range_1(_) -> false.
+
+broken_range_2(X) when X >= 10, X =< 12 -> true;
+broken_range_2(X) when X >= 14, X =< 20 -> true;
+broken_range_2(X) when X >= 3, X =< 5 -> true;
+broken_range_2(_) -> false.
+
+broken_range_3(X) when X >= 10, X =< 12 -> true;
+broken_range_3(X) when X >= 14, X < 21 -> true;
+broken_range_3(3) -> true;
+broken_range_3(4) -> true;
+broken_range_3(5) -> true;
+broken_range_3(_) -> false.
+
+broken_range_4(X) when X =< 5, X >= 3 -> true;
+broken_range_4(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_4(X) when X =< 100 -> false;
+broken_range_4(_) -> false.
+
+broken_range_5(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_5(X) when X > 2, X =< 5 -> true;
+broken_range_5(_) -> false.
+
+broken_range_6(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_6(X) when X > 2, X < 6 -> true;
+broken_range_6(_) -> false.
+
+broken_range_7(X) when X > 2, X < 6 -> true;
+broken_range_7(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_7(X) when X > 30 -> false;
+broken_range_7(_) -> false.
+
+broken_range_8(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_8(X) when X =:= 3 -> true;
+broken_range_8(X) when X >= 3, X =< 5 -> true;
+broken_range_8(_) -> false.
+
+broken_range_9(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_9(X) when X =:= 13 -> false;
+broken_range_9(X) when X >= 3, X =< 5 -> true;
+broken_range_9(_) -> false.
+
+broken_range_10(X) when X >= 3, X =< 5 -> true;
+broken_range_10(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_10(X) when X =/= 13 -> false;
+broken_range_10(_) -> false.
+
+broken_range_11(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_11(X) when is_tuple(X), X =:= 10 -> true;
+broken_range_11(X) when X >= 3, X =< 5 -> true;
+broken_range_11(_) -> false.
+
+broken_range_12(X) when X >= 3, X =< 5 -> true;
+broken_range_12(X) when X >= 10, X =< 20, X =/= 13 -> true;
+broken_range_12(X) when X < 30, X > 20 -> false;
+broken_range_12(_) -> false.
+
+broken_range_13(X) when X >= 10, X =< 20, 13 =/= X -> true;
+broken_range_13(X) when X >= 3, X =< 5 -> true;
+broken_range_13(_) -> false.
+
+rel_op_combinations_3(0, _) ->
+ ok;
+rel_op_combinations_3(N, Red) ->
+ Val = case gb_trees:lookup(N, Red) of
+ none -> none;
+ {value,V} -> V
+ end,
+ Val = redundant_1(N),
+ Val = redundant_2(N),
+ Val = redundant_3(N),
+ Val = redundant_4(N),
+ Val = redundant_5(N),
+ Val = redundant_6(N),
+ Val = redundant_7(N),
+ Val = redundant_8(N),
+ Val = redundant_9(N),
+ Val = redundant_10(N),
+ Val = redundant_11(N),
+ rel_op_combinations_3(N-1, Red).
+
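+%% Each redundant_N/1 variant encodes the same mapping for the tested
+%% integers: 2*X when X < 51, 5*X when 51 =< X =< 80, and 'none' otherwise.
+%% The remaining clauses are intentionally redundant.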
+redundant_1(X) when X >= 51, X =< 80 -> 5*X;
+redundant_1(X) when X < 51 -> 2*X;
+redundant_1(_) -> none.
+
+redundant_2(X) when X < 51 -> 2*X;
+redundant_2(X) when X >= 51, X =< 80 -> 5*X;
+redundant_2(_) -> none.
+
+redundant_3(X) when X < 51 -> 2*X;
+redundant_3(X) when X =< 80, X >= 51 -> 5*X;
+redundant_3(X) when X =/= 100 -> none;
+redundant_3(_) -> none.
+
+redundant_4(X) when X < 51 -> 2*X;
+redundant_4(X) when X =< 80, X > 50 -> 5*X;
+redundant_4(X) when X =/= 100 -> none;
+redundant_4(_) -> none.
+
+redundant_5(X) when X < 51 -> 2*X;
+redundant_5(X) when X > 50, X < 81 -> 5*X;
+redundant_5(X) when X =< 10 -> none;
+redundant_5(_) -> none.
+
+redundant_6(X) when X > 50, X =< 80 -> 5*X;
+redundant_6(X) when X < 51 -> 2*X;
+redundant_6(_) -> none.
+
+redundant_7(X) when is_integer(X), X >= 51, X =< 80 -> 5*X;
+redundant_7(X) when is_integer(X), X < 51 -> 2*X;
+redundant_7(_) -> none.
+
+redundant_8(X) when X >= 51, X =< 80 -> 5*X;
+redundant_8(X) when X < 51 -> 2*X;
+redundant_8(_) -> none.
+
+redundant_9(X) when X >= 51, X =< 80 -> 5*X;
+redundant_9(X) when X < 51 -> 2*X;
+redundant_9(90) -> none;
+redundant_9(X) when X =/= 90 -> none;
+redundant_9(_) -> none.
+
+redundant_10(X) when X >= 51, X =< 80 -> 5*X;
+redundant_10(X) when X < 51 -> 2*X;
+redundant_10(90) -> none;
+redundant_10(X) when X =:= 90 -> none;
+redundant_10(_) -> none.
+
+redundant_11(X) when X < 51 -> 2*X;
+redundant_11(X) when X =:= 10 -> 2*X;
+redundant_11(X) when X >= 51, X =< 80 -> 5*X;
+redundant_11(_) -> none.
%% Test type tests on literal values. (From emulator test suites.)
literal_type_tests(Config) when is_list(Config) ->
@@ -1136,10 +1373,11 @@ literal_type_tests_1(Config) ->
[{is_function,L1,L2} ||
L1 <- literals(), L2 <- literals()]),
?line Mod = literal_test,
- ?line Func = {function, 0, test, 0, [{clause,0,[],[],Tests}]},
- ?line Form = [{attribute,0,module,Mod},
- {attribute,0,compile,export_all},
- Func, {eof,0}],
+ Anno = erl_anno:new(0),
+ Func = {function, Anno, test, 0, [{clause,Anno,[],[],Tests}]},
+ Form = [{attribute,Anno,module,Mod},
+ {attribute,Anno,compile,export_all},
+ Func, {eof,Anno}],
%% Print generated code for inspection.
?line lists:foreach(fun (F) -> io:put_chars([erl_pp:form(F),"\n"]) end, Form),
@@ -1174,7 +1412,8 @@ test(T, L) ->
{ok,Toks,_Line} = erl_scan:string(S),
{ok,E} = erl_parse:parse_exprs(Toks),
{value,Val,_Bs} = erl_eval:exprs(E, []),
- {match,0,{atom,0,Val},hd(E)}.
+ Anno = erl_anno:new(0),
+ {match,Anno,{atom,Anno,Val},hd(E)}.
test(T, L1, L2) ->
S0 = io_lib:format("begin io:format(\"~~p~n\", [{~p,~p,~p}]), if ~w(~w, ~w) -> true; true -> false end end. ", [T,L1,L2,T,L1,L2]),
@@ -1182,7 +1421,8 @@ test(T, L1, L2) ->
{ok,Toks,_Line} = erl_scan:string(S),
{ok,E} = erl_parse:parse_exprs(Toks),
{value,Val,_Bs} = erl_eval:exprs(E, []),
- {match,0,{atom,0,Val},hd(E)}.
+ Anno = erl_anno:new(0),
+ {match,Anno,{atom,Anno,Val},hd(E)}.
smoke_disasm(Config, Mod, Bin) ->
Priv = ?config(priv_dir, Config),
@@ -1377,6 +1617,8 @@ t_tuple_size(Config) when is_list(Config) ->
?line {ok,Mod,Code} = compile:file(File, [from_asm,binary]),
?line code:load_binary(Mod, File, Code),
?line 14 = Mod:t({1,2,3,4}),
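+    %% Unload the test module now that the test is done.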
+ _ = code:delete(Mod),
+ _ = code:purge(Mod),
ok.
@@ -1556,6 +1798,36 @@ bad_constants(Config) when is_list(Config) ->
bad_guards(Config) when is_list(Config) ->
if erlang:float(self()); true -> ok end,
+
+ fc(catch bad_guards_1(1, [])),
+ fc(catch bad_guards_1(1, [2])),
+ fc(catch bad_guards_1(atom, [2])),
+
+ fc(catch bad_guards_2(#{a=>0,b=>0}, [])),
+ fc(catch bad_guards_2(#{a=>0,b=>0}, [x])),
+ fc(catch bad_guards_2(not_a_map, [x])),
+ fc(catch bad_guards_2(42, [x])),
+
+ fc(catch bad_guards_3(#{a=>0,b=>0}, [])),
+ fc(catch bad_guards_3(#{a=>0,b=>0}, [x])),
+ fc(catch bad_guards_3(not_a_map, [x])),
+ fc(catch bad_guards_3(42, [x])),
+
+ ok.
+
+%% beam_bool used to produce GC BIF instructions whose
+%% Live operands included uninitialized registers.
+
+bad_guards_1(X, [_]) when {{X}}, -X ->
+ ok.
+
+bad_guards_2(M, [_]) when M#{a := 0, b => 0}, map_size(M) ->
+ ok.
+
+%% beam_type used to produce a GC BIF instruction whose Live operand
+%% included uninitialized registers.
+
+bad_guards_3(M, [_]) when is_map(M) andalso M#{a := 0, b => 0}, length(M) ->
ok.
%% Call this function to turn off constant propagation.
diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl
index 398398a397..62bada1407 100644
--- a/lib/compiler/test/lc_SUITE.erl
+++ b/lib/compiler/test/lc_SUITE.erl
@@ -18,12 +18,12 @@
%%
-module(lc_SUITE).
--author('[email protected]').
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
basic/1,deeply_nested/1,no_generator/1,
- empty_generator/1,no_export/1]).
+ empty_generator/1,no_export/1,shadow/1,
+ effect/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -31,10 +31,18 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
test_lib:recompile(?MODULE),
- [basic, deeply_nested, no_generator, empty_generator, no_export].
+ [{group,p}].
groups() ->
- [].
+ [{p,test_lib:parallel(),
+ [basic,
+ deeply_nested,
+ no_generator,
+ empty_generator,
+ no_export,
+ shadow,
+ effect
+ ]}].
init_per_suite(Config) ->
Config.
@@ -59,34 +67,34 @@ end_per_testcase(Case, Config) when is_atom(Case), is_list(Config) ->
ok.
basic(Config) when is_list(Config) ->
- ?line L0 = lists:seq(1, 10),
- ?line L1 = my_map(fun(X) -> {x,X} end, L0),
- ?line L1 = [{x,X} || X <- L0],
- ?line L0 = my_map(fun({x,X}) -> X end, L1),
- ?line [1,2,3,4,5] = [X || X <- L0, X < 6],
- ?line [4,5,6] = [X || X <- L0, X > 3, X < 7],
- ?line [] = [X || X <- L0, X > 32, X < 7],
- ?line [1,3,5,7,9] = [X || X <- L0, odd(X)],
- ?line [2,4,6,8,10] = [X || X <- L0, not odd(X)],
- ?line [1,3,5,9] = [X || X <- L0, odd(X), X =/= 7],
- ?line [2,4,8,10] = [X || X <- L0, not odd(X), X =/= 6],
+ L0 = lists:seq(1, 10),
+ L1 = my_map(fun(X) -> {x,X} end, L0),
+ L1 = [{x,X} || X <- L0],
+ L0 = my_map(fun({x,X}) -> X end, L1),
+ [1,2,3,4,5] = [X || X <- L0, X < 6],
+ [4,5,6] = [X || X <- L0, X > 3, X < 7],
+ [] = [X || X <- L0, X > 32, X < 7],
+ [1,3,5,7,9] = [X || X <- L0, odd(X)],
+ [2,4,6,8,10] = [X || X <- L0, not odd(X)],
+ [1,3,5,9] = [X || X <- L0, odd(X), X =/= 7],
+ [2,4,8,10] = [X || X <- L0, not odd(X), X =/= 6],
%% Append is specially handled.
- ?line [1,3,5,9,2,4,8,10] = [X || X <- L0, odd(X), X =/= 7] ++
+ [1,3,5,9,2,4,8,10] = [X || X <- L0, odd(X), X =/= 7] ++
[X || X <- L0, not odd(X), X =/= 6],
%% Guards BIFs are evaluated in guard context. Weird, but true.
- ?line [{a,b,true},{x,y,true,true}] = [X || X <- tuple_list(), element(3, X)],
+ [{a,b,true},{x,y,true,true}] = [X || X <- tuple_list(), element(3, X)],
%% Filter expressions with andalso/orelse.
- ?line "abc123" = alphanum("?abc123.;"),
+ "abc123" = alphanum("?abc123.;"),
%% Error cases.
- ?line [] = [{xx,X} || X <- L0, element(2, X) == no_no_no],
- ?line {'EXIT',_} = (catch [X || X <- L1, list_to_atom(X) == dum]),
- ?line [] = [X || X <- L1, X+1 < 2],
- ?line {'EXIT',_} = (catch [X || X <- L1, odd(X)]),
- ?line fc([x], catch [E || E <- id(x)]),
+ [] = [{xx,X} || X <- L0, element(2, X) == no_no_no],
+ {'EXIT',_} = (catch [X || X <- L1, list_to_atom(X) == dum]),
+ [] = [X || X <- L1, X+1 < 2],
+ {'EXIT',_} = (catch [X || X <- L1, odd(X)]),
+ fc([x], catch [E || E <- id(x)]),
ok.
tuple_list() ->
@@ -116,12 +124,12 @@ deeply_nested_1() ->
X16 <- [4],X17 <- [3],X18 <- [fun() -> X16+X17 end],X19 <- [2],X20 <- [1]].
no_generator(Config) when is_list(Config) ->
- ?line Seq = lists:seq(-10, 17),
- ?line [no_gen_verify(no_gen(A, B), A, B) || A <- Seq, B <- Seq],
+ Seq = lists:seq(-10, 17),
+ [no_gen_verify(no_gen(A, B), A, B) || A <- Seq, B <- Seq],
%% Literal expression, for coverage.
- ?line [a] = [a || true],
- ?line [a,b,c] = [a || true] ++ [b,c],
+ [a] = [a || true],
+ [a,b,c] = [a || true] ++ [b,c],
ok.
no_gen(A, B) ->
@@ -174,13 +182,51 @@ no_gen_eval(Fun, Res) ->
no_gen_one_more(A, B) -> A + 1 =:= B.
empty_generator(Config) when is_list(Config) ->
- ?line [] = [X || {X} <- [], (false or (X/0 > 3))],
+ [] = [X || {X} <- [], (false or (X/0 > 3))],
ok.
no_export(Config) when is_list(Config) ->
[] = [ _X = a || false ] ++ [ _X = a || false ],
ok.
+%% Test that variables in list comprehensions are
+%% correctly shadowed.
+
+shadow(Config) when is_list(Config) ->
+ Shadowed = nomatch,
+ _ = id(Shadowed), %Eliminate warning.
+ L = [{Shadowed,Shadowed+1} || Shadowed <- lists:seq(7, 9)],
+ [{7,8},{8,9},{9,10}] = id(L),
+ [8,9] = id([Shadowed || {_,Shadowed} <- id(L),
+ Shadowed < 10]),
+ ok.
+
+effect(Config) when is_list(Config) ->
+ [{42,{a,b,c}}] =
+ do_effect(fun(F, L) ->
+ [F({V1,V2}) ||
+ #{<<1:500>>:=V1,<<2:301>>:=V2} <- L],
+ ok
+ end, id([#{},x,#{<<1:500>>=>42,<<2:301>>=>{a,b,c}}])),
+
+ %% Will trigger the time-trap timeout if not tail-recursive.
+ case ?MODULE of
+ lc_SUITE ->
+ _ = [{'EXIT',{badarg,_}} =
+ (catch binary_to_atom(<<C/utf8>>, utf8)) ||
+ C <- lists:seq(16#10000, 16#FFFFF)];
+ _ ->
+ ok
+ end,
+
+ ok.
+
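+%% do_effect/2 runs Lc with a collector fun that stores every value it is
+%% called with in the process dictionary and returns the collected values
+%% in call order.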
+do_effect(Lc, L) ->
+ put(?MODULE, []),
+ F = fun(V) -> put(?MODULE, [V|get(?MODULE)]) end,
+ ok = Lc(F, L),
+ lists:reverse(erase(?MODULE)).
+
id(I) -> I.
fc(Args, {'EXIT',{function_clause,[{?MODULE,_,Args,_}|_]}}) -> ok;
diff --git a/lib/compiler/test/map_SUITE.erl b/lib/compiler/test/map_SUITE.erl
index 75efce9d7b..8768e47b65 100644
--- a/lib/compiler/test/map_SUITE.erl
+++ b/lib/compiler/test/map_SUITE.erl
@@ -22,16 +22,22 @@
-export([
%% literals
- t_build_and_match_literals/1,
- t_update_literals/1,t_match_and_update_literals/1,
+ t_build_and_match_literals/1, t_build_and_match_literals_large/1,
+ t_update_literals/1, t_update_literals_large/1,
+ t_match_and_update_literals/1, t_match_and_update_literals_large/1,
t_update_map_expressions/1,
- t_update_assoc/1,t_update_exact/1,
- t_guard_bifs/1, t_guard_sequence/1, t_guard_update/1,
- t_guard_receive/1, t_guard_fun/1,
+ t_update_assoc/1, t_update_assoc_large/1,
+ t_update_exact/1, t_update_exact_large/1,
+ t_guard_bifs/1,
+ t_guard_sequence/1, t_guard_sequence_large/1,
+ t_guard_update/1, t_guard_update_large/1,
+ t_guard_receive/1, t_guard_receive_large/1,
+ t_guard_fun/1,
t_list_comprehension/1,
t_map_sort_literals/1,
t_map_size/1,
t_build_and_match_aliasing/1,
+ t_is_map/1,
%% variables
t_build_and_match_variables/1,
@@ -61,17 +67,25 @@
suite() -> [].
-all() -> [
+all() ->
+ test_lib:recompile(?MODULE),
+ [
%% literals
- t_build_and_match_literals,
- t_update_literals, t_match_and_update_literals,
+ t_build_and_match_literals, t_build_and_match_literals_large,
+ t_update_literals, t_update_literals_large,
+ t_match_and_update_literals, t_match_and_update_literals_large,
t_update_map_expressions,
- t_update_assoc,t_update_exact,
- t_guard_bifs, t_guard_sequence, t_guard_update,
- t_guard_receive,t_guard_fun, t_list_comprehension,
+ t_update_assoc, t_update_assoc_large,
+ t_update_exact, t_update_exact_large,
+ t_guard_bifs,
+ t_guard_sequence, t_guard_sequence_large,
+ t_guard_update, t_guard_update_large,
+ t_guard_receive, t_guard_receive_large,
+ t_guard_fun, t_list_comprehension,
t_map_sort_literals,
t_map_size,
t_build_and_match_aliasing,
+ t_is_map,
%% variables
t_build_and_match_variables,
@@ -155,6 +169,461 @@ t_build_and_match_literals(Config) when is_list(Config) ->
{'EXIT',{{badmatch,_},_}} = (catch (#{#{"a"=>42} := 3}=id(#{#{"a"=>3}=>42}))),
ok.
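+%% Like t_build_and_match_literals/1, but with large maps (dozens of keys,
+%% including float, bignum, and map keys).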
+t_build_and_match_literals_large(Config) when is_list(Config) ->
+ % normal non-repeating
+ M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" }),
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M0,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M0,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M0,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M0,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M0,
+
+ 60 = map_size(M0),
+ 60 = maps:size(M0),
+
+ % with repeating
+ M1 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 10=>na0,20=>nb0,30=>"nc0","40"=>"nd0",<<"50">>=>"ne0",{["00"]}=>"n10",
+ 11=>na1,21=>nb1,31=>"nc1","41"=>"nd1",<<"51">>=>"ne1",{["01"]}=>"n11",
+ 12=>na2,22=>nb2,32=>"nc2","42"=>"nd2",<<"52">>=>"ne2",{["02"]}=>"n12",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+
+ 13=>na3,23=>nb3,33=>"nc3","43"=>"nd3",<<"53">>=>"ne3",{["03"]}=>"n13",
+ 14=>na4,24=>nb4,34=>"nc4","44"=>"nd4",<<"54">>=>"ne4",{["04"]}=>"n14",
+
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" }),
+
+ #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1,
+ #{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1,
+ #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1,
+ #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1,
+ #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1,
+
+ 60 = map_size(M1),
+ 60 = maps:size(M1),
+
+ % with floats
+
+ M2 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9"}),
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2,
+
+ #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2,
+ #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2,
+ #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2,
+ #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2,
+ #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2,
+
+ 90 = map_size(M2),
+ 90 = maps:size(M2),
+
+ % with bignums
+ M3 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ 36893488147419103232=>big1, 73786976294838206464=>big2,
+ 147573952589676412928=>big3, 18446744073709551616=>big4,
+ 4294967296=>big5, 8589934592=>big6,
+ 4294967295=>big7, 67108863=>big8
+ }),
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3,
+
+ #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+
+ 98 = map_size(M3),
+ 98 = maps:size(M3),
+
+ %% with maps
+
+ M4 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }),
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M4,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M4,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M4,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M4,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M4,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M4,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M4,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M4,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M4,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M4,
+
+ #{ #{ one => small, map => key } := "small map key 1",
+ #{ second => small, map => key } := "small map key 2",
+ #{ third => small, map => key } := "small map key 3" } = M4,
+
+ #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M4,
+
+
+ #{ 15:=V1,25:=b5,35:=V2,"45":="d5",<<"55">>:=V3,{["05"]}:="15",
+ #{ one => small, map => key } := "small map key 1",
+ #{ second => small, map => key } := V4,
+ #{ third => small, map => key } := "small map key 3",
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := V5 } = M4,
+
+ a5 = V1,
+ "c5" = V2,
+ "e5" = V3,
+ "small map key 2" = V4,
+ "large map key 1" = V5,
+
+ 95 = map_size(M4),
+ 95 = maps:size(M4),
+
+ % call for value
+
+ M5 = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10",
+ 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11",
+ 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12",
+ 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13",
+ 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14",
+
+ 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15",
+ 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16",
+ 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17",
+ 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18",
+ 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"),
+ 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"),
+ 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"),
+ 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"),
+ 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"),
+
+ 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"),
+ 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"),
+ 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"),
+ 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"),
+ 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"),
+
+ #{ one => small, map => key } => id("small map key 1"),
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }),
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M5,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M5,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M5,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M5,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M5,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M5,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M5,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M5,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M5,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M5,
+
+ #{ #{ one => small, map => key } := "small map key 1",
+ #{ second => small, map => key } := "small map key 2",
+ #{ third => small, map => key } := "small map key 3" } = M5,
+
+ #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M5,
+
+ 95 = map_size(M5),
+ 95 = maps:size(M5),
+
+    %% Remember: the maps built earlier must still match unchanged.
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0,
+
+ #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1,
+ #{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1,
+ #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1,
+ #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1,
+ #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3,
+
+ #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+
+ ok.
+
t_build_and_match_aliasing(Config) when is_list(Config) ->
M1 = id(#{a=>1,b=>2,c=>3,d=>4}),
@@ -200,14 +669,25 @@ t_map_size(Config) when is_list(Config) ->
false = map_is_size(M#{ "c" => 2}, 2),
%% Error cases.
- {'EXIT',{badarg,_}} = (catch map_size([])),
- {'EXIT',{badarg,_}} = (catch map_size(<<1,2,3>>)),
- {'EXIT',{badarg,_}} = (catch map_size(1)),
+ {'EXIT',{{badmap,[]},_}} = (catch map_size([])),
+ {'EXIT',{{badmap,<<1,2,3>>},_}} = (catch map_size(<<1,2,3>>)),
+ {'EXIT',{{badmap,1},_}} = (catch map_size(1)),
ok.
map_is_size(M,N) when map_size(M) =:= N -> true;
map_is_size(_,_) -> false.
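+%% Test is_map/1 on literal maps and on non-map terms, both in ordinary
+%% expressions and as a guard test in 'if'.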
+t_is_map(Config) when is_list(Config) ->
+ true = is_map(#{}),
+ true = is_map(#{a=>1}),
+ false = is_map({a,b}),
+ false = is_map(x),
+ if is_map(#{}) -> ok end,
+ if is_map(#{b=>1}) -> ok end,
+ if not is_map([1,2,3]) -> ok end,
+ if not is_map(x) -> ok end,
+ ok.
+
% test map updates without matching
t_update_literals(Config) when is_list(Config) ->
Map = #{x=>1,y=>2,z=>3,q=>4},
@@ -216,13 +696,75 @@ t_update_literals(Config) when is_list(Config) ->
]),
ok.
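+%% Like t_update_literals/1, but starting from a large map.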
+t_update_literals_large(Config) when is_list(Config) ->
+ Map = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10",
+ 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11",
+ 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12",
+ 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13",
+ 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14",
+
+ 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15",
+ 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16",
+ 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17",
+ 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18",
+ 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"),
+ 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"),
+ 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"),
+ 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"),
+ 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"),
+
+ 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"),
+ 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"),
+ 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"),
+ 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"),
+ 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"),
+
+ #{ one => small, map => key } => id("small map key 1"),
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }),
+
+ #{x:="d",q:="4"} = loop_update_literals_x_q(Map, [
+ {"a","1"},{"b","2"},{"c","3"},{"d","4"}
+ ]),
+ ok.
+
+
+
loop_update_literals_x_q(Map, []) -> Map;
loop_update_literals_x_q(Map, [{X,Q}|Vs]) ->
loop_update_literals_x_q(Map#{q=>Q,x=>X},Vs).
% test map updates with matching
t_match_and_update_literals(Config) when is_list(Config) ->
- Map = #{x=>0,y=>"untouched",z=>"also untouched",q=>1},
+ Map = #{ x=>0,y=>"untouched",z=>"also untouched",q=>1,
+ #{ "one" => small, map => key } => "small map key 1" },
+
#{x:=16,q:=21,y:="untouched",z:="also untouched"} = loop_match_and_update_literals_x_q(Map, [
{1,2},{3,4},{5,6},{7,8}
]),
@@ -236,8 +778,77 @@ t_match_and_update_literals(Config) when is_list(Config) ->
#{ 4 := another_number, int := 3 } = M2#{ 4 => another_number },
ok.
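+%% Like t_match_and_update_literals/1, but matching and updating a large map.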
+t_match_and_update_literals_large(Config) when is_list(Config) ->
+ Map = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10",
+ 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11",
+ 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12",
+ 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13",
+ 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14",
+
+ 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15",
+ 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16",
+ 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17",
+ 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18",
+ 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"),
+ 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"),
+ 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"),
+ 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"),
+ 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"),
+
+ 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"),
+ 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"),
+ 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"),
+ 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"),
+ 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"),
+
+ x=>0,y=>"untouched",z=>"also untouched",q=>1,
+
+ #{ "one" => small, map => key } => id("small map key 1"),
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }),
+
+ #{x:=16,q:=21,y:="untouched",z:="also untouched"} = loop_match_and_update_literals_x_q(Map, [
+ {1,2},{3,4},{5,6},{7,8}
+ ]),
+ M0 = id(Map#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat}),
+ M1 = id(Map#{}),
+ M2 = M1#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat},
+ M0 = M2,
+
+ #{ 4 := another_number, int := 3 } = M2#{ 4 => another_number },
+ ok.
+
loop_match_and_update_literals_x_q(Map, []) -> Map;
-loop_match_and_update_literals_x_q(#{q:=Q0,x:=X0} = Map, [{X,Q}|Vs]) ->
+loop_match_and_update_literals_x_q(#{ q:=Q0, x:=X0,
+ #{ "one" => small, map => key } := "small map key 1" } = Map, [{X,Q}|Vs]) ->
loop_match_and_update_literals_x_q(Map#{q=>Q0+Q,x=>X0+X},Vs).
@@ -263,9 +874,9 @@ t_update_map_expressions(Config) when is_list(Config) ->
#{ "a" := b } = F(),
- %% Error cases, FIXME: should be 'badmap'?
- {'EXIT',{badarg,_}} = (catch (id(<<>>))#{ a := 42, b => 2 }),
- {'EXIT',{badarg,_}} = (catch (id([]))#{ a := 42, b => 2 }),
+ %% Error cases.
+ {'EXIT',{{badmap,<<>>},_}} = (catch (id(<<>>))#{ a := 42, b => 2 }),
+ {'EXIT',{{badmap,[]},_}} = (catch (id([]))#{ a := 42, b => 2 }),
ok.
@@ -286,10 +897,86 @@ t_update_assoc(Config) when is_list(Config) ->
 %% Error cases.
BadMap = id(badmap),
- {'EXIT',{badarg,_}} = (catch BadMap#{nonexisting=>val}),
- {'EXIT',{badarg,_}} = (catch <<>>#{nonexisting=>val}),
+ {'EXIT',{{badmap,BadMap},_}} = (catch BadMap#{nonexisting=>val}),
+ {'EXIT',{{badmap,<<>>},_}} = (catch <<>>#{nonexisting=>val}),
+
+ %% Evaluation order.
+ {'EXIT',{blurf,_}} =
+ (catch BadMap#{whatever=>id(error(blurf))}),
+ {'EXIT',{blurf,_}} =
+ (catch BadMap#{id(error(blurf))=>whatever}),
ok.
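+%% Like t_update_assoc/1, but on a large map.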
+t_update_assoc_large(Config) when is_list(Config) ->
+ M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }),
+
+
+ M1 = M0#{1=>42,2=>100,4=>[a,b,c]},
+ #{1:=42,2:=100,10.0:=fa0,4:=[a,b,c],25:=b5} = M1,
+ #{ 10:=43, 24:=b4, 15:=a5, 35:="c5", 2.0:=100, 13.0:=fa3, 4.0:=[a,b,c]} =
+ M0#{1.0=>float,10:=43,2.0=>wrong,2.0=>100,4.0=>[a,b,c]},
+
+ M2 = M0#{13.0=>new},
+ #{10:=a0,20:=b0,13.0:=new,"40":="d0",<<"50">>:="e0"} = M2,
+ M2 = M0#{13.0:=wrong,13.0=>new},
+
+    %% Error cases.
+ BadMap = id({no,map}),
+ {'EXIT',{{badmap,BadMap},_}} = (catch BadMap#{nonexisting=>M0}),
+ ok.
+
+
+
t_update_exact(Config) when is_list(Config) ->
M0 = id(#{1=>a,2=>b,3.0=>c,4=>d,5=>e}),
@@ -310,13 +997,111 @@ t_update_exact(Config) when is_list(Config) ->
1.0 => new_val4 },
 %% Error cases.
- {'EXIT',{badarg,_}} = (catch ((id(nil))#{ a := b })),
- {'EXIT',{badarg,_}} = (catch M0#{nonexisting:=val}),
- {'EXIT',{badarg,_}} = (catch M0#{1.0:=v,1.0=>v2}),
- {'EXIT',{badarg,_}} = (catch M0#{42.0:=v,42:=v2}),
- {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
- {'EXIT',{badarg,_}} = (catch <<>>#{nonexisting:=val}),
- {'EXIT',{badarg,_}} = (catch M0#{<<0:257>> := val}), %% limitation
+ {'EXIT',{{badmap,nil},_}} = (catch ((id(nil))#{ a := b })),
+ {'EXIT',{{badkey,nonexisting},_}} = (catch M0#{nonexisting:=val}),
+ {'EXIT',{{badkey,1.0},_}} = (catch M0#{1.0:=v,1.0=>v2}),
+ {'EXIT',{{badkey,42},_}} = (catch M0#{42.0:=v,42:=v2}),
+ {'EXIT',{{badkey,42.0},_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
+ {'EXIT',{{badmap,<<>>},_}} = (catch <<>>#{nonexisting:=val}),
+ {'EXIT',{{badkey,<<0:257>>},_}} =
+ (catch M0#{<<0:257>> := val}), %limitation
+
+ %% A workaround for a bug allowed an empty map to be updated.
+ {'EXIT',{{badkey,a},_}} = (catch (id(#{}))#{a:=1}),
+ {'EXIT',{{badkey,a},_}} = (catch #{}#{a:=1}),
+ Empty = #{},
+ {'EXIT',{{badkey,a},_}} = (catch Empty#{a:=1}),
+
+ %% Evaluation order.
+ BadMap = id([no,map]),
+ {'EXIT',{blurf,_}} =
+ (catch BadMap#{whatever:=id(error(blurf))}),
+ {'EXIT',{blurf,_}} =
+ (catch BadMap#{id(error(blurf)):=whatever}),
+ {'EXIT',{{badmap,BadMap},_}} =
+ (catch BadMap#{id(nonexisting):=whatever}),
+ ok.
+
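+%% Like t_update_exact/1, but on a large map.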
+t_update_exact_large(Config) when is_list(Config) ->
+ M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }),
+
+
+ M1 = M0#{10:=42,<<"55">>:=100,10.0:=[a,b,c]},
+ #{ 10:=42,<<"55">>:=100,{["05"]}:="15",10.0:=[a,b,c],
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1" } = M1,
+
+ M1 = M0#{10:=wrong,10=>42,<<"55">>=>wrong,<<"55">>:=100,10.0:=[a,b,c]},
+
+ M2 = M0#{13.0:=new},
+ #{10:=a0,20:=b0,13.0:=new} = M2,
+ M2 = M0#{13.0=>wrong,13.0:=new},
+
+    %% Error cases.
+ {'EXIT',{{badkey,nonexisting},_}} = (catch M0#{nonexisting:=val}),
+ {'EXIT',{{badkey,1.0},_}} = (catch M0#{1.0:=v,1.0=>v2}),
+ {'EXIT',{{badkey,42},_}} = (catch M0#{42.0:=v,42:=v2}),
+ {'EXIT',{{badkey,42.0},_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
+
ok.
t_update_values(Config) when is_list(Config) ->
@@ -403,6 +1188,75 @@ t_guard_sequence(Config) when is_list(Config) ->
{'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(#{b=>5})),
ok.
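+%% Like t_guard_sequence/1, but with a large map as the base.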
+t_guard_sequence_large(Config) when is_list(Config) ->
+ M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }),
+
+ {1, "a"} = map_guard_sequence_1(M0#{seq=>1,val=>id("a")}),
+ {2, "b"} = map_guard_sequence_1(M0#{seq=>2,val=>id("b")}),
+ {3, "c"} = map_guard_sequence_1(M0#{seq=>3,val=>id("c")}),
+ {4, "d"} = map_guard_sequence_1(M0#{seq=>4,val=>id("d")}),
+ {5, "e"} = map_guard_sequence_1(M0#{seq=>5,val=>id("e")}),
+
+ {1,M1} = map_guard_sequence_2(M1 = id(M0#{a=>3})),
+ {2,M2} = map_guard_sequence_2(M2 = id(M0#{a=>4, b=>4})),
+ {3,gg,M3} = map_guard_sequence_2(M3 = id(M0#{a=>gg, b=>4})),
+ {4,sc,sc,M4} = map_guard_sequence_2(M4 = id(M0#{a=>sc, b=>3, c=>sc2})),
+ {5,kk,kk,M5} = map_guard_sequence_2(M5 = id(M0#{a=>kk, b=>other, c=>sc2})),
+
+ {'EXIT',{function_clause,_}} = (catch map_guard_sequence_1(M0#{seq=>6,val=>id("e")})),
+ {'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(M0#{b=>5})),
+ ok.
+
map_guard_sequence_1(#{seq:=1=Seq, val:=Val}) -> {Seq,Val};
map_guard_sequence_1(#{seq:=2=Seq, val:=Val}) -> {Seq,Val};
map_guard_sequence_1(#{seq:=3=Seq, val:=Val}) -> {Seq,Val};
@@ -423,6 +1277,65 @@ t_guard_update(Config) when is_list(Config) ->
third = map_guard_update(#{x=>old,y=>old}, #{x=>third,y=>old}),
ok.
+t_guard_update_large(Config) when is_list(Config) ->
+ M0 = id(#{ 70=>a0,80=>b0,90=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10",
+ 71=>a1,81=>b1,91=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11",
+ 72=>a2,82=>b2,92=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12",
+ 73=>a3,83=>b3,93=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13",
+ 74=>a4,84=>b4,94=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14",
+
+ 75=>a5,85=>b5,95=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15",
+ 76=>a6,86=>b6,96=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16",
+ 77=>a7,87=>b7,97=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17",
+ 78=>a8,88=>b8,98=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18",
+ 79=>a9,89=>b9,99=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19",
+
+ 70.0=>fa0,80.0=>fb0,90.0=>"fc0",
+ 71.0=>fa1,81.0=>fb1,91.0=>"fc1",
+ 72.0=>fa2,82.0=>fb2,92.0=>"fc2",
+ 73.0=>fa3,83.0=>fb3,93.0=>"fc3",
+ 74.0=>fa4,84.0=>fb4,94.0=>"fc4",
+
+ 75.0=>fa5,85.0=>fb5,95.0=>"fc5",
+ 76.0=>fa6,86.0=>fb6,96.0=>"fc6",
+ 77.0=>fa7,87.0=>fb7,97.0=>"fc7",
+ 78.0=>fa8,88.0=>fb8,98.0=>"fc8",
+ 79.0=>fa9,89.0=>fb9,99.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }),
+
+
+ error = map_guard_update(M0#{},M0#{}),
+ first = map_guard_update(M0#{},M0#{x=>first}),
+ second = map_guard_update(M0#{y=>old}, M0#{x=>second,y=>old}),
+ ok.
+
map_guard_update(M1, M2) when M1#{x=>first} =:= M2 -> first;
map_guard_update(M1, M2) when M1#{x=>second} =:= M2 -> second;
map_guard_update(M1, M2) when M1#{x:=third} =:= M2 -> third;
@@ -453,6 +1366,43 @@ t_guard_receive(Config) when is_list(Config) ->
done = call(Pid, done),
ok.
+-define(t_guard_receive_large_procs, 1500).
+
+t_guard_receive_large(Config) when is_list(Config) ->
+ M = lists:foldl(fun(_,#{procs := Ps } = M) ->
+ M#{ procs := Ps#{ spawn_link(fun() -> grecv_loop() end) => 0 }}
+ end, #{procs => #{}, done => 0}, lists:seq(1,?t_guard_receive_large_procs)),
+ lists:foreach(fun(Pid) ->
+ Pid ! {self(), hello}
+ end, maps:keys(maps:get(procs,M))),
+ ok = guard_receive_large_loop(M),
+ ok.
+
+guard_receive_large_loop(#{done := ?t_guard_receive_large_procs}) ->
+ ok;
+guard_receive_large_loop(M) ->
+ receive
+ #{pid := Pid, msg := hello} ->
+ case M of
+ #{done := Count, procs := #{Pid := 150}} ->
+ Pid ! {self(), done},
+ guard_receive_large_loop(M#{done := Count + 1});
+ #{procs := #{Pid := Count} = Ps} ->
+ Pid ! {self(), hello},
+ guard_receive_large_loop(M#{procs := Ps#{Pid := Count + 1}})
+ end
+ end.
+
+grecv_loop() ->
+ receive
+ {_, done} ->
+ ok;
+ {Pid, hello} ->
+ Pid ! #{pid=>self(), msg=>hello},
+ grecv_loop()
+ end.
+
+
call(Pid, M) ->
Pid ! {self(), M}, receive {Pid, Res} -> Res end.
@@ -486,6 +1436,11 @@ t_list_comprehension(Config) when is_list(Config) ->
Ls = id([#{<<2:301>> => I, "wat" => I + 1} || I <- [1,2,3]]),
[#{<<2:301>>:=1,"wat":=2},#{<<2:301>>:=2,"wat":=3},#{<<2:301>>:=3,"wat":=4}] = Ls,
[{1,2},{2,3},{3,4}] = id([{I2,I1} || #{"wat" := I1, <<2:301>> := I2} <- Ls]),
+
+ Ks = lists:seq($a,$z),
+ Ms = [#{[K1,K2]=>{K1,K2}} || K1 <- Ks, K2 <- Ks],
+ [#{"aa" := {$a,$a}},#{"ab":={$a,$b}}|_] = Ms,
+ [#{"zz" := {$z,$z}},#{"zy":={$z,$y}}|_] = lists:reverse(Ms),
ok.
t_guard_fun(Config) when is_list(Config) ->
@@ -535,7 +1490,7 @@ t_map_sort_literals(Config) when is_list(Config) ->
true = id(#{ c => 1, b => 1, a => 1 }) < id(#{ b => 1, c => 1, d => 1}),
true = id(#{ "a" => 1 }) < id(#{ <<"a">> => 1}),
false = id(#{ <<"a">> => 1 }) < id(#{ "a" => 1}),
- false = id(#{ 1 => 1 }) < id(#{ 1.0 => 1}),
+ true = id(#{ 1 => 1 }) < id(#{ 1.0 => 1}),
false = id(#{ 1.0 => 1 }) < id(#{ 1 => 1}),
%% value order
@@ -633,6 +1588,7 @@ t_build_and_match_nil(Config) when is_list(Config) ->
"treat" => V2,
[] => V1 }),
#{ [] := V3, [] := V3 } = id(#{ [] => V1, [] => V3 }),
+ #{ <<1>> := V3, [] := V1 } = id(#{ [] => V1, <<1>> => V3 }),
ok.
t_build_and_match_structure(Config) when is_list(Config) ->
@@ -726,8 +1682,8 @@ t_update_assoc_variables(Config) when is_list(Config) ->
%% Errors cases.
BadMap = id(badmap),
- {'EXIT',{badarg,_}} = (catch BadMap#{nonexisting=>val}),
- {'EXIT',{badarg,_}} = (catch <<>>#{nonexisting=>val}),
+ {'EXIT',{{badmap,BadMap},_}} = (catch BadMap#{nonexisting=>val}),
+ {'EXIT',{{badmap,<<>>},_}} = (catch <<>>#{nonexisting=>val}),
ok.
t_update_exact_variables(Config) when is_list(Config) ->
@@ -757,13 +1713,14 @@ t_update_exact_variables(Config) when is_list(Config) ->
#{ "wat" := 3, 2 := a } = id(#{ "wat" => 1, K2 => 2 }#{ K2 := a, "wat" := 3 }),
%% Errors cases.
- {'EXIT',{badarg,_}} = (catch ((id(nil))#{ a := b })),
- {'EXIT',{badarg,_}} = (catch M0#{nonexisting:=val}),
- {'EXIT',{badarg,_}} = (catch M0#{1.0:=v,1.0=>v2}),
- {'EXIT',{badarg,_}} = (catch M0#{42.0:=v,42:=v2}),
- {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
- {'EXIT',{badarg,_}} = (catch <<>>#{nonexisting:=val}),
- {'EXIT',{badarg,_}} = (catch M0#{<<0:257>> := val}), %% limitation
+ {'EXIT',{{badmap,nil},_}} = (catch ((id(nil))#{ a := b })),
+ {'EXIT',{{badkey,nonexisting},_}} = (catch M0#{nonexisting:=val}),
+ {'EXIT',{{badkey,1.0},_}} = (catch M0#{1.0:=v,1.0=>v2}),
+ {'EXIT',{{badkey,42},_}} = (catch M0#{42.0:=v,42:=v2}),
+ {'EXIT',{{badkey,42.0},_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
+ {'EXIT',{{badmap,<<>>},_}} = (catch <<>>#{nonexisting:=val}),
+ {'EXIT',{{badkey,<<0:257>>},_}} =
+ (catch M0#{<<0:257>> := val}), %limitation
ok.
t_nested_pattern_expressions(Config) when is_list(Config) ->
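The error cases above exercise the OTP 18 map-update semantics: ":=" updates a key that must already exist (otherwise a {badkey,Key} error is raised), "=>" may also insert a new key, and updating anything that is not a map raises {badmap,Term}. A minimal sketch of those rules, assuming a plain test module outside this patch (the function name is illustrative only):

map_update_semantics() ->
    M0 = #{a => 1},
    #{a := 2} = M0#{a := 2},                        %% := updates an existing key
    #{a := 1, b := 2} = M0#{b => 2},                %% => may also insert a new key
    {'EXIT',{{badkey,b},_}} = (catch M0#{b := 2}),  %% := on a missing key
    Bad = not_a_map,
    {'EXIT',{{badmap,not_a_map},_}} = (catch Bad#{a => 1}),  %% updating a non-map
    ok.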
diff --git a/lib/compiler/test/match_SUITE.erl b/lib/compiler/test/match_SUITE.erl
index ae7d764535..9aec0b3d4e 100644
--- a/lib/compiler/test/match_SUITE.erl
+++ b/lib/compiler/test/match_SUITE.erl
@@ -22,7 +22,8 @@
init_per_group/2,end_per_group/2,
pmatch/1,mixed/1,aliases/1,match_in_call/1,
untuplify/1,shortcut_boolean/1,letify_guard/1,
- selectify/1,underscore/1,coverage/1]).
+ selectify/1,underscore/1,match_map/1,map_vars_used/1,
+ coverage/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -33,9 +34,10 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[pmatch,mixed,aliases,match_in_call,untuplify,
- shortcut_boolean,letify_guard,selectify,underscore,coverage]}].
+ shortcut_boolean,letify_guard,selectify,
+ underscore,match_map,map_vars_used,coverage]}].
init_per_suite(Config) ->
@@ -139,6 +141,13 @@ aliases(Config) when is_list(Config) ->
?line {a,b} = list_alias2([a,b]),
?line {a,b} = list_alias3([a,b]),
+ %% Non-matching aliases.
+ none = mixed_aliases(<<42>>),
+ none = mixed_aliases([b]),
+ none = mixed_aliases([d]),
+ none = mixed_aliases({a,42}),
+ none = mixed_aliases(42),
+
ok.
str_alias(V) ->
@@ -242,6 +251,12 @@ list_alias2([X,Y]=[a,b]) ->
list_alias3([X,b]=[a,Y]) ->
{X,Y}.
+mixed_aliases(<<X:8>> = x) -> {a,X};
+mixed_aliases([b] = <<X:8>>) -> {b,X};
+mixed_aliases(<<X:8>> = {a,X}) -> {c,X};
+mixed_aliases([X] = <<X:8>>) -> {d,X};
+mixed_aliases(_) -> none.
+
%% OTP-7018.
match_in_call(Config) when is_list(Config) ->
@@ -400,6 +415,36 @@ underscore(Config) when is_list(Config) ->
_ = is_list(Config),
ok.
+-record(s, {map,t}).
+
+match_map(Config) when is_list(Config) ->
+ Map = #{key=>{x,y},ignore=>anything},
+ #s{map=Map,t={x,y}} = do_match_map(#s{map=Map}),
+ {a,#{k:={a,b,c}}} = do_match_map_2(#{k=>{a,b,c}}),
+ ok.
+
+do_match_map(#s{map=#{key:=Val}}=S) ->
+ %% Would crash with a 'badarg' exception.
+ S#s{t=Val}.
+
+do_match_map_2(Map) ->
+ case {a,Map} of
+ {a,#{k:=_}}=Tuple ->
+ Tuple
+ end.
+
+map_vars_used(Config) when is_list(Config) ->
+ {some,value} = do_map_vars_used(a, b, #{{a,b}=>42,v=>{some,value}}),
+ ok.
+
+do_map_vars_used(X, Y, Map) ->
+ case {X,Y} of
+ T ->
+ %% core_lib:is_var_used/2 would not consider T used.
+ #{T:=42,v:=Val} = Map,
+ Val
+ end.
+
coverage(Config) when is_list(Config) ->
%% Cover beam_dead.
ok = coverage_1(x, a),
diff --git a/lib/compiler/test/misc_SUITE.erl b/lib/compiler/test/misc_SUITE.erl
index 44c7161530..f3b92aad5b 100644
--- a/lib/compiler/test/misc_SUITE.erl
+++ b/lib/compiler/test/misc_SUITE.erl
@@ -60,7 +60,7 @@ all() ->
[{group,p}].
groups() ->
- [{p,[],%%test_lib:parallel(),
+ [{p,[],
[tobias,empty_string,md5,silly_coverage,
confused_literals,integer_encoding,override_bif]}].
@@ -225,14 +225,15 @@ silly_coverage(Config) when is_list(Config) ->
{label,2}|non_proper_list]}],99},
?line expect_error(fun() -> beam_bool:module(BoolInput, []) end),
- %% beam_dead
+ %% beam_dead. This is tricky. Our function must look OK to
+ %% beam_utils:clean_labels/1, but must crash beam_dead.
DeadInput = {?MODULE,[{foo,0}],[],
[{function,foo,0,2,
[{label,1},
{func_info,{atom,?MODULE},{atom,foo},0},
{label,2},
- {jump,bad}]}],99},
- ?line expect_error(fun() -> beam_block:module(DeadInput, []) end),
+ {test,is_eq_exact,{f,1},[bad,operands]}]}],99},
+ expect_error(fun() -> beam_dead:module(DeadInput, []) end),
%% beam_clean
CleanInput = {?MODULE,[{foo,0}],[],
@@ -279,6 +280,14 @@ silly_coverage(Config) when is_list(Config) ->
{label,2}|non_proper_list]}],99},
expect_error(fun() -> beam_z:module(BeamZInput, []) end),
+ %% beam_validator.
+ BeamValInput = {?MODULE,[{foo,0}],[],
+ [{function,foo,0,2,
+ [{label,1},
+ {func_info,{atom,?MODULE},{atom,foo},0},
+ {label,2}|non_proper_list]}],99},
+ expect_error(fun() -> beam_validator:module(BeamValInput, []) end),
+
ok.
expect_error(Fun) ->
@@ -329,8 +338,16 @@ integer_encoding_1(Config) ->
?line do_integer_encoding(-(id(1) bsl 10000), Src, Data),
?line do_integer_encoding(id(1) bsl 10000, Src, Data),
- ?line do_integer_encoding(2048, 0, Src, Data),
-
+ do_integer_encoding(1024, 0, Src, Data),
+ _ = [begin
+ B = 1 bsl I,
+ do_integer_encoding(-B-1, Src, Data),
+ do_integer_encoding(-B, Src, Data),
+ do_integer_encoding(-B+1, Src, Data),
+ do_integer_encoding(B-1, Src, Data),
+ do_integer_encoding(B, Src, Data),
+ do_integer_encoding(B+1, Src, Data)
+ end || I <- lists:seq(1, 128)],
io:put_chars(Src, "Last].\n\n"),
?line ok = file:close(Src),
io:put_chars(Data, "0].\n\n"),
@@ -363,11 +380,9 @@ do_integer_encoding(N, I0, Src, Data) ->
do_integer_encoding(I, Src, Data) ->
Str = integer_to_list(I),
- io:put_chars(Src, Str),
- io:put_chars(Src, ", \n"),
- io:put_chars(Data, Str),
- io:put_chars(Data, ", \n").
-
+ io:put_chars(Src, [Str,",\n"]),
+ io:put_chars(Data, [Str,",\n"]).
+
id(I) -> I.
diff --git a/lib/compiler/test/receive_SUITE.erl b/lib/compiler/test/receive_SUITE.erl
index 00a6e900d4..fb82bf6101 100644
--- a/lib/compiler/test/receive_SUITE.erl
+++ b/lib/compiler/test/receive_SUITE.erl
@@ -187,12 +187,13 @@ ref_opt(Config) when is_list(Config) ->
end.
ref_opt_1(Config) ->
- ?line DataDir = ?config(data_dir, Config),
- ?line PrivDir = ?config(priv_dir, Config),
+ DataDir = ?config(data_dir, Config),
+ PrivDir = ?config(priv_dir, Config),
Sources = filelib:wildcard(filename:join([DataDir,"ref_opt","*.{erl,S}"])),
- ?line test_lib:p_run(fun(Src) ->
- do_ref_opt(Src, PrivDir)
- end, Sources),
+ test_lib:p_run(fun(Src) ->
+ do_ref_opt(Src, PrivDir)
+ end, Sources),
+ cover_recv_instructions(),
ok.
do_ref_opt(Source, PrivDir) ->
@@ -202,9 +203,9 @@ do_ref_opt(Source, PrivDir) ->
{outdir,PrivDir}] ++
[from_asm || Ext =:= ".S" ]),
Base = filename:rootname(filename:basename(Source), Ext),
- code:purge(list_to_atom(Base)),
- BeamFile = filename:join(PrivDir, Base),
- code:load_abs(BeamFile),
+ code:purge(list_to_atom(Base)),
+ BeamFile = filename:join(PrivDir, Base),
+ code:load_abs(BeamFile),
ok = Mod:Mod(),
{beam_file,Mod,_,_,_,Code} = beam_disasm:file(BeamFile),
case Base of
@@ -232,6 +233,27 @@ collect_recv_opt_instrs(Code) ->
end] || {function,_,_,_,Is} <- Code],
lists:append(L).
+cover_recv_instructions() ->
+ %% We want to cover the handling of recv_mark and recv_set in beam_utils.
+ %% Since those instructions are introduced in a late optimization pass,
+ %% beam_utils:live_opt() will not see them unless the compilation is
+ %% started from a .S file. The compile_SUITE:asm/1 test case will
+ %% compile all test suite files to .S and then run them through the
+ %% compiler again.
+ %%
+    %% Here we will ensure that this module contains recv_mark
+ %% and recv_set instructions.
+ Pid = spawn_link(fun() ->
+ receive {Parent,Ref} ->
+ Parent ! Ref
+ end
+ end),
+ Ref = make_ref(),
+ Pid ! {self(),Ref},
+ receive
+ Ref -> ok
+ end.
+
export(Config) when is_list(Config) ->
Ref = make_ref(),
?line self() ! {result,Ref,42},
diff --git a/lib/compiler/test/record_SUITE.erl b/lib/compiler/test/record_SUITE.erl
index f736e14bf6..8cc90026ec 100644
--- a/lib/compiler/test/record_SUITE.erl
+++ b/lib/compiler/test/record_SUITE.erl
@@ -246,6 +246,14 @@ record_test_2(Config) when is_list(Config) ->
?line Barf = update_barf(Barf0),
?line #barf{a="abc",b=1} = id(Barf),
+ %% Test optimization of is_record/3.
+ false = case id({a,b}) of
+ {_,_}=Tuple -> is_record(Tuple, foo)
+ end,
+ false = case id(true) of
+ true=Bool -> is_record(Bool, foo)
+ end,
+
ok.
record_test_3(Config) when is_list(Config) ->
diff --git a/lib/compiler/test/test_lib.erl b/lib/compiler/test/test_lib.erl
index a8befbecd9..4ffac95489 100644
--- a/lib/compiler/test/test_lib.erl
+++ b/lib/compiler/test/test_lib.erl
@@ -18,11 +18,13 @@
%%
-module(test_lib).
--include("test_server.hrl").
+-include_lib("test_server/include/test_server.hrl").
-compile({no_auto_import,[binary_part/2]}).
--export([recompile/1,parallel/0,uniq/0,opt_opts/1,get_data_dir/1,
+-export([id/1,recompile/1,parallel/0,uniq/0,opt_opts/1,get_data_dir/1,
smoke_disasm/1,p_run/2,binary_part/2]).
+id(I) -> I.
+
recompile(Mod) when is_atom(Mod) ->
case whereis(cover_server) of
undefined -> ok;
@@ -44,6 +46,10 @@ smoke_disasm(File) when is_list(File) ->
Res = beam_disasm:file(File),
{beam_file,_Mod} = {element(1, Res),element(2, Res)}.
+%% If we are running cover, we don't want to run test cases that
+%% invoke the compiler in parallel, as doing so would probably
+%% be slower than running them sequentially.
+
parallel() ->
case ?t:is_cover() orelse erlang:system_info(schedulers) =:= 1 of
true -> [];
@@ -51,10 +57,8 @@ parallel() ->
end.
uniq() ->
- U0 = erlang:ref_to_list(make_ref()),
- U1 = re:replace(U0, "^#Ref", ""),
- U = re:replace(U1, "[^[A-Za-z0-9_]+", "_", [global]),
- re:replace(U, "_*$", "", [{return,list}]).
+ U = erlang:unique_integer([positive]),
+ "_" ++ integer_to_list(U).
%% Retrieve the "interesting" compiler options (options for optimization
%% and compatibility) for the given module.
@@ -90,13 +94,18 @@ get_data_dir(Config) ->
%% Will fail the test case if there were any errors.
p_run(Test, List) ->
+ S = erlang:system_info(schedulers),
N = case ?t:is_cover() of
false ->
- erlang:system_info(schedulers);
+ S + 1;
true ->
- %% Cover is running. Using more than one process
- %% will probably only slow down compilation.
- 1
+ %% Cover is running. Using too many processes
+ %% could slow us down. Measurements on my computer
+ %% showed that using 4 parallel processes was
+ %% slightly faster than using 3. Using more than
+ %% 4 would not buy us much and could actually be
+ %% slower.
+ max(S, 4)
end,
p_run_loop(Test, List, N, [], 0, 0).
diff --git a/lib/compiler/test/trycatch_SUITE.erl b/lib/compiler/test/trycatch_SUITE.erl
index 4530d08c77..80d93fbfa4 100644
--- a/lib/compiler/test/trycatch_SUITE.erl
+++ b/lib/compiler/test/trycatch_SUITE.erl
@@ -24,7 +24,8 @@
catch_oops/1,after_oops/1,eclectic/1,rethrow/1,
nested_of/1,nested_catch/1,nested_after/1,
nested_horrid/1,last_call_optimization/1,bool/1,
- plain_catch_coverage/1,andalso_orelse/1,get_in_try/1]).
+ plain_catch_coverage/1,andalso_orelse/1,get_in_try/1,
+ hockey/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -35,11 +36,12 @@ all() ->
[{group,p}].
groups() ->
- [{p,test_lib:parallel(),
+ [{p,[parallel],
[basic,lean_throw,try_of,try_after,catch_oops,
after_oops,eclectic,rethrow,nested_of,nested_catch,
nested_after,nested_horrid,last_call_optimization,
- bool,plain_catch_coverage,andalso_orelse,get_in_try]}].
+ bool,plain_catch_coverage,andalso_orelse,get_in_try,
+ hockey]}].
init_per_suite(Config) ->
@@ -790,7 +792,6 @@ nested_after_1({X1,C1,V1},
nested_horrid(Config) when is_list(Config) ->
- _V = {make_ref(),nested_horrid,4.711},
{[true,true],{[true,1.0],1.0}} =
nested_horrid_1({true,void,void}, 1.0),
ok.
@@ -944,3 +945,14 @@ get_valid_line([_|T]=Path, Annotations) ->
_:not_found ->
get_valid_line(T, Annotations)
end.
+
+hockey(_) ->
+ {'EXIT',{{badmatch,_},[_|_]}} = (catch hockey()),
+ ok.
+
+hockey() ->
+ %% beam_jump used to generate a call into the try block.
+ %% beam_validator disapproved.
+ receive _ -> (b = fun() -> ok end)
+ + hockey, +x after 0 -> ok end, try (a = fun() -> ok end) + hockey, +
+ y catch _ -> ok end.
diff --git a/lib/compiler/test/warnings_SUITE.erl b/lib/compiler/test/warnings_SUITE.erl
index be0348a92d..4e266875ee 100644
--- a/lib/compiler/test/warnings_SUITE.erl
+++ b/lib/compiler/test/warnings_SUITE.erl
@@ -38,8 +38,9 @@
-export([pattern/1,pattern2/1,pattern3/1,pattern4/1,
guard/1,bad_arith/1,bool_cases/1,bad_apply/1,
files/1,effect/1,bin_opt_info/1,bin_construction/1,
- comprehensions/1,maps/1,redundant_boolean_clauses/1,
- latin1_fallback/1]).
+ comprehensions/1,maps/1,maps_bin_opt_info/1,
+ redundant_boolean_clauses/1,
+ latin1_fallback/1,underscore/1,no_warnings/1]).
% Default timetrap timeout (set in init_per_testcase).
-define(default_timeout, ?t:minutes(2)).
@@ -64,7 +65,9 @@ groups() ->
[pattern,pattern2,pattern3,pattern4,guard,
bad_arith,bool_cases,bad_apply,files,effect,
bin_opt_info,bin_construction,comprehensions,maps,
- redundant_boolean_clauses,latin1_fallback]}].
+ maps_bin_opt_info,
+ redundant_boolean_clauses,latin1_fallback,
+ underscore,no_warnings]}].
init_per_suite(Config) ->
Config.
@@ -280,11 +283,12 @@ bad_arith(Config) when is_list(Config) ->
{3,sys_core_fold,{eval_failure,badarith}},
{9,sys_core_fold,nomatch_guard},
{9,sys_core_fold,{eval_failure,badarith}},
+ {9,sys_core_fold,{no_effect,{erlang,is_integer,1}}},
{10,sys_core_fold,nomatch_guard},
{10,sys_core_fold,{eval_failure,badarith}},
{15,sys_core_fold,{eval_failure,badarith}}
] }}],
- ?line [] = run(Config, Ts),
+ [] = run(Config, Ts),
ok.
bool_cases(Config) when is_list(Config) ->
@@ -368,7 +372,7 @@ files(Config) when is_list(Config) ->
%% Test warnings for term construction and BIF calls in effect context.
effect(Config) when is_list(Config) ->
- Ts = [{lc,
+ Ts = [{effect,
<<"
t(X) ->
case X of
@@ -474,6 +478,19 @@ effect(Config) when is_list(Config) ->
m9(Bs) ->
[{B,ok} = {B,foo:bar(B)} || B <- Bs],
ok.
+
+ m10(ConfigTableSize) ->
+ case ConfigTableSize of
+ apa ->
+ CurrentConfig = {id(camel_phase3),id(sms)},
+ case CurrentConfig of
+ {apa, bepa} -> ok;
+ _ -> ok
+ end
+ end,
+ ok.
+
+ id(I) -> I.
">>,
[],
{warnings,[{5,sys_core_fold,{no_effect,{erlang,is_integer,1}}},
@@ -578,11 +595,11 @@ maps(Config) when is_list(Config) ->
<<"
t() ->
M = {a,[]},
- {'EXIT',{badarg,_}} = (catch(M#{ a => 1})),
+ {'EXIT',{badarg,_}} = (catch(M#{ a => 1 })),
ok.
">>,
[],
- {warnings,[{4,v3_kernel,bad_map}]}},
+ {warnings,[{4,sys_core_fold,{eval_failure,badmap}}]}},
{bad_map_src2,
<<"
t() ->
@@ -592,7 +609,7 @@ maps(Config) when is_list(Config) ->
id(I) -> I.
">>,
[inline],
- {warnings,[{4,v3_kernel,bad_map}]}},
+ []},
{bad_map_src3,
<<"
t() ->
@@ -600,7 +617,7 @@ maps(Config) when is_list(Config) ->
ok.
">>,
[],
- {warnings,[{3,v3_core,bad_map}]}},
+ {warnings,[{3,v3_core,badmap}]}},
{ok_map_literal_key,
<<"
t() ->
@@ -618,6 +635,19 @@ maps(Config) when is_list(Config) ->
run(Config, Ts),
ok.
+maps_bin_opt_info(Config) when is_list(Config) ->
+ Ts = [{map_bsm,
+ <<"
+ t1(<<0:8,7:8,T/binary>>,#{val := I}=M) ->
+ t1(T, M#{val := I+1});
+ t1(<<_:8>>,M) ->
+ M.
+ ">>,
+ [bin_opt_info],
+ {warnings,[{2,beam_bsm,bin_opt}]}}],
+ [] = run(Config, Ts),
+ ok.
+
redundant_boolean_clauses(Config) when is_list(Config) ->
Ts = [{redundant_boolean_clauses,
<<"
@@ -678,6 +708,80 @@ latin1_fallback(Conf) when is_list(Conf) ->
ok.
+underscore(Config) when is_list(Config) ->
+ S0 = <<"f(A) ->
+ _VAR1 = <<A>>,
+ _VAR2 = {ok,A},
+ _VAR3 = [A],
+ ok.
+ g(A) ->
+ _VAR1 = A/0,
+ _VAR2 = date(),
+ ok.
+ h() ->
+ _VAR1 = fun() -> ok end,
+ ok.
+ i(A) ->
+ _VAR1 = #{A=>42},
+ ok.
+ ">>,
+ Ts0 = [{underscore0,
+ S0,
+ [],
+ {warnings,[{2,sys_core_fold,useless_building},
+ {3,sys_core_fold,useless_building},
+ {4,sys_core_fold,useless_building},
+ {7,sys_core_fold,result_ignored},
+ {8,sys_core_fold,{no_effect,{erlang,date,0}}},
+ {11,sys_core_fold,useless_building},
+ {14,sys_core_fold,useless_building}
+ ]}}],
+ [] = run(Config, Ts0),
+
+ %% Replace all "_VAR<digit>" variables with a plain underscore.
+ %% Now there should be no warnings.
+ S1 = re:replace(S0, "_VAR\\d+", "_", [global]),
+ io:format("~s\n", [S1]),
+ Ts1 = [{underscore1,S1,[],[]}],
+ [] = run(Config, Ts1),
+
+ ok.
+
+no_warnings(Config) when is_list(Config) ->
+ Ts = [{no_warnings,
+ <<"-record(r, {s=ordsets:new(),a,b}).
+
+ a() ->
+ R = #r{}, %No warning expected.
+ {R#r.a,R#r.b}.
+
+ b(X) ->
+ T = true,
+ Var = [X], %No warning expected.
+ case T of
+ false -> Var;
+ true -> []
+ end.
+
+ c() ->
+ R0 = {r,\"abc\",undefined,os:timestamp()}, %No warning.
+ case R0 of
+ {r,V1,_V2,V3} -> {r,V1,\"def\",V3}
+ end.
+
+ d(In0, Bool) ->
+ {In1,Int} = case id(Bool) of
+ false -> {In0,0}
+ end,
+ [In1,Int].
+
+ id(I) -> I.
+ ">>,
+ [],
+ []}],
+ run(Config, Ts),
+ ok.
+
%%%
%%% End of test cases.
%%%
@@ -699,10 +803,10 @@ run(Config, Tests) ->
%% Compiles a test module and returns the list of errors and warnings.
run_test(Conf, Test0, Warnings) ->
- Mod = "warnings_"++test_lib:uniq(),
- Filename = Mod ++ ".erl",
+ Module = "warnings_"++test_lib:uniq(),
+ Filename = Module ++ ".erl",
?line DataDir = ?privdir,
- Test = ["-module(", Mod, "). ", Test0],
+ Test = ["-module(", Module, "). ", Test0],
?line File = filename:join(DataDir, Filename),
?line Opts = [binary,export_all,return|Warnings],
?line ok = file:write_file(File, Test),
diff --git a/lib/compiler/test/z_SUITE.erl b/lib/compiler/test/z_SUITE.erl
new file mode 100644
index 0000000000..eff8a1877f
--- /dev/null
+++ b/lib/compiler/test/z_SUITE.erl
@@ -0,0 +1,62 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(z_SUITE).
+
+-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
+ init_per_group/2,end_per_group/2,
+ loaded/1]).
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ test_lib:recompile(?MODULE),
+ [loaded].
+
+groups() ->
+ [].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, Config) ->
+ Config.
+
+loaded(_Config) ->
+ 0 = do_loaded(code:all_loaded(), 0),
+ ok.
+
+do_loaded([{M,_}|Ms], E0) ->
+ E = try
+ _ = M:module_info(),
+ _ = M:module_info(functions),
+ E0
+ catch
+ C:Error ->
+ Stk = erlang:get_stacktrace(),
+ io:format("~p:~p\n~p\n", [C,Error,Stk]),
+ E0 + 1
+ end,
+ do_loaded(Ms, E);
+do_loaded([], E) -> E.
diff --git a/lib/compiler/vsn.mk b/lib/compiler/vsn.mk
index b1a6c15ac9..69f71ba5dd 100644
--- a/lib/compiler/vsn.mk
+++ b/lib/compiler/vsn.mk
@@ -1 +1 @@
-COMPILER_VSN = 5.0.3
+COMPILER_VSN = 6.0
diff --git a/lib/cosEvent/src/cosEvent.app.src b/lib/cosEvent/src/cosEvent.app.src
index 66b0d2e168..5ffd12bc6b 100644
--- a/lib/cosEvent/src/cosEvent.app.src
+++ b/lib/cosEvent/src/cosEvent.app.src
@@ -39,7 +39,7 @@
{applications, [orber, stdlib, kernel]},
{env, []},
{mod, {cosEventApp, []}},
- {runtime_dependencies, ["stdlib-2.0","orber-3.6.27","kernel-3.0","erts-6.0"]}
+ {runtime_dependencies, ["stdlib-2.0","orber-3.6.27","kernel-3.0","erts-7.0"]}
]}.
diff --git a/lib/cosEvent/src/oe_CosEventComm_PullerS_impl.erl b/lib/cosEvent/src/oe_CosEventComm_PullerS_impl.erl
index 5f2733e72d..9c22eafaab 100644
--- a/lib/cosEvent/src/oe_CosEventComm_PullerS_impl.erl
+++ b/lib/cosEvent/src/oe_CosEventComm_PullerS_impl.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -251,7 +251,8 @@ send_sync(_OE_This, _OE_From, State, Any) ->
store_event(DB, Max, Event) ->
case ets:info(DB, size) of
CurrentSize when CurrentSize < Max ->
- ets:insert(DB, {now(), Event});
+ ets:insert(DB, {{erlang:system_time(), erlang:unique_integer([positive])},
+ Event});
_ ->
orber:dbg("[~p] oe_CosEventComm_PullerS:store_event(~p); DB full drop event.",
[?LINE, Event], ?DEBUG_LEVEL),
diff --git a/lib/cosEvent/vsn.mk b/lib/cosEvent/vsn.mk
index 40bf1ba49d..3149020d7c 100644
--- a/lib/cosEvent/vsn.mk
+++ b/lib/cosEvent/vsn.mk
@@ -1,3 +1,2 @@
-
-COSEVENT_VSN = 2.1.15
+COSEVENT_VSN = 2.2
diff --git a/lib/cosEventDomain/src/cosEventDomain.app.src b/lib/cosEventDomain/src/cosEventDomain.app.src
index 60114b6a91..f218ac853e 100644
--- a/lib/cosEventDomain/src/cosEventDomain.app.src
+++ b/lib/cosEventDomain/src/cosEventDomain.app.src
@@ -28,6 +28,6 @@
{applications, [orber, stdlib, kernel]},
{env, []},
{mod, {cosEventDomainApp, []}},
- {runtime_dependencies, ["stdlib-2.0","orber-3.6.27","kernel-3.0","erts-6.0",
+ {runtime_dependencies, ["stdlib-2.0","orber-3.6.27","kernel-3.0","erts-7.0",
"cosNotification-1.1.21"]}
]}.
diff --git a/lib/cosEventDomain/src/cosEventDomainApp.erl b/lib/cosEventDomain/src/cosEventDomainApp.erl
index 734e4deccb..86069d9e09 100644
--- a/lib/cosEventDomain/src/cosEventDomainApp.erl
+++ b/lib/cosEventDomain/src/cosEventDomainApp.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2011. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -36,9 +36,6 @@
%%--------------- EXPORTS ------------------------------------
%% External MISC
-export([get_option/3,
- create_name/2,
- create_name/1,
- create_id/0,
create_id/1,
is_debug_compiled/0,
install/0,
@@ -222,31 +219,10 @@ get_option(Key, OptionList, DefaultList) ->
{error, "Invalid option"}
end
end.
-%%-----------------------------------------------------------%
-%% function : create_name/2
-%% Arguments:
-%% Returns :
-%% Exception:
-%% Effect :
-%%------------------------------------------------------------
-create_name(Name,Type) ->
- {MSec, Sec, USec} = erlang:now(),
- lists:concat(['oe_',node(),'_',Type,'_',Name,'_',MSec, '_', Sec, '_', USec]).
-
-%%-----------------------------------------------------------%
-%% function : create_name/1
-%% Arguments:
-%% Returns :
-%% Exception:
-%% Effect :
-%%------------------------------------------------------------
-create_name(Type) ->
- {MSec, Sec, USec} = erlang:now(),
- lists:concat(['oe_',node(),'_',Type,'_',MSec, '_', Sec, '_', USec]).
%%------------------------------------------------------------
-%% function : create_id/0
-%% Arguments: -
+%% function : create_id/1
+%% Arguments: CosEventDomainAdmin::DomainID (long)
%% Returns : CosEventDomainAdmin::DomainID (long)
%% Exception:
%% Purpose :
@@ -256,10 +232,6 @@ create_id(2147483647) ->
create_id(OldID) ->
OldID+1.
-
-create_id() ->
- {_A,_B,C}=now(),
- C.
%%------------------------------------------------------------
%% function : get_qos
%% Arguments:
diff --git a/lib/cosEventDomain/vsn.mk b/lib/cosEventDomain/vsn.mk
index 6317ed3c22..bdde1f6ab2 100644
--- a/lib/cosEventDomain/vsn.mk
+++ b/lib/cosEventDomain/vsn.mk
@@ -1,3 +1,2 @@
-
-COSEVENTDOMAIN_VSN = 1.1.14
+COSEVENTDOMAIN_VSN = 1.2
diff --git a/lib/cosFileTransfer/src/cosFileTransfer.app.src b/lib/cosFileTransfer/src/cosFileTransfer.app.src
index 21226b0c6b..033eec9700 100644
--- a/lib/cosFileTransfer/src/cosFileTransfer.app.src
+++ b/lib/cosFileTransfer/src/cosFileTransfer.app.src
@@ -38,6 +38,6 @@
{env, []},
{mod, {cosFileTransferApp, []}},
{runtime_dependencies, ["stdlib-2.0","ssl-5.3.4","orber-3.6.27","kernel-3.0",
- "inets-5.10","erts-6.0","cosProperty-1.1.17"]}
+ "inets-5.10","erts-7.0","cosProperty-1.1.17"]}
]}.
diff --git a/lib/cosFileTransfer/src/cosFileTransferApp.erl b/lib/cosFileTransfer/src/cosFileTransferApp.erl
index 443c917a97..bcc9f485a0 100644
--- a/lib/cosFileTransfer/src/cosFileTransferApp.erl
+++ b/lib/cosFileTransfer/src/cosFileTransferApp.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -208,8 +208,9 @@ type_check(Obj, Mod) ->
%% Effect :
%%------------------------------------------------------------
create_name(Type) ->
- {MSec, Sec, USec} = erlang:now(),
- lists:concat(['oe_',node(),'_',Type,'_',MSec, '_', Sec, '_', USec]).
+ Time = erlang:system_time(),
+ Unique = erlang:unique_integer([positive]),
+ lists:concat(['oe_',node(),'_',Type,'_',Time,'_',Unique]).
%%-----------------------------------------------------------%
diff --git a/lib/cosFileTransfer/test/fileTransfer_SUITE.erl b/lib/cosFileTransfer/test/fileTransfer_SUITE.erl
index dfe6fabfab..5e75a9919f 100644
--- a/lib/cosFileTransfer/test/fileTransfer_SUITE.erl
+++ b/lib/cosFileTransfer/test/fileTransfer_SUITE.erl
@@ -732,8 +732,9 @@ create_file_on_source_node({'NATIVE', _}, _Config, Host, FileName, Path, Data) -
?match(ok, file:write_file(FileName, list_to_binary(Data))).
create_name(Type) ->
- {MSec, Sec, USec} = erlang:now(),
- lists:concat([Type,'_',MSec, '_', Sec, '_', USec]).
+ Time = erlang:system_time(),
+ Unique = erlang:unique_integer([positive]),
+ lists:concat([Type, '_', Time, '_', Unique]).
diff --git a/lib/cosFileTransfer/vsn.mk b/lib/cosFileTransfer/vsn.mk
index f52a1bd800..00bfdb3087 100644
--- a/lib/cosFileTransfer/vsn.mk
+++ b/lib/cosFileTransfer/vsn.mk
@@ -1 +1 @@
-COSFILETRANSFER_VSN = 1.1.16
+COSFILETRANSFER_VSN = 1.2
diff --git a/lib/cosNotification/src/CosNotification_Common.erl b/lib/cosNotification/src/CosNotification_Common.erl
index af9b2d4368..cdaaeee7f8 100644
--- a/lib/cosNotification/src/CosNotification_Common.erl
+++ b/lib/cosNotification/src/CosNotification_Common.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2011. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -39,8 +39,9 @@
%%--------------- EXPORTS ------------------------------------
%% External MISC
-export([get_option/3,
- create_name/2,
+ create_name/0,
create_name/1,
+ create_name/2,
create_id/0,
create_id/1,
is_debug_compiled/0,
@@ -110,17 +111,20 @@ get_option(Key, OptionList, DefaultList) ->
{error, "Invalid option"}
end
end.
-%%-----------------------------------------------------------%
-%% function : create_name/2
+
+%%------------------------------------------------------------
+%% function : create_name
%% Arguments:
%% Returns :
-%% Exception:
-%% Effect :
+%% Effect   : Create a unique name to use when, for example, starting
+%% a new server.
%%------------------------------------------------------------
-create_name(Name,Type) ->
- {MSec, Sec, USec} = erlang:now(),
- lists:concat(['oe_',node(),'_',Type,'_',Name,'_',MSec, '_', Sec, '_', USec]).
-
+create_name() ->
+ Time = erlang:system_time(),
+ Unique = erlang:unique_integer([positive]),
+ lists:concat(['oe_',node(),'_',Time,'_',Unique]).
+
+
%%-----------------------------------------------------------%
%% function : create_name/1
%% Arguments:
@@ -129,8 +133,21 @@ create_name(Name,Type) ->
%% Effect :
%%------------------------------------------------------------
create_name(Type) ->
- {MSec, Sec, USec} = erlang:now(),
- lists:concat(['oe_',node(),'_',Type,'_',MSec, '_', Sec, '_', USec]).
+ Time = erlang:system_time(),
+ Unique = erlang:unique_integer([positive]),
+ lists:concat(['oe_',node(),'_',Type,'_',Time,'_',Unique]).
+
+%%-----------------------------------------------------------%
+%% function : create_name/2
+%% Arguments:
+%% Returns :
+%% Exception:
+%% Effect :
+%%------------------------------------------------------------
+create_name(Name,Type) ->
+ Time = erlang:system_time(),
+ Unique = erlang:unique_integer([positive]),
+ lists:concat(['oe_',node(),'_',Type,'_',Name,'_',Time,'_',Unique]).
%%------------------------------------------------------------
%% function : create_id/0
@@ -146,16 +163,16 @@ create_name(Type) ->
%%------------------------------------------------------------
create_id(-1) ->
1;
-create_id( 2147483647) ->
+create_id(2147483647) ->
-2147483648;
create_id(OldID) ->
OldID+1.
create_id() ->
- {_A,_B,C}=now(),
+ {_A,_B,C}=erlang:timestamp(),
C.
-%%-----------------------------------------------------------%
+%%------------------------------------------------------------
%% function : type_check
%% Arguments: Obj - objectrefernce to test.
%% Mod - Module which contains typeID/0.
diff --git a/lib/cosNotification/src/CosNotification_Definitions.hrl b/lib/cosNotification/src/CosNotification_Definitions.hrl
index 8325b5aa5e..5db081ec2e 100644
--- a/lib/cosNotification/src/CosNotification_Definitions.hrl
+++ b/lib/cosNotification/src/CosNotification_Definitions.hrl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -315,7 +315,9 @@
{tty, false},
{logfile, false},
{server_options, []}]).
--define(not_CreateDBKey, term_to_binary({now(), node()})).
+-define(not_CreateDBKey, term_to_binary({{erlang:system_time(),
+ erlang:unique_integer()},
+ node()})).
-define(DEBUG_LEVEL, 3).
diff --git a/lib/cosNotification/src/PullerSupplier_impl.erl b/lib/cosNotification/src/PullerSupplier_impl.erl
index 9f12f9c742..22e8355f3a 100644
--- a/lib/cosNotification/src/PullerSupplier_impl.erl
+++ b/lib/cosNotification/src/PullerSupplier_impl.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -887,7 +887,7 @@ callAny(_OE_THIS, OE_FROM, State, EventIn, Status) ->
%% Start timers which send a message each time we should push events. Only used
%% when this objects is defined to supply sequences.
start_timer(State) ->
- TS = now(),
+ TS = erlang:timestamp(),
case catch timer:send_after(timer:seconds(?get_PacingInterval(State)),
{pacing, TS}) of
{ok,PacTRef} ->
diff --git a/lib/cosNotification/src/cosNotification.app.src b/lib/cosNotification/src/cosNotification.app.src
index ad02eb4421..52ce164d46 100644
--- a/lib/cosNotification/src/cosNotification.app.src
+++ b/lib/cosNotification/src/cosNotification.app.src
@@ -117,6 +117,6 @@
{applications, [orber, stdlib, kernel]},
{env, []},
{mod, {cosNotificationApp, []}},
- {runtime_dependencies, ["stdlib-2.0","orber-3.6.27","kernel-3.0","erts-6.0",
+ {runtime_dependencies, ["stdlib-2.5","orber-3.6.27","kernel-3.0","erts-7.0",
"cosTime-1.1.14","cosEvent-2.1.15"]}
]}.
diff --git a/lib/cosNotification/src/cosNotificationApp.erl b/lib/cosNotification/src/cosNotificationApp.erl
index ba44163272..251779c558 100644
--- a/lib/cosNotification/src/cosNotificationApp.erl
+++ b/lib/cosNotification/src/cosNotificationApp.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -221,7 +221,7 @@ start_global_factory() ->
start_global_factory(Args) when is_list(Args) ->
SO = 'CosNotification_Common':get_option(server_options, Args, ?not_DEFAULT_SETTINGS),
- Name = create_name(),
+ Name = 'CosNotification_Common':create_name(),
SPEC = ['CosNotifyChannelAdmin_EventChannelFactory',Args,
[{sup_child, true},
{regname, {global, Name}}|SO]],
@@ -432,16 +432,4 @@ init(app_init) ->
'CosNotifyChannelAdmin_EventChannel_impl']}]}}.
-
-%%------------------------------------------------------------
-%% function : create_name
-%% Arguments:
-%% Returns :
-%% Effect : Create a unique name to use when, for eaxmple, starting
-%% a new server.
-%%------------------------------------------------------------
-create_name() ->
- {MSec, Sec, USec} = erlang:now(),
- lists:concat(['oe_',node(),'_',MSec, '_', Sec, '_', USec]).
-
%%--------------- END OF MODULE ------------------------------
diff --git a/lib/cosNotification/src/cosNotification_eventDB.erl b/lib/cosNotification/src/cosNotification_eventDB.erl
index 89332d53f2..f8e2384d15 100644
--- a/lib/cosNotification/src/cosNotification_eventDB.erl
+++ b/lib/cosNotification/src/cosNotification_eventDB.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -71,10 +71,8 @@
%% that the first and last Key change place. {K1,K2}<->{K2,K1} and
%% {K1,K2,K3}<->{K3,K2,K1}.
%%----------------------------------------------------------------------
-
-module(cosNotification_eventDB).
-
%%--------------- INCLUDES -----------------------------------
-include_lib("orber/include/corba.hrl").
-include_lib("orber/include/ifr_types.hrl").
@@ -221,16 +219,16 @@ gc_events(DBRef, _Priority) when ?is_TimeoutNotUsed(DBRef) ->
gc_events(DBRef, _Priority) when ?is_StopTNotSupported(DBRef) ->
ok;
gc_events(DBRef, Priority) ->
- {M,S,U} = now(),
+ TS = erlang:monotonic_time(),
+ {resolution, TR} = lists:keyfind(resolution, 1, erlang:system_info(os_monotonic_time_source)),
case get(oe_GC_timestamp) of
- Num when {M,S,U} > Num ->
- put(oe_GC_timestamp, {M,S+?get_GCTime(DBRef),U}),
+ Num when TS > Num ->
+ put(oe_GC_timestamp, TS + ?get_GCTime(DBRef) * TR),
spawn_link(?MODULE, gc_start, [DBRef, Priority]);
_->
ok
end.
-
%%------------------------------------------------------------
%% function : gc_start
%% Arguments:
@@ -266,13 +264,13 @@ gc_discard_DB({Key1, Key2, Key3}, DRef) ->
%% Returns :
%%------------------------------------------------------------
create_FIFO_Key() ->
- {M, S, U} = erlang:now(),
+ {M, S, U} = erlang:timestamp(),
-M*1000000000000 - S*1000000 - U.
%%------------------------------------------------------------
%% function : convert_FIFO_Key
%% Arguments:
-%% Returns : A now tuple
+%% Returns : A timestamp tuple
%% Comment : Used when we must reuse a timestamp, i.e., only
%% when we must reorder the DB.
%%------------------------------------------------------------
@@ -322,7 +320,7 @@ extract_start_time(#'CosNotification_StructuredEvent'
_ ->
false
end,
- convert_time(ST, TRef, now());
+ convert_time(ST, TRef, erlang:timestamp());
extract_start_time(_, _, _) ->
false.
@@ -337,12 +335,12 @@ extract_start_time(_, _, _) ->
%% - undefined eq. value needed but no filter associated.
%% Now - used when we want to reuse old TimeStamp which
%% must be done when changing QoS.
-%% Returns : A modified return from now().
+%% Returns : A modified return from erlang:timestamp().
%%------------------------------------------------------------
extract_deadline(_, _, _, _, false) ->
false;
extract_deadline(Event, DefaultT, StopTSupported, TRef, MappingVal) ->
- extract_deadline(Event, DefaultT, StopTSupported, TRef, MappingVal, now()).
+ extract_deadline(Event, DefaultT, StopTSupported, TRef, MappingVal, erlang:timestamp()).
extract_deadline(_, _, _, _, false, _) ->
false;
@@ -403,14 +401,14 @@ get_time_diff(UTC, TRef) ->
UB-LB.
check_deadline(DL) when is_tuple(DL) ->
- {M,S,U} = now(),
+ {M,S,U} = erlang:timestamp(),
DL >= {-M,-S,-U};
check_deadline(_DL) ->
%% This case will cover if no timeout is set.
false.
check_start_time(ST) when is_tuple(ST) ->
- {M,S,U} = now(),
+ {M,S,U} = erlang:timestamp(),
ST >= {-M,-S,-U};
check_start_time(_ST) ->
%% This case will cover if no earliest delivery time is set.
@@ -1139,8 +1137,10 @@ create_db(QoS, GCTime, GCLimit, TimeRef) ->
?is_TimeoutNotUsed(DBRef), ?is_StopTNotSupported(DBRef) ->
ok;
true ->
- {M,S,U} = now(),
- put(oe_GC_timestamp, {M,S+GCTime,U})
+ TS = erlang:monotonic_time(),
+ {resolution, TR} = lists:keyfind(resolution, 1,
+ erlang:system_info(os_monotonic_time_source)),
+ put(oe_GC_timestamp, TS+GCTime*TR)
end,
DBRef.
diff --git a/lib/cosNotification/test/notify_test_impl.erl b/lib/cosNotification/test/notify_test_impl.erl
index dae7777089..4fe246ef16 100644
--- a/lib/cosNotification/test/notify_test_impl.erl
+++ b/lib/cosNotification/test/notify_test_impl.erl
@@ -289,10 +289,10 @@ disconnect_pull_supplier(_Self, State) ->
%%--------------- LOCAL FUNCTIONS ----------------------------
delay(Obj, Event, Time, Mod, F) ->
- io:format("notify_test:delay(~p) TIME: ~p~n",[Event, now()]),
+ io:format("notify_test:delay(~p) TIME: ~p~n",[Event, erlang:timestamp()]),
timer:sleep(Time),
Mod:F(Obj, Event),
- io:format("notify_test:delay() DONE: ~p~n",[now()]),
+ io:format("notify_test:delay() DONE: ~p~n",[erlang:timestamp()]),
ok.
%%--------------- END OF MODULE ------------------------------
diff --git a/lib/cosNotification/vsn.mk b/lib/cosNotification/vsn.mk
index 28d6ec71bf..c1affdf0de 100644
--- a/lib/cosNotification/vsn.mk
+++ b/lib/cosNotification/vsn.mk
@@ -1,2 +1,2 @@
-COSNOTIFICATION_VSN = 1.1.21
+COSNOTIFICATION_VSN = 1.2
diff --git a/lib/cosProperty/src/CosPropertyService_PropertySetDef_impl.erl b/lib/cosProperty/src/CosPropertyService_PropertySetDef_impl.erl
index 157b243c53..788518c7bb 100644
--- a/lib/cosProperty/src/CosPropertyService_PropertySetDef_impl.erl
+++ b/lib/cosProperty/src/CosPropertyService_PropertySetDef_impl.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -128,7 +128,9 @@
%% {stop, Reason}
%%----------------------------------------------------------------------
init({DefMode, AllowedTypes, AllowedProperties, InitProperties, MyType}) ->
- Key = term_to_binary({now(), node()}),
+ Key = term_to_binary({{erlang:system_time(),
+ erlang:unique_integer()},
+ node()}),
_F = ?write_function(#oe_CosPropertyService{key=Key,
properties=InitProperties}),
write_result(mnesia:transaction(_F)),
diff --git a/lib/cosProperty/src/cosProperty.app.src b/lib/cosProperty/src/cosProperty.app.src
index b977bb5984..7fad7a602a 100644
--- a/lib/cosProperty/src/cosProperty.app.src
+++ b/lib/cosProperty/src/cosProperty.app.src
@@ -43,5 +43,5 @@
{env, []},
{mod, {cosProperty, []}},
{runtime_dependencies, ["stdlib-2.0","orber-3.6.27","mnesia-4.12",
- "kernel-3.0","erts-6.0"]}
+ "kernel-3.0","erts-7.0"]}
]}.
diff --git a/lib/cosProperty/src/cosProperty.erl b/lib/cosProperty/src/cosProperty.erl
index 2368ee3db6..57c35dedf9 100644
--- a/lib/cosProperty/src/cosProperty.erl
+++ b/lib/cosProperty/src/cosProperty.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -406,8 +406,9 @@ type_check(Obj, Mod) ->
%% Effect :
%%------------------------------------------------------------
create_name(Type) ->
- {MSec, Sec, USec} = erlang:now(),
- lists:concat(['oe_',node(),'_',Type,'_',MSec, '_', Sec, '_', USec]).
+ Time = erlang:system_time(),
+ Unique = erlang:unique_integer([positive]),
+ lists:concat(['oe_',node(),'_',Type,'_',Time,'_',Unique]).
%%--------------- END OF MODULE ------------------------------
diff --git a/lib/cosProperty/vsn.mk b/lib/cosProperty/vsn.mk
index 0f546a2da8..d96508c2d2 100644
--- a/lib/cosProperty/vsn.mk
+++ b/lib/cosProperty/vsn.mk
@@ -1,2 +1,2 @@
-COSPROPERTY_VSN = 1.1.17
+COSPROPERTY_VSN = 1.2
diff --git a/lib/cosTime/src/CosTime_TimeService_impl.erl b/lib/cosTime/src/CosTime_TimeService_impl.erl
index bac4ae087c..f44e7ba2f4 100644
--- a/lib/cosTime/src/CosTime_TimeService_impl.erl
+++ b/lib/cosTime/src/CosTime_TimeService_impl.erl
@@ -166,7 +166,7 @@ new_interval(_, _, _, _) ->
create_universal_time() ->
%% Time is supposed to be #100 nano-secs passed.
%% We add micro secs for a greater precision.
- {MS,S,US} = now(),
+ {MS,S,US} = erlang:timestamp(),
case catch calendar:datetime_to_gregorian_seconds(
calendar:now_to_universal_time({MS,S,US})) of
Secs when is_integer(Secs) ->
diff --git a/lib/cosTime/src/cosTime.app.src b/lib/cosTime/src/cosTime.app.src
index cd01de35cb..ac71fe1b29 100644
--- a/lib/cosTime/src/cosTime.app.src
+++ b/lib/cosTime/src/cosTime.app.src
@@ -27,6 +27,6 @@
{applications, [orber, stdlib, kernel]},
{env, []},
{mod, {cosTime, []}},
- {runtime_dependencies, ["stdlib-2.0","orber-3.6.27","kernel-3.0","erts-6.0",
+ {runtime_dependencies, ["stdlib-2.0","orber-3.6.27","kernel-3.0","erts-7.0",
"cosEvent-2.1.15"]}
]}.
diff --git a/lib/cosTime/src/cosTime.erl b/lib/cosTime/src/cosTime.erl
index f7d03650af..45f305df39 100644
--- a/lib/cosTime/src/cosTime.erl
+++ b/lib/cosTime/src/cosTime.erl
@@ -333,8 +333,9 @@ type_check(Obj, Mod) ->
%%------------------------------------------------------------
create_name(Type) ->
- {MSec, Sec, USec} = erlang:now(),
- lists:concat(['oe_',node(),'_',Type,'_',MSec, '_', Sec, '_', USec]).
+ Time = erlang:system_time(),
+ Unique = erlang:unique_integer([positive]),
+ lists:concat(['oe_',node(),'_',Type,'_',Time,'_',Unique]).
%%--------------- END OF MODULE ------------------------------
diff --git a/lib/cosTime/src/cosTimeApp.hrl b/lib/cosTime/src/cosTimeApp.hrl
index f3082816f7..bdf0bf7278 100644
--- a/lib/cosTime/src/cosTimeApp.hrl
+++ b/lib/cosTime/src/cosTimeApp.hrl
@@ -41,7 +41,7 @@
-define(max_TimeT, 18446744073709551616).
%% The calendar module uses year 0 as base for gregorian functions.
-%% 'ABSOULTE_TIME_DIFF' is #seconfs from year 0 until 15 october 1582, 00:00.
+%% 'ABSOLUTE_TIME_DIFF' is #seconds from year 0 until 15 October 1582, 00:00.
-define(ABSOLUTE_TIME_DIFF, 49947926400).
%% As above but diff year 0 to 00:00 GMT, January 1, 1970
-define(STANDARD_TIME_DIFF, 62167219200).
diff --git a/lib/cosTime/vsn.mk b/lib/cosTime/vsn.mk
index 9e9e5c0250..32416f0087 100644
--- a/lib/cosTime/vsn.mk
+++ b/lib/cosTime/vsn.mk
@@ -1,3 +1,2 @@
-COSTIME_VSN = 1.1.14
-
+COSTIME_VSN = 1.2
diff --git a/lib/cosTransactions/src/CosTransactions_TransactionFactory_impl.erl b/lib/cosTransactions/src/CosTransactions_TransactionFactory_impl.erl
index 36e37e2d5f..3954f04ad3 100644
--- a/lib/cosTransactions/src/CosTransactions_TransactionFactory_impl.erl
+++ b/lib/cosTransactions/src/CosTransactions_TransactionFactory_impl.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -145,8 +145,8 @@ create(_Self, State, TimeOut) when is_integer(TimeOut) ->
_ ->
if
TimeOut > 0 ->
- {MegaSecs, Secs, _Microsecs} = erlang:now(),
- EState2 = ?tr_set_alarm(EState, MegaSecs*1000000+Secs+TimeOut),
+ TimeStampSec = erlang:monotonic_time(seconds),
+ EState2 = ?tr_set_alarm(EState, TimeStampSec+TimeOut),
EState3 = ?tr_set_timeout(EState2, TimeOut*1000),
ETraP = ?tr_start_child(?SUP_ETRAP(EState3)),
{reply, ETraP, State};
diff --git a/lib/cosTransactions/src/ETraP_Common.erl b/lib/cosTransactions/src/ETraP_Common.erl
index dd68e9b038..dca1c1aaa9 100644
--- a/lib/cosTransactions/src/ETraP_Common.erl
+++ b/lib/cosTransactions/src/ETraP_Common.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -86,8 +86,9 @@ get_option(Key, OptionList, DefaultList) ->
%%------------------------------------------------------------
create_name(Name,Type) ->
- {MSec, Sec, USec} = erlang:now(),
- lists:concat(['oe_',node(),'_',Type,'_',Name,'_',MSec, '_', Sec, '_', USec]).
+ Time = erlang:system_time(),
+ Unique = erlang:unique_integer([positive]),
+ lists:concat(['oe_',node(),'_',Type,'_',Name,'_',Time,'_',Unique]).
%%------------------------------------------------------------
%% function : create_name/1
@@ -98,8 +99,9 @@ create_name(Name,Type) ->
%%------------------------------------------------------------
create_name(Type) ->
- {MSec, Sec, USec} = erlang:now(),
- lists:concat(['oe_',node(),'_',Type,'_',MSec, '_', Sec, '_', USec]).
+ Time = erlang:system_time(),
+ Unique = erlang:unique_integer([positive]),
+ lists:concat(['oe_',node(),'_',Type,'_',Time,'_',Unique]).
%%------------------------------------------------------------
%% function : try_timeout
@@ -114,10 +116,9 @@ try_timeout(TimeoutAt) ->
infinity ->
false;
_->
- {MegaSecs, Secs, _Microsecs} = erlang:now(),
- Time = MegaSecs*1000000+Secs,
+ TimeSec = erlang:monotonic_time(seconds),
if
- Time < TimeoutAt ->
+ TimeSec < TimeoutAt ->
false;
true ->
true
diff --git a/lib/cosTransactions/src/ETraP_Server_impl.erl b/lib/cosTransactions/src/ETraP_Server_impl.erl
index e2c5d88f9d..db23d6c166 100644
--- a/lib/cosTransactions/src/ETraP_Server_impl.erl
+++ b/lib/cosTransactions/src/ETraP_Server_impl.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,7 +38,8 @@
%% Log files are created in the current directory, which is why the
%% application requires read/write rights for current directory. The
%% file name looks like:
-%% "oe_nonode@nohost_subc_939_383117_295538" (the last part is now())
+%% "oe_nonode@nohost_subc_1429872479809947099_438" (the two last parts are
+%% erlang:system_time() and erlang:unique_integer([positive]))
%% It is equal to what the object is started as, i.e., {regname, {global, X}}.
%%
%% If the application is unable to read the log it will exit and the
diff --git a/lib/cosTransactions/src/cosTransactions.app.src b/lib/cosTransactions/src/cosTransactions.app.src
index 6b99915ad6..074d82f487 100644
--- a/lib/cosTransactions/src/cosTransactions.app.src
+++ b/lib/cosTransactions/src/cosTransactions.app.src
@@ -40,5 +40,5 @@
{applications, [orber, stdlib, kernel]},
{env, []},
{mod, {cosTransactions, []}},
- {runtime_dependencies, ["stdlib-2.0","orber-3.6.27","kernel-3.0","erts-6.0"]}
+ {runtime_dependencies, ["stdlib-2.0","orber-3.6.27","kernel-3.0","erts-7.0"]}
]}.
diff --git a/lib/cosTransactions/vsn.mk b/lib/cosTransactions/vsn.mk
index 7aed212523..929f8c73d1 100644
--- a/lib/cosTransactions/vsn.mk
+++ b/lib/cosTransactions/vsn.mk
@@ -1 +1 @@
-COSTRANSACTIONS_VSN = 1.2.14
+COSTRANSACTIONS_VSN = 1.3
diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c
index a42de9adb1..adacdcbc73 100644
--- a/lib/crypto/c_src/crypto.c
+++ b/lib/crypto/c_src/crypto.c
@@ -242,6 +242,7 @@ static ERL_NIF_TERM aes_cfb_8_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM
static ERL_NIF_TERM aes_cfb_128_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM aes_ctr_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM aes_ctr_stream_encrypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
+static ERL_NIF_TERM aes_ecb_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rand_bytes_1(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM strong_rand_bytes_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rand_bytes_3(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
@@ -379,6 +380,7 @@ static ErlNifFunc nif_funcs[] = {
{"aes_ctr_decrypt", 3, aes_ctr_encrypt},
{"aes_ctr_stream_encrypt", 2, aes_ctr_stream_encrypt},
{"aes_ctr_stream_decrypt", 2, aes_ctr_stream_encrypt},
+ {"aes_ecb_crypt", 3, aes_ecb_crypt},
{"rand_bytes", 1, rand_bytes_1},
{"strong_rand_bytes_nif", 1, strong_rand_bytes_nif},
{"rand_bytes", 3, rand_bytes_3},
@@ -410,7 +412,7 @@ static ErlNifFunc nif_funcs[] = {
{"bf_ecb_crypt", 3, bf_ecb_crypt},
{"blowfish_ofb64_encrypt", 3, blowfish_ofb64_encrypt},
- {"ec_key_generate", 1, ec_key_generate},
+ {"ec_key_generate", 2, ec_key_generate},
{"ecdsa_sign_nif", 4, ecdsa_sign_nif},
{"ecdsa_verify_nif", 5, ecdsa_verify_nif},
{"ecdh_compute_key_nif", 3, ecdh_compute_key_nif},
@@ -1686,14 +1688,15 @@ static ERL_NIF_TERM aes_cfb_8_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM
int new_ivlen = 0;
ERL_NIF_TERM ret;
- if (!enif_inspect_iolist_as_binary(env, argv[0], &key) || key.size != 16
+ if (!enif_inspect_iolist_as_binary(env, argv[0], &key)
+ || !(key.size == 16 || key.size == 24 || key.size == 32)
|| !enif_inspect_binary(env, argv[1], &ivec) || ivec.size != 16
|| !enif_inspect_iolist_as_binary(env, argv[2], &text)) {
return enif_make_badarg(env);
}
memcpy(ivec_clone, ivec.data, 16);
- AES_set_encrypt_key(key.data, 128, &aes_key);
+ AES_set_encrypt_key(key.data, key.size * 8, &aes_key);
AES_cfb8_encrypt((unsigned char *) text.data,
enif_make_new_binary(env, text.size, &ret),
text.size, &aes_key, ivec_clone, &new_ivlen,
@@ -1712,14 +1715,15 @@ static ERL_NIF_TERM aes_cfb_128_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TE
CHECK_OSE_CRYPTO();
- if (!enif_inspect_iolist_as_binary(env, argv[0], &key) || key.size != 16
+ if (!enif_inspect_iolist_as_binary(env, argv[0], &key)
+ || !(key.size == 16 || key.size == 24 || key.size == 32)
|| !enif_inspect_binary(env, argv[1], &ivec) || ivec.size != 16
|| !enif_inspect_iolist_as_binary(env, argv[2], &text)) {
return enif_make_badarg(env);
}
memcpy(ivec_clone, ivec.data, 16);
- AES_set_encrypt_key(key.data, 128, &aes_key);
+ AES_set_encrypt_key(key.data, key.size * 8, &aes_key);
AES_cfb128_encrypt((unsigned char *) text.data,
enif_make_new_binary(env, text.size, &ret),
text.size, &aes_key, ivec_clone, &new_ivlen,
@@ -2032,6 +2036,38 @@ static ERL_NIF_TERM chacha20_poly1305_decrypt(ErlNifEnv* env, int argc, const ER
#endif
}
+static ERL_NIF_TERM aes_ecb_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
+{/* (Key, Data, IsEncrypt) */
+ ErlNifBinary key_bin, data_bin;
+ AES_KEY aes_key;
+ int i;
+ unsigned char* ret_ptr;
+ ERL_NIF_TERM ret;
+
+ CHECK_OSE_CRYPTO();
+
+ if (!enif_inspect_iolist_as_binary(env, argv[0], &key_bin)
+ || (key_bin.size != 16 && key_bin.size != 32)
+ || !enif_inspect_iolist_as_binary(env, argv[1], &data_bin)
+ || data_bin.size % 16 != 0) {
+ return enif_make_badarg(env);
+ }
+
+ if (argv[2] == atom_true) {
+ i = AES_ENCRYPT;
+ AES_set_encrypt_key(key_bin.data, key_bin.size*8, &aes_key);
+ }
+ else {
+ i = AES_DECRYPT;
+ AES_set_decrypt_key(key_bin.data, key_bin.size*8, &aes_key);
+ }
+
+ ret_ptr = enif_make_new_binary(env, data_bin.size, &ret);
+ AES_ecb_encrypt(data_bin.data, ret_ptr, &aes_key, i);
+ CONSUME_REDS(env,data_bin);
+ return ret;
+}
+
static ERL_NIF_TERM rand_bytes_1(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{/* (Bytes) */
unsigned bytes;
@@ -2464,11 +2500,12 @@ done:
static ERL_NIF_TERM aes_cbc_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{/* (Key, IVec, Data, IsEncrypt) */
ErlNifBinary key_bin, ivec_bin, data_bin;
- AES_KEY aes_key;
unsigned char ivec[16];
- int i;
+ int enc, i = 0, outlen = 0;
+ EVP_CIPHER_CTX ctx;
+ const EVP_CIPHER *cipher = NULL;
unsigned char* ret_ptr;
- ERL_NIF_TERM ret;
+ ERL_NIF_TERM ret;
CHECK_OSE_CRYPTO();
@@ -2482,20 +2519,43 @@ static ERL_NIF_TERM aes_cbc_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM a
return enif_make_badarg(env);
}
- if (argv[3] == atom_true) {
- i = AES_ENCRYPT;
- AES_set_encrypt_key(key_bin.data, key_bin.size*8, &aes_key);
- }
- else {
- i = AES_DECRYPT;
- AES_set_decrypt_key(key_bin.data, key_bin.size*8, &aes_key);
- }
+ if (argv[3] == atom_true)
+ enc = 1;
+ else
+ enc = 0;
+
+ EVP_CIPHER_CTX_init(&ctx);
+
+ if (key_bin.size == 16)
+ cipher = EVP_aes_128_cbc();
+ else if (key_bin.size == 32)
+ cipher = EVP_aes_256_cbc();
+
+ memcpy(ivec, ivec_bin.data, 16); /* writeable copy */
+
+ /* openssl docs say we need to leave at least 3 blocks available
+ at the end of the buffer for EVP calls. let's be safe */
+ ret_ptr = enif_make_new_binary(env, data_bin.size + 16*3, &ret);
+
+ if (EVP_CipherInit_ex(&ctx, cipher, NULL, key_bin.data, ivec, enc) != 1)
+ return enif_make_badarg(env);
+
+ /* disable padding, we only handle whole blocks */
+ EVP_CIPHER_CTX_set_padding(&ctx, 0);
+
+ if (EVP_CipherUpdate(&ctx, ret_ptr, &i, data_bin.data, data_bin.size) != 1)
+ return enif_make_badarg(env);
+ outlen += i;
+ if (EVP_CipherFinal_ex(&ctx, ret_ptr + outlen, &i) != 1)
+ return enif_make_badarg(env);
+ outlen += i;
+
+ EVP_CIPHER_CTX_cleanup(&ctx);
- ret_ptr = enif_make_new_binary(env, data_bin.size, &ret);
- memcpy(ivec, ivec_bin.data, 16); /* writable copy */
- AES_cbc_encrypt(data_bin.data, ret_ptr, data_bin.size, &aes_key, ivec, i);
CONSUME_REDS(env,data_bin);
- return ret;
+
+ /* Return only the bytes actually written; the sub binary keeps the oversized parent binary referenced. */
+ return enif_make_sub_binary(env, ret, 0, outlen);
}
static ERL_NIF_TERM aes_ige_crypt_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
@@ -3689,32 +3749,37 @@ out:
static ERL_NIF_TERM ec_key_generate(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{
#if defined(HAVE_EC)
- EC_KEY *key = ec_key_new(env, argv[0]);
+ EC_KEY *key = NULL;
+ const EC_GROUP *group;
+ const EC_POINT *public_key;
+ ERL_NIF_TERM priv_key;
+ ERL_NIF_TERM pub_key = atom_undefined;
CHECK_OSE_CRYPTO();
- if (key && EC_KEY_generate_key(key)) {
- const EC_GROUP *group;
- const EC_POINT *public_key;
- ERL_NIF_TERM priv_key;
- ERL_NIF_TERM pub_key = atom_undefined;
-
- group = EC_KEY_get0_group(key);
- public_key = EC_KEY_get0_public_key(key);
+ if (!get_ec_key(env, argv[0], argv[1], atom_undefined, &key))
+ goto badarg;
- if (group && public_key) {
- pub_key = point2term(env, group, public_key,
- EC_KEY_get_conv_form(key));
- }
- priv_key = bn2term(env, EC_KEY_get0_private_key(key));
- EC_KEY_free(key);
- return enif_make_tuple2(env, pub_key, priv_key);
+ if (argv[1] == atom_undefined) {
+ if (!EC_KEY_generate_key(key))
+ goto badarg;
}
- else {
- if (key)
- EC_KEY_free(key);
- return enif_make_badarg(env);
+
+ group = EC_KEY_get0_group(key);
+ public_key = EC_KEY_get0_public_key(key);
+
+ if (group && public_key) {
+ pub_key = point2term(env, group, public_key,
+ EC_KEY_get_conv_form(key));
}
+ priv_key = bn2term(env, EC_KEY_get0_private_key(key));
+ EC_KEY_free(key);
+ return enif_make_tuple2(env, pub_key, priv_key);
+
+badarg:
+ if (key)
+ EC_KEY_free(key);
+ return enif_make_badarg(env);
#else
return atom_notsup;
#endif
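On the Erlang side, the relaxed key-size checks in aes_cfb_8_crypt and aes_cfb_128_crypt mean that 192- and 256-bit keys are now accepted where only 128-bit keys were before. A short usage sketch in the erl shell (key, IV, and plaintext values are arbitrary):

    1> Key = binary:copy(<<0>>, 32).    %% 256-bit key; previously rejected with badarg
    2> IVec = binary:copy(<<0>>, 16).
    3> Plain = <<"0123456789abcdef">>.
    4> Cipher = crypto:block_encrypt(aes_cfb128, Key, IVec, Plain).
    5> Plain = crypto:block_decrypt(aes_cfb128, Key, IVec, Cipher).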
diff --git a/lib/crypto/doc/src/crypto.xml b/lib/crypto/doc/src/crypto.xml
index 271130a9e6..4a8ba5c1bf 100644
--- a/lib/crypto/doc/src/crypto.xml
+++ b/lib/crypto/doc/src/crypto.xml
@@ -170,6 +170,36 @@
<funcs>
<func>
+ <name>block_encrypt(Type, Key, PlainText) -> CipherText</name>
+ <fsummary>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher</fsummary>
+ <type>
+ <v>Type = des_ecb | blowfish_ecb | aes_ecb </v>
+ <v>Key = block_key() </v>
+ <v>PlainText = iodata() </v>
+ </type>
+ <desc>
+ <p>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher.</p>
+ <p>May throw exception <c>notsup</c> in case the chosen <c>Type</c>
+ is not supported by the underlying OpenSSL implementation.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name>block_decrypt(Type, Key, CipherText) -> PlainText</name>
+ <fsummary>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher</fsummary>
+ <type>
+ <v>Type = des_ecb | blowfish_ecb | aes_ecb </v>
+ <v>Key = block_key() </v>
+ <v>CipherText = iodata() </v>
+ </type>
+ <desc>
+ <p>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher.</p>
+ <p>May throw exception <c>notsup</c> in case the chosen <c>Type</c>
+ is not supported by the underlying OpenSSL implementation.</p>
+ </desc>
+ </func>
+
+ <func>
<name>block_encrypt(Type, Key, Ivec, PlainText) -> CipherText</name>
<name>block_encrypt(AeadType, Key, Ivec, {AAD, PlainText}) -> {CipherText, CipherTag}</name>
<fsummary>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher</fsummary>
@@ -181,7 +211,7 @@
<v>AAD = IVec = CipherText = CipherTag = binary()</v>
</type>
<desc>
- <p>Encrypt <c>PlainText</c>according to <c>Type</c> block cipher.
+ <p>Encrypt <c>PlainText</c> according to <c>Type</c> block cipher.
<c>IVec</c> is an arbitrary initializing vector.</p>
<p>In AEAD (Authenticated Encryption with Associated Data) mode, encrypt
<c>PlainText</c>according to <c>Type</c> block cipher and calculate
@@ -203,7 +233,7 @@
<v>AAD = IVec = CipherText = CipherTag = binary()</v>
</type>
<desc>
- <p>Decrypt <c>CipherText</c>according to <c>Type</c> block cipher.
+ <p>Decrypt <c>CipherText</c> according to <c>Type</c> block cipher.
<c>IVec</c> is an arbitrary initializing vector.</p>
<p>In AEAD (Authenticated Encryption with Associated Data) mode, decrypt
<c>CipherText</c>according to <c>Type</c> block cipher and check the authenticity
@@ -269,7 +299,7 @@
<v>SrpUserParams = {user, [Generator::binary(), Prime::binary(), Version::atom()]}</v>
<v>SrpHostParams = {host, [Verifier::binary(), Generator::binary(), Prime::binary(), Version::atom()]}</v>
<v>PublicKey = dh_public() | ecdh_public() | srp_public() </v>
- <v>PrivKeyIn = undefined | dh_private() | srp_private() </v>
+ <v>PrivKeyIn = undefined | dh_private() | ecdh_private() | srp_private() </v>
<v>PrivKeyOut = dh_private() | ecdh_private() | srp_private() </v>
</type>
<desc>
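A short usage sketch of the new three-argument block_encrypt/block_decrypt documented above (erl shell; the key and the single 16-byte block are arbitrary, and for aes_ecb the data length must be a multiple of 16 bytes):

    1> Key = <<"0123456789abcdef">>.       %% 128-bit key; 256-bit keys are also accepted
    2> Plain = <<"exactly 16 bytes">>.
    3> Cipher = crypto:block_encrypt(aes_ecb, Key, Plain).
    4> Plain = crypto:block_decrypt(aes_ecb, Key, Cipher).

ECB uses no IV and encrypts each block independently, so identical plaintext blocks give identical ciphertext blocks; it is mainly useful for test vectors and as a building block for other modes.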
diff --git a/lib/crypto/doc/src/notes.xml b/lib/crypto/doc/src/notes.xml
index 605d61e8e4..a0ebc4b3dd 100644
--- a/lib/crypto/doc/src/notes.xml
+++ b/lib/crypto/doc/src/notes.xml
@@ -30,6 +30,23 @@
</header>
<p>This document describes the changes made to the Crypto application.</p>
+<section><title>Crypto 3.5</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Extend block_encrypt/decrypt for aes_cfb8 and aes_cfb128
to accept keys of length 128, 192, and 256 bits. Previously,
only 128-bit keys were accepted.</p>
+ <p>
+ Own Id: OTP-12467</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Crypto 3.4.2</title>
<section><title>Improvements and New Features</title>
diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl
index 7f82fa83fd..e8845ed52f 100644
--- a/lib/crypto/src/crypto.erl
+++ b/lib/crypto/src/crypto.erl
@@ -211,7 +211,7 @@ supports()->
[{hashs, Hashs},
{ciphers, [des_cbc, des_cfb, des3_cbc, des_ede3, blowfish_cbc,
blowfish_cfb64, blowfish_ofb64, blowfish_ecb, aes_cbc128, aes_cfb8, aes_cfb128,
- aes_cbc256, rc2_cbc, aes_ctr, rc4] ++ Ciphers},
+ aes_cbc256, rc2_cbc, aes_ctr, rc4, aes_ecb] ++ Ciphers},
{public_keys, [rsa, dss, dh, srp] ++ PubKeys}
].
@@ -368,19 +368,24 @@ block_decrypt(chacha20_poly1305, Key, Ivec, {AAD, Data, Tag}) ->
end;
block_decrypt(rc2_cbc, Key, Ivec, Data) ->
rc2_cbc_decrypt(Key, Ivec, Data).
--spec block_encrypt(des_ecb | blowfish_ecb, Key::iodata(), Data::iodata()) -> binary().
+
+-spec block_encrypt(des_ecb | blowfish_ecb | aes_ecb, Key::iodata(), Data::iodata()) -> binary().
block_encrypt(des_ecb, Key, Data) ->
des_ecb_encrypt(Key, Data);
block_encrypt(blowfish_ecb, Key, Data) ->
- blowfish_ecb_encrypt(Key, Data).
+ blowfish_ecb_encrypt(Key, Data);
+block_encrypt(aes_ecb, Key, Data) ->
+ aes_ecb_encrypt(Key, Data).
--spec block_decrypt(des_ecb | blowfish_ecb, Key::iodata(), Data::iodata()) -> binary().
+-spec block_decrypt(des_ecb | blowfish_ecb | aes_ecb, Key::iodata(), Data::iodata()) -> binary().
block_decrypt(des_ecb, Key, Data) ->
des_ecb_decrypt(Key, Data);
block_decrypt(blowfish_ecb, Key, Data) ->
- blowfish_ecb_decrypt(Key, Data).
+ blowfish_ecb_decrypt(Key, Data);
+block_decrypt(aes_ecb, Key, Data) ->
+ aes_ecb_decrypt(Key, Data).
-spec next_iv(des_cbc | des3_cbc | aes_cbc | aes_ige, Data::iodata()) -> binary().
@@ -588,9 +593,8 @@ generate_key(srp, {user, [Generator, Prime, Version]}, PrivateArg)
end,
user_srp_gen_key(Private, Generator, Prime);
-generate_key(ecdh, Curve, undefined) ->
- ec_key_generate(nif_curve_params(Curve)).
-
+generate_key(ecdh, Curve, PrivKey) ->
+ ec_key_generate(nif_curve_params(Curve), ensure_int_as_bin(PrivKey)).
compute_key(dh, OthersPublicKey, MyPrivateKey, DHParameters) ->
case dh_compute_key_nif(ensure_int_as_bin(OthersPublicKey),
@@ -1393,6 +1397,18 @@ aes_ctr_encrypt(_Key, _IVec, _Data) -> ?nif_stub.
aes_ctr_decrypt(_Key, _IVec, _Cipher) -> ?nif_stub.
%%
+%% AES - in electronic codebook mode (ECB)
+%%
+aes_ecb_encrypt(Key, Data) ->
+ aes_ecb_crypt(Key, Data, true).
+
+aes_ecb_decrypt(Key, Data) ->
+ aes_ecb_crypt(Key, Data, false).
+
+aes_ecb_crypt(_Key, __Data, _IsEncrypt) -> ?nif_stub.
+
+
+%%
%% AES - in counter mode (CTR) with state maintained for multi-call streaming
%%
-type ctr_state() :: { iodata(), binary(), binary(), integer() }.
@@ -1555,7 +1571,7 @@ dh_compute_key(OthersPublicKey, MyPrivateKey, DHParameters) ->
dh_compute_key_nif(_OthersPublicKey, _MyPrivateKey, _DHParameters) -> ?nif_stub.
-ec_key_generate(_Key) -> ?nif_stub.
+ec_key_generate(_Curve, _Key) -> ?nif_stub.
ecdh_compute_key_nif(_Others, _Curve, _My) -> ?nif_stub.
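With the new ec_key_generate/2 stub, generate_key(ecdh, Curve, PrivKey) can take a caller-supplied private key and return the matching public point instead of generating a fresh pair. A sketch of the round trip (erl shell; the curve choice is arbitrary, and the binary encoding of the point is assumed to be stable between the two calls):

    1> Curve = secp192r1.
    2> {Pub, Priv} = crypto:generate_key(ecdh, Curve).        %% fresh key pair, as before
    3> {Pub2, _} = crypto:generate_key(ecdh, Curve, Priv).    %% recompute the public point
    4> Pub2 =:= Pub.
    true

This is what the new do_generate/1 case in crypto_SUITE below checks against the published secp192r1 test vectors.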
diff --git a/lib/crypto/test/crypto_SUITE.erl b/lib/crypto/test/crypto_SUITE.erl
index 1031e6403f..ff7af1f2c1 100644
--- a/lib/crypto/test/crypto_SUITE.erl
+++ b/lib/crypto/test/crypto_SUITE.erl
@@ -58,6 +58,7 @@ all() ->
{group, aes_cfb8},
{group, aes_cfb128},
{group, aes_cbc256},
+ {group, aes_ecb},
{group, aes_ige256},
{group, rc2_cbc},
{group, rc4},
@@ -84,7 +85,7 @@ groups() ->
{dss, [], [sign_verify]},
{ecdsa, [], [sign_verify]},
{dh, [], [generate_compute]},
- {ecdh, [], [compute]},
+ {ecdh, [], [compute, generate]},
{srp, [], [generate_compute]},
{des_cbc, [], [block]},
{des_cfb, [], [block]},
@@ -96,6 +97,7 @@ groups() ->
{aes_cfb8,[], [block]},
{aes_cfb128,[], [block]},
{aes_cbc256,[], [block]},
+ {aes_ecb,[], [block]},
{aes_ige256,[], [block]},
{blowfish_cbc, [], [block]},
{blowfish_ecb, [], [block]},
@@ -243,6 +245,12 @@ compute(Config) when is_list(Config) ->
Gen = proplists:get_value(compute, Config),
lists:foreach(fun do_compute/1, Gen).
%%--------------------------------------------------------------------
+generate() ->
+ [{doc, " Test crypto:generate_key"}].
+generate(Config) when is_list(Config) ->
+ Gen = proplists:get_value(generate, Config),
+ lists:foreach(fun do_generate/1, Gen).
+%%--------------------------------------------------------------------
mod_pow() ->
[{doc, "mod_pow testing (A ^ M % P with bignums)"}].
mod_pow(Config) when is_list(Config) ->
@@ -494,6 +502,14 @@ do_compute({ecdh = Type, Pub, Priv, Curve, SharedSecret}) ->
ct:fail({{crypto, compute_key, [Type, Pub, Priv, Curve]}, {expected, SharedSecret}, {got, Other}})
end.
+do_generate({ecdh = Type, Curve, Priv, Pub}) ->
+ case crypto:generate_key(Type, Curve, Priv) of
+ {Pub, _} ->
+ ok;
+ {Other, _} ->
+ ct:fail({{crypto, generate_key, [Type, Priv, Curve]}, {expected, Pub}, {got, Other}})
+ end.
+
hexstr2point(X, Y) ->
<<4:8, (hexstr2bin(X))/binary, (hexstr2bin(Y))/binary>>.
@@ -721,7 +737,8 @@ group_config(srp, Config) ->
[{generate_compute, GenerateCompute} | Config];
group_config(ecdh, Config) ->
Compute = ecdh(),
- [{compute, Compute} | Config];
+ Generate = ecc(),
+ [{compute, Compute}, {generate, Generate} | Config];
group_config(dh, Config) ->
GenerateCompute = [dh()],
[{generate_compute, GenerateCompute} | Config];
@@ -749,6 +766,9 @@ group_config(aes_cbc128, Config) ->
group_config(aes_cbc256, Config) ->
Block = aes_cbc256(),
[{block, Block} | Config];
+group_config(aes_ecb, Config) ->
+ Block = aes_ecb(),
+ [{block, Block} | Config];
group_config(aes_ige256, Config) ->
Block = aes_ige256(),
[{block, Block} | Config];
@@ -1183,6 +1203,106 @@ aes_cbc256() ->
hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")}
].
+aes_ecb() ->
+ [
+ {aes_ecb,
+ <<"YELLOW SUBMARINE">>,
+ <<"YELLOW SUBMARINE">>},
+ {aes_ecb,
+ <<"0000000000000000">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"FFFFFFFFFFFFFFFF">>,
+ <<"FFFFFFFFFFFFFFFF">>},
+ {aes_ecb,
+ <<"3000000000000000">>,
+ <<"1000000000000001">>},
+ {aes_ecb,
+ <<"1111111111111111">>,
+ <<"1111111111111111">>},
+ {aes_ecb,
+ <<"0123456789ABCDEF">>,
+ <<"1111111111111111">>},
+ {aes_ecb,
+ <<"0000000000000000">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"FEDCBA9876543210">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"7CA110454A1A6E57">>,
+ <<"01A1D6D039776742">>},
+ {aes_ecb,
+ <<"0131D9619DC1376E">>,
+ <<"5CD54CA83DEF57DA">>},
+ {aes_ecb,
+ <<"07A1133E4A0B2686">>,
+ <<"0248D43806F67172">>},
+ {aes_ecb,
+ <<"3849674C2602319E">>,
+ <<"51454B582DDF440A">>},
+ {aes_ecb,
+ <<"04B915BA43FEB5B6">>,
+ <<"42FD443059577FA2">>},
+ {aes_ecb,
+ <<"0113B970FD34F2CE">>,
+ <<"059B5E0851CF143A">>},
+ {aes_ecb,
+ <<"0170F175468FB5E6">>,
+ <<"0756D8E0774761D2">>},
+ {aes_ecb,
+ <<"43297FAD38E373FE">>,
+ <<"762514B829BF486A">>},
+ {aes_ecb,
+ <<"07A7137045DA2A16">>,
+ <<"3BDD119049372802">>},
+ {aes_ecb,
+ <<"04689104C2FD3B2F">>,
+ <<"26955F6835AF609A">>},
+ {aes_ecb,
+ <<"37D06BB516CB7546">>,
+ <<"164D5E404F275232">>},
+ {aes_ecb,
+ <<"1F08260D1AC2465E">>,
+ <<"6B056E18759F5CCA">>},
+ {aes_ecb,
+ <<"584023641ABA6176">>,
+ <<"004BD6EF09176062">>},
+ {aes_ecb,
+ <<"025816164629B007">>,
+ <<"480D39006EE762F2">>},
+ {aes_ecb,
+ <<"49793EBC79B3258F">>,
+ <<"437540C8698F3CFA">>},
+ {aes_ecb,
+ <<"018310DC409B26D6">>,
+ <<"1D9D5C5018F728C2">>},
+ {aes_ecb,
+ <<"1C587F1C13924FEF">>,
+ <<"305532286D6F295A">>},
+ {aes_ecb,
+ <<"0101010101010101">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"1F1F1F1F0E0E0E0E">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"E0FEE0FEF1FEF1FE">>,
+ <<"0123456789ABCDEF">>},
+ {aes_ecb,
+ <<"0000000000000000">>,
+ <<"FFFFFFFFFFFFFFFF">>},
+ {aes_ecb,
+ <<"FFFFFFFFFFFFFFFF">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"0123456789ABCDEF">>,
+ <<"0000000000000000">>},
+ {aes_ecb,
+ <<"FEDCBA9876543210">>,
+ <<"FFFFFFFFFFFFFFFF">>}
+ ].
+
aes_ige256() ->
[{aes_ige256,
hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
@@ -1218,6 +1338,38 @@ aes_cfb8() ->
{aes_cfb8,
hexstr2bin("2b7e151628aed2a6abf7158809cf4f3c"),
hexstr2bin("26751F67A3CBB140B1808CF187A4F4DF"),
+ hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")},
+ {aes_cfb8,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("000102030405060708090a0b0c0d0e0f"),
+ hexstr2bin("6bc1bee22e409f96e93d7e117393172a")},
+ {aes_cfb8,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("cdc80d6fddf18cab34c25909c99a4174"),
+ hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")},
+ {aes_cfb8,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("67ce7f7f81173621961a2b70171d3d7a"),
+ hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")},
+ {aes_cfb8,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("2e1e8a1dd59b88b1c8e60fed1efac4c9"),
+ hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")},
+ {aes_cfb8,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("000102030405060708090a0b0c0d0e0f"),
+ hexstr2bin("6bc1bee22e409f96e93d7e117393172a")},
+ {aes_cfb8,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("dc7e84bfda79164b7ecd8486985d3860"),
+ hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")},
+ {aes_cfb8,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("39ffed143b28b1c832113c6331e5407b"),
+ hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")},
+ {aes_cfb8,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("df10132415e54b92a13ed0a8267ae2f9"),
hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")}
].
@@ -1237,6 +1389,38 @@ aes_cfb128() ->
{aes_cfb128,
hexstr2bin("2b7e151628aed2a6abf7158809cf4f3c"),
hexstr2bin("26751F67A3CBB140B1808CF187A4F4DF"),
+ hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")},
+ {aes_cfb128,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("000102030405060708090a0b0c0d0e0f"),
+ hexstr2bin("6bc1bee22e409f96e93d7e117393172a")},
+ {aes_cfb128,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("cdc80d6fddf18cab34c25909c99a4174"),
+ hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")},
+ {aes_cfb128,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("67ce7f7f81173621961a2b70171d3d7a"),
+ hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")},
+ {aes_cfb128,
+ hexstr2bin("8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b"),
+ hexstr2bin("2e1e8a1dd59b88b1c8e60fed1efac4c9"),
+ hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")},
+ {aes_cfb128,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("000102030405060708090a0b0c0d0e0f"),
+ hexstr2bin("6bc1bee22e409f96e93d7e117393172a")},
+ {aes_cfb128,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("dc7e84bfda79164b7ecd8486985d3860"),
+ hexstr2bin("ae2d8a571e03ac9c9eb76fac45af8e51")},
+ {aes_cfb128,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("39ffed143b28b1c832113c6331e5407b"),
+ hexstr2bin("30c81c46a35ce411e5fbc1191a0a52ef")},
+ {aes_cfb128,
+ hexstr2bin("603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4"),
+ hexstr2bin("df10132415e54b92a13ed0a8267ae2f9"),
hexstr2bin("f69f2445df4f9b17ad2b417be66c3710")}
].
@@ -1700,8 +1884,9 @@ dss_params() ->
18320614775012672475365915366944922415598782131828709277168615511695849821411624805195787607930033958243224786899641459701930253094446221381818858674389863050420226114787005820357372837321561754462061849169568607689530279303056075793886577588606958623645901271866346406773590024901668622321064384483571751669].
ec_key_named() ->
- {D2_pub, D2_priv} = crypto:generate_key(ecdh, sect113r2),
- {[D2_priv, sect113r2], [D2_pub, sect113r2]}.
+ Curve = secp112r2,
+ {D2_pub, D2_priv} = crypto:generate_key(ecdh, Curve),
+ {[D2_priv, Curve], [D2_pub, Curve]}.
ec_msg() ->
<<99,234,6,64,190,237,201,99,80,248,58,40,70,45,149,218,5,246,242,63>>.
@@ -1968,6 +2153,27 @@ rsa_oaep() ->
Msg = hexstr2bin("750c4047f547e8e41411856523298ac9bae245efaf1397fbe56f9dd5"),
{rsa, Public, Private, Msg, rsa_pkcs1_oaep_padding}.
+ecc() ->
+%% http://point-at-infinity.org/ecc/nisttv
+%%
+%% Test vectors for the NIST elliptic curves P192, P224, P256, P384, P521,
+%% B163, B233, B283, B409, B571, K163, K233, K283, K409 and K571. For more
+%% information about the curves see
+%% http://csrc.nist.gov/encryption/dss/ecdsa/NISTReCur.pdf
+%%
+ [{ecdh,secp192r1,1,
+ hexstr2point("188DA80EB03090F67CBF20EB43A18800F4FF0AFD82FF1012",
+ "07192B95FFC8DA78631011ED6B24CDD573F977A11E794811")},
+ {ecdh,secp192r1,2,
+ hexstr2point("DAFEBF5828783F2AD35534631588A3F629A70FB16982A888",
+ "DD6BDA0D993DA0FA46B27BBC141B868F59331AFA5C7E93AB")},
+ {ecdh,secp192r1,3,
+ hexstr2point("76E32A2557599E6EDCD283201FB2B9AADFD0D359CBB263DA",
+ "782C37E372BA4520AA62E0FED121D49EF3B543660CFD05FD")},
+ {ecdh,secp192r1,4,
+ hexstr2point("35433907297CC378B0015703374729D7A4FE46647084E4BA",
+ "A2649984F2135C301EA3ACB0776CD4F125389B311DB3BE32")}].
+
no_padding() ->
Public = [_, Mod] = rsa_public(),
Private = rsa_private(),
diff --git a/lib/crypto/test/old_crypto_SUITE.erl b/lib/crypto/test/old_crypto_SUITE.erl
index 040edbf092..80306927c5 100644
--- a/lib/crypto/test/old_crypto_SUITE.erl
+++ b/lib/crypto/test/old_crypto_SUITE.erl
@@ -1887,9 +1887,9 @@ ec(Config) when is_list(Config) ->
ec_do() ->
%% test for a name curve
- {D2_pub, D2_priv} = crypto:generate_key(ecdh, sect113r2),
- PrivECDH = [D2_priv, sect113r2],
- PubECDH = [D2_pub, sect113r2],
+ {D2_pub, D2_priv} = crypto:generate_key(ecdh, secp112r2),
+ PrivECDH = [D2_priv, secp112r2],
+ PubECDH = [D2_pub, secp112r2],
%%TODO: find a published test case for a EC key
%% test for a full specified curve and public key,
diff --git a/lib/crypto/vsn.mk b/lib/crypto/vsn.mk
index b87685cb3f..55b1b3e8c4 100644
--- a/lib/crypto/vsn.mk
+++ b/lib/crypto/vsn.mk
@@ -1 +1 @@
-CRYPTO_VSN = 3.4.2
+CRYPTO_VSN = 3.6
diff --git a/lib/debugger/doc/src/notes.xml b/lib/debugger/doc/src/notes.xml
index b4baa2a1cd..7384189a6f 100644
--- a/lib/debugger/doc/src/notes.xml
+++ b/lib/debugger/doc/src/notes.xml
@@ -32,6 +32,21 @@
<p>This document describes the changes made to the Debugger
application.</p>
+<section><title>Debugger 4.0.3</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
Fix saving of state, which did not work on Mac.</p>
+ <p>
+ Own Id: OTP-12378</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Debugger 4.0.2</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/debugger/src/dbg_icmd.erl b/lib/debugger/src/dbg_icmd.erl
index ce12c1beb3..b9d7506cde 100644
--- a/lib/debugger/src/dbg_icmd.erl
+++ b/lib/debugger/src/dbg_icmd.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1998-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1998-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
diff --git a/lib/debugger/src/dbg_ieval.erl b/lib/debugger/src/dbg_ieval.erl
index 96f9f91808..e6da8409d4 100644
--- a/lib/debugger/src/dbg_ieval.erl
+++ b/lib/debugger/src/dbg_ieval.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1998-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1998-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -650,24 +650,19 @@ expr({tuple,Line,Es0}, Bs0, Ieval) ->
{value,list_to_tuple(Vs),Bs};
%% Map
-expr({map,Line,Fs0}, Bs0, Ieval) ->
- {Fs,Bs} = eval_map_fields(Fs0, Bs0, Ieval#ieval{line=Line,top=false}),
- Value = lists:foldl(fun ({map_assoc,K,V}, Mi) -> maps:put(K,V,Mi) end,
- #{}, Fs),
- {value,Value,Bs};
+expr({map,Line,Fs}, Bs0, Ieval) ->
+ {Map,Bs} = eval_new_map_fields(Fs, Bs0, Ieval#ieval{line=Line,top=false},
+ fun expr/3),
+ {value,Map,Bs};
expr({map,Line,E0,Fs0}, Bs0, Ieval0) ->
Ieval = Ieval0#ieval{line=Line,top=false},
{value,E,Bs1} = expr(E0, Bs0, Ieval),
- case E of
- #{} ->
- {Fs,Bs2} = eval_map_fields(Fs0, Bs0, Ieval),
- Value = lists:foldl(fun ({map_assoc,K,V}, Mi) -> maps:put(K,V,Mi);
- ({map_exact,K,V}, Mi) -> maps:update(K,V,Mi)
- end, E, Fs),
- {value,Value,merge_bindings(Bs2, Bs1, Ieval)};
- _ ->
- exception(error, {badarg,E}, Bs1, Ieval)
- end;
+ {Fs,Bs2} = eval_map_fields(Fs0, Bs0, Ieval),
+ _ = maps:put(k, v, E), %Validate map.
+ Value = lists:foldl(fun ({map_assoc,K,V}, Mi) -> maps:put(K,V,Mi);
+ ({map_exact,K,V}, Mi) -> maps:update(K,V,Mi)
+ end, E, Fs),
+ {value,Value,merge_bindings(Bs2, Bs1, Ieval)};
%% A block of statements
expr({block,Line,Es},Bs,Ieval) ->
seq(Es, Bs, Ieval#ieval{line=Line});
@@ -1477,11 +1472,13 @@ guard_expr({cons,_,H0,T0}, Bs) ->
guard_expr({tuple,_,Es0}, Bs) ->
{values,Es} = guard_exprs(Es0, Bs),
{value,list_to_tuple(Es)};
-guard_expr({map,_,Fs0}, Bs) ->
- Fs = eval_map_fields_guard(Fs0, Bs),
- Value = lists:foldl(fun ({map_assoc,K,V}, Mi) -> maps:put(K,V,Mi) end,
- #{}, Fs),
- {value,Value};
+guard_expr({map,_,Fs}, Bs0) ->
+ F = fun (G0, B0, _) ->
+ {value,G} = guard_expr(G0, B0),
+ {value,G,B0}
+ end,
+ {Map,_} = eval_new_map_fields(Fs, Bs0, #ieval{top=false}, F),
+ {value,Map};
guard_expr({map,_,E0,Fs0}, Bs) ->
{value,E} = guard_expr(E0, Bs),
Fs = eval_map_fields_guard(Fs0, Bs),
@@ -1530,6 +1527,17 @@ eval_map_fields([{map_field_exact,Line,K0,V0}|Fs], Bs0, Ieval0, F, Acc) ->
eval_map_fields([], Bs, _Ieval, _F, Acc) ->
{lists:reverse(Acc),Bs}.
+eval_new_map_fields(Fs, Bs0, Ieval, F) ->
+ eval_new_map_fields(Fs, Bs0, Ieval, F, []).
+
+eval_new_map_fields([{Line,K0,V0}|Fs], Bs0, Ieval0, F, Acc) ->
+ Ieval = Ieval0#ieval{line=Line},
+ {value,K,Bs1} = F(K0, Bs0, Ieval),
+ {value,V,Bs2} = F(V0, Bs1, Ieval),
+ eval_new_map_fields(Fs, Bs2, Ieval0, F, [{K,V}|Acc]);
+eval_new_map_fields([], Bs, _, _, Acc) ->
+ {maps:from_list(lists:reverse(Acc)),Bs}.
+
%% match(Pattern,Term,Bs) -> {match,Bs} | nomatch
match(Pat, Term, Bs) ->
try match1(Pat, Term, Bs, Bs)
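The single maps:put(k, v, E) call above acts purely as a type check: if E is not a map it raises error:{badmap,E}, so interpreted map updates now fail the same way compiled code does (badmap instead of the old badarg). A minimal sketch of the pattern, with an illustrative module name:

    -module(map_update_sketch).
    -export([update/1]).

    %% update([camembert]) exits with {badmap,[camembert]},
    %% matching the updated expectation in int_eval_SUITE.
    update(E) ->
        _ = maps:put(k, v, E),              %% validate that E is a map
        E#{hello => 0, price => 0}.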
diff --git a/lib/debugger/src/dbg_iload.erl b/lib/debugger/src/dbg_iload.erl
index ad05a7c529..379ffe8ce4 100644
--- a/lib/debugger/src/dbg_iload.erl
+++ b/lib/debugger/src/dbg_iload.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1998-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1998-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -163,11 +163,11 @@ clauses([C0|Cs]) ->
[C1|clauses(Cs)];
clauses([]) -> [].
-clause({clause,Line,H0,G0,B0}, Lc) ->
+clause({clause,Anno,H0,G0,B0}, Lc) ->
H1 = head(H0),
G1 = guard(G0),
B1 = exprs(B0, Lc),
- {clause,Line,H1,G1,B1}.
+ {clause,ln(Anno),H1,G1,B1}.
head(Ps) -> patterns(Ps).
@@ -181,46 +181,46 @@ patterns([]) -> [].
%% N.B. Only valid patterns are included here.
-pattern({var,Line,V}) -> {var,Line,V};
-pattern({char,Line,I}) -> {value,Line,I};
-pattern({integer,Line,I}) -> {value,Line,I};
-pattern({match,Line,Pat1,Pat2}) ->
- {match,Line,pattern(Pat1),pattern(Pat2)};
-pattern({float,Line,F}) -> {value,Line,F};
-pattern({atom,Line,A}) -> {value,Line,A};
-pattern({string,Line,S}) -> {value,Line,S};
-pattern({nil,Line}) -> {value,Line,[]};
-pattern({cons,Line,H0,T0}) ->
+pattern({var,Anno,V}) -> {var,ln(Anno),V};
+pattern({char,Anno,I}) -> {value,ln(Anno),I};
+pattern({integer,Anno,I}) -> {value,ln(Anno),I};
+pattern({match,Anno,Pat1,Pat2}) ->
+ {match,ln(Anno),pattern(Pat1),pattern(Pat2)};
+pattern({float,Anno,F}) -> {value,ln(Anno),F};
+pattern({atom,Anno,A}) -> {value,ln(Anno),A};
+pattern({string,Anno,S}) -> {value,ln(Anno),S};
+pattern({nil,Anno}) -> {value,ln(Anno),[]};
+pattern({cons,Anno,H0,T0}) ->
H1 = pattern(H0),
T1 = pattern(T0),
- {cons,Line,H1,T1};
-pattern({tuple,Line,Ps0}) ->
+ {cons,ln(Anno),H1,T1};
+pattern({tuple,Anno,Ps0}) ->
Ps1 = pattern_list(Ps0),
- {tuple,Line,Ps1};
-pattern({map,Line,Fs0}) ->
+ {tuple,ln(Anno),Ps1};
+pattern({map,Anno,Fs0}) ->
Fs1 = lists:map(fun ({map_field_exact,L,K,V}) ->
{map_field_exact,L,expr(K, false),pattern(V)}
end, Fs0),
- {map,Line,Fs1};
-pattern({op,_,'-',{integer,Line,I}}) ->
- {value,Line,-I};
-pattern({op,_,'+',{integer,Line,I}}) ->
- {value,Line,I};
-pattern({op,_,'-',{char,Line,I}}) ->
- {value,Line,-I};
-pattern({op,_,'+',{char,Line,I}}) ->
- {value,Line,I};
-pattern({op,_,'-',{float,Line,I}}) ->
- {value,Line,-I};
-pattern({op,_,'+',{float,Line,I}}) ->
- {value,Line,I};
-pattern({bin,Line,Grp}) ->
+ {map,ln(Anno),Fs1};
+pattern({op,_,'-',{integer,Anno,I}}) ->
+ {value,ln(Anno),-I};
+pattern({op,_,'+',{integer,Anno,I}}) ->
+ {value,ln(Anno),I};
+pattern({op,_,'-',{char,Anno,I}}) ->
+ {value,ln(Anno),-I};
+pattern({op,_,'+',{char,Anno,I}}) ->
+ {value,ln(Anno),I};
+pattern({op,_,'-',{float,Anno,I}}) ->
+ {value,ln(Anno),-I};
+pattern({op,_,'+',{float,Anno,I}}) ->
+ {value,ln(Anno),I};
+pattern({bin,Anno,Grp}) ->
Grp1 = pattern_list(Grp),
- {bin,Line,Grp1};
-pattern({bin_element,Line,Expr,Size,Type}) ->
+ {bin,ln(Anno),Grp1};
+pattern({bin_element,Anno,Expr,Size,Type}) ->
Expr1 = pattern(Expr),
Size1 = expr(Size, false),
- {bin_element,Line,Expr1,Size1,Type}.
+ {bin_element,ln(Anno),Expr1,Size1,Type}.
%% These patterns are processed "in parallel" for purposes of variable
%% definition etc.
@@ -240,90 +240,89 @@ and_guard([G0|Gs]) ->
[G1|and_guard(Gs)];
and_guard([]) -> [].
-guard_test({call,Line,{remote,_,{atom,_,erlang},{atom,_,F}},As0}) ->
+guard_test({call,Anno,{remote,_,{atom,_,erlang},{atom,_,F}},As0}) ->
As = gexpr_list(As0),
- {safe_bif,Line,erlang,F,As};
-guard_test({op,Line,Op,L0}) ->
+ {safe_bif,ln(Anno),erlang,F,As};
+guard_test({op,Anno,Op,L0}) ->
true = erl_internal:arith_op(Op, 1) orelse %Assertion.
erl_internal:bool_op(Op, 1),
L1 = gexpr(L0),
- {safe_bif,Line,erlang,Op,[L1]};
-guard_test({op,Line,Op,L0,R0}) when Op =:= 'andalso'; Op =:= 'orelse' ->
+ {safe_bif,ln(Anno),erlang,Op,[L1]};
+guard_test({op,Anno,Op,L0,R0}) when Op =:= 'andalso'; Op =:= 'orelse' ->
L1 = gexpr(L0),
R1 = gexpr(R0), %They see the same variables
- {Op,Line,L1,R1};
-guard_test({op,Line,Op,L0,R0}) ->
+ {Op,ln(Anno),L1,R1};
+guard_test({op,Anno,Op,L0,R0}) ->
true = erl_internal:comp_op(Op, 2) orelse %Assertion.
erl_internal:bool_op(Op, 2) orelse
erl_internal:arith_op(Op, 2),
L1 = gexpr(L0),
R1 = gexpr(R0), %They see the same variables
- {safe_bif,Line,erlang,Op,[L1,R1]};
+ {safe_bif,ln(Anno),erlang,Op,[L1,R1]};
guard_test({var,_,_}=V) ->V; % Boolean var
-guard_test({atom,Line,true}) -> {value,Line,true};
+guard_test({atom,Anno,true}) -> {value,ln(Anno),true};
%% All other constants at this level means false.
-guard_test({atom,Line,_}) -> {value,Line,false};
-guard_test({integer,Line,_}) -> {value,Line,false};
-guard_test({char,Line,_}) -> {value,Line,false};
-guard_test({float,Line,_}) -> {value,Line,false};
-guard_test({string,Line,_}) -> {value,Line,false};
-guard_test({nil,Line}) -> {value,Line,false};
-guard_test({cons,Line,_,_}) -> {value,Line,false};
-guard_test({tuple,Line,_}) -> {value,Line,false};
-guard_test({map,Line,_}) -> {value,Line,false};
-guard_test({map,Line,_,_}) -> {value,Line,false};
-guard_test({bin,Line,_}) -> {value,Line,false}.
-
-gexpr({var,Line,V}) -> {var,Line,V};
-gexpr({integer,Line,I}) -> {value,Line,I};
-gexpr({char,Line,I}) -> {value,Line,I};
-gexpr({float,Line,F}) -> {value,Line,F};
-gexpr({atom,Line,A}) -> {value,Line,A};
-gexpr({string,Line,S}) -> {value,Line,S};
-gexpr({nil,Line}) -> {value,Line,[]};
-gexpr({cons,Line,H0,T0}) ->
+guard_test({atom,Anno,_}) -> {value,ln(Anno),false};
+guard_test({integer,Anno,_}) -> {value,ln(Anno),false};
+guard_test({char,Anno,_}) -> {value,ln(Anno),false};
+guard_test({float,Anno,_}) -> {value,ln(Anno),false};
+guard_test({string,Anno,_}) -> {value,ln(Anno),false};
+guard_test({nil,Anno}) -> {value,ln(Anno),false};
+guard_test({cons,Anno,_,_}) -> {value,ln(Anno),false};
+guard_test({tuple,Anno,_}) -> {value,ln(Anno),false};
+guard_test({map,Anno,_}) -> {value,ln(Anno),false};
+guard_test({map,Anno,_,_}) -> {value,ln(Anno),false};
+guard_test({bin,Anno,_}) -> {value,ln(Anno),false}.
+
+gexpr({var,Anno,V}) -> {var,ln(Anno),V};
+gexpr({integer,Anno,I}) -> {value,ln(Anno),I};
+gexpr({char,Anno,I}) -> {value,ln(Anno),I};
+gexpr({float,Anno,F}) -> {value,ln(Anno),F};
+gexpr({atom,Anno,A}) -> {value,ln(Anno),A};
+gexpr({string,Anno,S}) -> {value,ln(Anno),S};
+gexpr({nil,Anno}) -> {value,ln(Anno),[]};
+gexpr({cons,Anno,H0,T0}) ->
case {gexpr(H0),gexpr(T0)} of
{{value,Line,H1},{value,Line,T1}} -> {value,Line,[H1|T1]};
- {H1,T1} -> {cons,Line,H1,T1}
+ {H1,T1} -> {cons,ln(Anno),H1,T1}
end;
-gexpr({tuple,Line,Es0}) ->
+gexpr({tuple,Anno,Es0}) ->
Es1 = gexpr_list(Es0),
- {tuple,Line,Es1};
-gexpr({map,Line,Fs0}) ->
- Fs1 = map_fields(Fs0, fun gexpr/1),
- {map,Line,Fs1};
-gexpr({map,Line,E0,Fs0}) ->
+ {tuple,ln(Anno),Es1};
+gexpr({map,Anno,Fs0}) ->
+ new_map(Fs0, Anno, fun gexpr/1);
+gexpr({map,Anno,E0,Fs0}) ->
E1 = gexpr(E0),
Fs1 = map_fields(Fs0, fun gexpr/1),
- {map,Line,E1,Fs1};
-gexpr({bin,Line,Flds0}) ->
+ {map,ln(Anno),E1,Fs1};
+gexpr({bin,Anno,Flds0}) ->
Flds = gexpr_list(Flds0),
- {bin,Line,Flds};
-gexpr({bin_element,Line,Expr0,Size0,Type}) ->
+ {bin,ln(Anno),Flds};
+gexpr({bin_element,Anno,Expr0,Size0,Type}) ->
Expr = gexpr(Expr0),
Size = gexpr(Size0),
- {bin_element,Line,Expr,Size,Type};
+ {bin_element,ln(Anno),Expr,Size,Type};
%%% The previous passes have added the module name 'erlang' to
%%% all BIF calls, even in guards.
-gexpr({call,Line,{remote,_,{atom,_,erlang},{atom,_,self}},[]}) ->
- {dbg, Line, self, []};
-gexpr({call,Line,{remote,_,{atom,_,erlang},{atom,_,F}},As0}) ->
+gexpr({call,Anno,{remote,_,{atom,_,erlang},{atom,_,self}},[]}) ->
+ {dbg,ln(Anno),self,[]};
+gexpr({call,Anno,{remote,_,{atom,_,erlang},{atom,_,F}},As0}) ->
As = gexpr_list(As0),
- {safe_bif,Line,erlang,F,As};
-gexpr({op,Line,Op,A0}) ->
+ {safe_bif,ln(Anno),erlang,F,As};
+gexpr({op,Anno,Op,A0}) ->
erl_internal:arith_op(Op, 1),
A1 = gexpr(A0),
- {safe_bif,Line,erlang,Op,[A1]};
-gexpr({op,Line,Op,L0,R0}) when Op =:= 'andalso'; Op =:= 'orelse' ->
+ {safe_bif,ln(Anno),erlang,Op,[A1]};
+gexpr({op,Anno,Op,L0,R0}) when Op =:= 'andalso'; Op =:= 'orelse' ->
L1 = gexpr(L0),
R1 = gexpr(R0), %They see the same variables
- {Op,Line,L1,R1};
-gexpr({op,Line,Op,L0,R0}) ->
+ {Op,ln(Anno),L1,R1};
+gexpr({op,Anno,Op,L0,R0}) ->
true = erl_internal:arith_op(Op, 2) orelse erl_internal:comp_op(Op, 2)
orelse erl_internal:bool_op(Op, 2),
L1 = gexpr(L0),
R1 = gexpr(R0), %They see the same variables
- {safe_bif,Line,erlang,Op,[L1,R1]}.
+ {safe_bif,ln(Anno),erlang,Op,[L1,R1]}.
%% These expressions are processed "in parallel" for purposes of variable
%% definition etc.
@@ -343,175 +342,175 @@ exprs([E0|Es], Lc) ->
[E1|exprs(Es, Lc)];
exprs([], _Lc) -> [].
-expr({var,Line,V}, _Lc) -> {var,Line,V};
-expr({integer,Line,I}, _Lc) -> {value,Line,I};
-expr({char,Line,I}, _Lc) -> {value,Line,I};
-expr({float,Line,F}, _Lc) -> {value,Line,F};
-expr({atom,Line,A}, _Lc) -> {value,Line,A};
-expr({string,Line,S}, _Lc) -> {value,Line,S};
-expr({nil,Line}, _Lc) -> {value,Line,[]};
-expr({cons,Line,H0,T0}, _Lc) ->
+expr({var,Anno,V}, _Lc) -> {var,ln(Anno),V};
+expr({integer,Anno,I}, _Lc) -> {value,ln(Anno),I};
+expr({char,Anno,I}, _Lc) -> {value,ln(Anno),I};
+expr({float,Anno,F}, _Lc) -> {value,ln(Anno),F};
+expr({atom,Anno,A}, _Lc) -> {value,ln(Anno),A};
+expr({string,Anno,S}, _Lc) -> {value,ln(Anno),S};
+expr({nil,Anno}, _Lc) -> {value,ln(Anno),[]};
+expr({cons,Anno,H0,T0}, _Lc) ->
case {expr(H0, false),expr(T0, false)} of
{{value,Line,H1},{value,Line,T1}} -> {value,Line,[H1|T1]};
- {H1,T1} -> {cons,Line,H1,T1}
+ {H1,T1} -> {cons,ln(Anno),H1,T1}
end;
-expr({tuple,Line,Es0}, _Lc) ->
+expr({tuple,Anno,Es0}, _Lc) ->
Es1 = expr_list(Es0),
- {tuple,Line,Es1};
-expr({map,Line,Fs0}, _Lc) ->
- Fs1 = map_fields(Fs0),
- {map,Line,Fs1};
-expr({map,Line,E0,Fs0}, _Lc) ->
+ {tuple,ln(Anno),Es1};
+expr({map,Anno,Fs}, _Lc) ->
+ new_map(Fs, Anno, fun (E) -> expr(E, false) end);
+expr({map,Anno,E0,Fs0}, _Lc) ->
E1 = expr(E0, false),
Fs1 = map_fields(Fs0),
- {map,Line,E1,Fs1};
-expr({block,Line,Es0}, Lc) ->
+ {map,ln(Anno),E1,Fs1};
+expr({block,Anno,Es0}, Lc) ->
%% Unfold block into a sequence.
Es1 = exprs(Es0, Lc),
- {block,Line,Es1};
-expr({'if',Line,Cs0}, Lc) ->
+ {block,ln(Anno),Es1};
+expr({'if',Anno,Cs0}, Lc) ->
Cs1 = icr_clauses(Cs0, Lc),
- {'if',Line,Cs1};
-expr({'case',Line,E0,Cs0}, Lc) ->
+ {'if',ln(Anno),Cs1};
+expr({'case',Anno,E0,Cs0}, Lc) ->
E1 = expr(E0, false),
Cs1 = icr_clauses(Cs0, Lc),
- {'case',Line,E1,Cs1};
-expr({'receive',Line,Cs0}, Lc) ->
+ {'case',ln(Anno),E1,Cs1};
+expr({'receive',Anno,Cs0}, Lc) ->
Cs1 = icr_clauses(Cs0, Lc),
- {'receive',Line,Cs1};
-expr({'receive',Line,Cs0,To0,ToEs0}, Lc) ->
+ {'receive',ln(Anno),Cs1};
+expr({'receive',Anno,Cs0,To0,ToEs0}, Lc) ->
To1 = expr(To0, false),
ToEs1 = exprs(ToEs0, Lc),
Cs1 = icr_clauses(Cs0, Lc),
- {'receive',Line,Cs1,To1,ToEs1};
-expr({'fun',Line,{clauses,Cs0},{_,_,Name}}, _Lc) when is_atom(Name) ->
+ {'receive',ln(Anno),Cs1,To1,ToEs1};
+expr({'fun',Anno,{clauses,Cs0},{_,_,Name}}, _Lc) when is_atom(Name) ->
%% New R10B-2 format (abstract_v2).
Cs = fun_clauses(Cs0),
- {make_fun,Line,Name,Cs};
-expr({'fun',Line,{function,F,A},{_Index,_OldUniq,Name}}, _Lc) ->
+ {make_fun,ln(Anno),Name,Cs};
+expr({'fun',Anno,{function,F,A},{_Index,_OldUniq,Name}}, _Lc) ->
%% New R8 format (abstract_v2).
+ Line = ln(Anno),
As = new_vars(A, Line),
Cs = [{clause,Line,As,[],[{local_call,Line,F,As,true}]}],
{make_fun,Line,Name,Cs};
-expr({named_fun,Line,FName,Cs0,{_,_,Name}}, _Lc) when is_atom(Name) ->
+expr({named_fun,Anno,FName,Cs0,{_,_,Name}}, _Lc) when is_atom(Name) ->
Cs = fun_clauses(Cs0),
- {make_named_fun,Line,Name,FName,Cs};
-expr({'fun',Line,{function,{atom,_,M},{atom,_,F},{integer,_,A}}}, _Lc)
+ {make_named_fun,ln(Anno),Name,FName,Cs};
+expr({'fun',Anno,{function,{atom,_,M},{atom,_,F},{integer,_,A}}}, _Lc)
when 0 =< A, A =< 255 ->
%% New format in R15 for fun M:F/A (literal values).
- {value,Line,erlang:make_fun(M, F, A)};
-expr({'fun',Line,{function,M,F,A}}, _Lc) ->
+ {value,ln(Anno),erlang:make_fun(M, F, A)};
+expr({'fun',Anno,{function,M,F,A}}, _Lc) ->
%% New format in R15 for fun M:F/A (one or more variables).
MFA = expr_list([M,F,A]),
- {make_ext_fun,Line,MFA};
-expr({call,Line,{remote,_,{atom,_,erlang},{atom,_,self}},[]}, _Lc) ->
- {dbg,Line,self,[]};
-expr({call,Line,{remote,_,{atom,_,erlang},{atom,_,get_stacktrace}},[]}, _Lc) ->
- {dbg,Line,get_stacktrace,[]};
-expr({call,Line,{remote,_,{atom,_,erlang},{atom,_,throw}},[_]=As}, _Lc) ->
- {dbg,Line,throw,expr_list(As)};
-expr({call,Line,{remote,_,{atom,_,erlang},{atom,_,error}},[_]=As}, _Lc) ->
- {dbg,Line,error,expr_list(As)};
-expr({call,Line,{remote,_,{atom,_,erlang},{atom,_,exit}},[_]=As}, _Lc) ->
- {dbg,Line,exit,expr_list(As)};
-expr({call,Line,{remote,_,{atom,_,erlang},{atom,_,raise}},[_,_,_]=As}, _Lc) ->
- {dbg,Line,raise,expr_list(As)};
-expr({call,Line,{remote,_,{atom,_,erlang},{atom,_,apply}},[_,_,_]=As0}, Lc) ->
+ {make_ext_fun,ln(Anno),MFA};
+expr({call,Anno,{remote,_,{atom,_,erlang},{atom,_,self}},[]}, _Lc) ->
+ {dbg,ln(Anno),self,[]};
+expr({call,Anno,{remote,_,{atom,_,erlang},{atom,_,get_stacktrace}},[]}, _Lc) ->
+ {dbg,ln(Anno),get_stacktrace,[]};
+expr({call,Anno,{remote,_,{atom,_,erlang},{atom,_,throw}},[_]=As}, _Lc) ->
+ {dbg,ln(Anno),throw,expr_list(As)};
+expr({call,Anno,{remote,_,{atom,_,erlang},{atom,_,error}},[_]=As}, _Lc) ->
+ {dbg,ln(Anno),error,expr_list(As)};
+expr({call,Anno,{remote,_,{atom,_,erlang},{atom,_,exit}},[_]=As}, _Lc) ->
+ {dbg,ln(Anno),exit,expr_list(As)};
+expr({call,Anno,{remote,_,{atom,_,erlang},{atom,_,raise}},[_,_,_]=As}, _Lc) ->
+ {dbg,ln(Anno),raise,expr_list(As)};
+expr({call,Anno,{remote,_,{atom,_,erlang},{atom,_,apply}},[_,_,_]=As0}, Lc) ->
As = expr_list(As0),
- {apply,Line,As,Lc};
-expr({call,Line,{remote,_,{atom,_,Mod},{atom,_,Func}},As0}, Lc) ->
+ {apply,ln(Anno),As,Lc};
+expr({call,Anno,{remote,_,{atom,_,Mod},{atom,_,Func}},As0}, Lc) ->
As = expr_list(As0),
case erlang:is_builtin(Mod, Func, length(As)) of
false ->
- {call_remote,Line,Mod,Func,As,Lc};
+ {call_remote,ln(Anno),Mod,Func,As,Lc};
true ->
case bif_type(Mod, Func, length(As0)) of
- safe -> {safe_bif,Line,Mod,Func,As};
- unsafe ->{bif,Line,Mod,Func,As}
+ safe -> {safe_bif,ln(Anno),Mod,Func,As};
+ unsafe ->{bif,ln(Anno),Mod,Func,As}
end
end;
-expr({call,Line,{remote,_,Mod0,Func0},As0}, Lc) ->
+expr({call,Anno,{remote,_,Mod0,Func0},As0}, Lc) ->
%% New R8 format (abstract_v2).
Mod = expr(Mod0, false),
Func = expr(Func0, false),
As = consify(expr_list(As0)),
- {apply,Line,[Mod,Func,As],Lc};
-expr({call,Line,{atom,_,Func},As0}, Lc) ->
+ {apply,ln(Anno),[Mod,Func,As],Lc};
+expr({call,Anno,{atom,_,Func},As0}, Lc) ->
As = expr_list(As0),
- {local_call,Line,Func,As,Lc};
-expr({call,Line,Fun0,As0}, Lc) ->
+ {local_call,ln(Anno),Func,As,Lc};
+expr({call,Anno,Fun0,As0}, Lc) ->
Fun = expr(Fun0, false),
As = expr_list(As0),
- {apply_fun,Line,Fun,As,Lc};
-expr({'catch',Line,E0}, _Lc) ->
+ {apply_fun,ln(Anno),Fun,As,Lc};
+expr({'catch',Anno,E0}, _Lc) ->
%% No new variables added.
E1 = expr(E0, false),
- {'catch',Line,E1};
-expr({'try',Line,Es0,CaseCs0,CatchCs0,As0}, Lc) ->
+ {'catch',ln(Anno),E1};
+expr({'try',Anno,Es0,CaseCs0,CatchCs0,As0}, Lc) ->
%% No new variables added.
Es = expr_list(Es0),
CaseCs = icr_clauses(CaseCs0, Lc),
CatchCs = icr_clauses(CatchCs0, Lc),
As = expr_list(As0),
- {'try',Line,Es,CaseCs,CatchCs,As};
-expr({lc,Line,E0,Gs0}, _Lc) -> %R8.
+ {'try',ln(Anno),Es,CaseCs,CatchCs,As};
+expr({lc,Anno,E0,Gs0}, _Lc) -> %R8.
Gs = lists:map(fun ({generate,L,P0,Qs}) ->
- {generate,L,expr(P0, false),expr(Qs, false)};
+ {generate,L,pattern(P0),expr(Qs, false)};
({b_generate,L,P0,Qs}) -> %R12.
- {b_generate,L,expr(P0, false),expr(Qs, false)};
+ {b_generate,L,pattern(P0),expr(Qs, false)};
(Expr) ->
case erl_lint:is_guard_test(Expr) of
true -> {guard,guard([[Expr]])};
false -> expr(Expr, false)
end
end, Gs0),
- {lc,Line,expr(E0, false),Gs};
-expr({bc,Line,E0,Gs0}, _Lc) -> %R12.
+ {lc,ln(Anno),expr(E0, false),Gs};
+expr({bc,Anno,E0,Gs0}, _Lc) -> %R12.
Gs = lists:map(fun ({generate,L,P0,Qs}) ->
- {generate,L,expr(P0, false),expr(Qs, false)};
+ {generate,L,pattern(P0),expr(Qs, false)};
({b_generate,L,P0,Qs}) -> %R12.
- {b_generate,L,expr(P0, false),expr(Qs, false)};
+ {b_generate,L,pattern(P0),expr(Qs, false)};
(Expr) ->
case erl_lint:is_guard_test(Expr) of
true -> {guard,guard([[Expr]])};
false -> expr(Expr, false)
end
end, Gs0),
- {bc,Line,expr(E0, false),Gs};
-expr({match,Line,P0,E0}, _Lc) ->
+ {bc,ln(Anno),expr(E0, false),Gs};
+expr({match,Anno,P0,E0}, _Lc) ->
E1 = expr(E0, false),
P1 = pattern(P0),
- {match,Line,P1,E1};
-expr({op,Line,Op,A0}, _Lc) ->
+ {match,ln(Anno),P1,E1};
+expr({op,Anno,Op,A0}, _Lc) ->
A1 = expr(A0, false),
- {op,Line,Op,[A1]};
-expr({op,Line,'++',L0,R0}, _Lc) ->
+ {op,ln(Anno),Op,[A1]};
+expr({op,Anno,'++',L0,R0}, _Lc) ->
L1 = expr(L0, false),
R1 = expr(R0, false), %They see the same variables
- {op,Line,append,[L1,R1]};
-expr({op,Line,'--',L0,R0}, _Lc) ->
+ {op,ln(Anno),append,[L1,R1]};
+expr({op,Anno,'--',L0,R0}, _Lc) ->
L1 = expr(L0, false),
R1 = expr(R0, false), %They see the same variables
- {op,Line,subtract,[L1,R1]};
-expr({op,Line,'!',L0,R0}, _Lc) ->
+ {op,ln(Anno),subtract,[L1,R1]};
+expr({op,Anno,'!',L0,R0}, _Lc) ->
L1 = expr(L0, false),
R1 = expr(R0, false), %They see the same variables
- {send,Line,L1,R1};
-expr({op,Line,Op,L0,R0}, _Lc) when Op =:= 'andalso'; Op =:= 'orelse' ->
+ {send,ln(Anno),L1,R1};
+expr({op,Anno,Op,L0,R0}, _Lc) when Op =:= 'andalso'; Op =:= 'orelse' ->
L1 = expr(L0, false),
R1 = expr(R0, false), %They see the same variables
- {Op,Line,L1,R1};
-expr({op,Line,Op,L0,R0}, _Lc) ->
+ {Op,ln(Anno),L1,R1};
+expr({op,Anno,Op,L0,R0}, _Lc) ->
L1 = expr(L0, false),
R1 = expr(R0, false), %They see the same variables
- {op,Line,Op,[L1,R1]};
-expr({bin,Line,Grp}, _Lc) ->
+ {op,ln(Anno),Op,[L1,R1]};
+expr({bin,Anno,Grp}, _Lc) ->
Grp1 = expr_list(Grp),
- {bin,Line,Grp1};
-expr({bin_element,Line,Expr,Size,Type}, _Lc) ->
+ {bin,ln(Anno),Grp1};
+expr({bin_element,Anno,Expr,Size,Type}, _Lc) ->
Expr1 = expr(Expr, false),
Size1 = expr(Size, false),
- {bin_element,Line,Expr1,Size1,Type};
+ {bin_element,ln(Anno),Expr1,Size1,Type};
expr(Other, _Lc) ->
exit({?MODULE,{unknown_expr,Other}}).
@@ -519,7 +518,6 @@ consify([A|As]) ->
{cons,0,A,consify(As)};
consify([]) -> {value,0,[]}.
-
%% -type expr_list([Expression]) -> [Expression].
%% These expressions are processed "in parallel" for purposes of variable
%% definition etc.
@@ -534,17 +532,35 @@ icr_clauses([C0|Cs], Lc) ->
[C1|icr_clauses(Cs, Lc)];
icr_clauses([], _) -> [].
-fun_clauses([{clause,L,H,G,B}|Cs]) ->
- [{clause,L,head(H),guard(G),exprs(B, true)}|fun_clauses(Cs)];
+fun_clauses([{clause,A,H,G,B}|Cs]) ->
+ [{clause,ln(A),head(H),guard(G),exprs(B, true)}|fun_clauses(Cs)];
fun_clauses([]) -> [].
+
+new_map(Fs0, Anno, F) ->
+ Line = ln(Anno),
+ Fs1 = map_fields(Fs0, F),
+ Fs2 = [{ln(A),K,V} || {map_field_assoc,A,K,V} <- Fs1],
+ try
+ {value,Line,map_literal(Fs2, #{})}
+ catch
+ throw:not_literal ->
+ {map,Line,Fs2}
+ end.
+
+map_literal([{_,{value,_,K},{value,_,V}}|T], M) ->
+ map_literal(T, maps:put(K, V, M));
+map_literal([_|_], _) ->
+ throw(not_literal);
+map_literal([], M) -> M.
+
map_fields(Fs) ->
map_fields(Fs, fun (E) -> expr(E, false) end).
-map_fields([{map_field_assoc,L,N,V}|Fs], F) ->
- [{map_field_assoc,L,F(N),F(V)}|map_fields(Fs)];
-map_fields([{map_field_exact,L,N,V}|Fs], F) ->
- [{map_field_exact,L,F(N),F(V)}|map_fields(Fs)];
+map_fields([{map_field_assoc,A,N,V}|Fs], F) ->
+ [{map_field_assoc,ln(A),F(N),F(V)}|map_fields(Fs)];
+map_fields([{map_field_exact,A,N,V}|Fs], F) ->
+ [{map_field_exact,ln(A),F(N),F(V)}|map_fields(Fs)];
map_fields([], _) -> [].
%% new_var_name() -> VarName.
@@ -564,6 +580,9 @@ new_vars(N, L, Vs) when N > 0 ->
new_vars(N-1, L, [V|Vs]);
new_vars(0, _, Vs) -> Vs.
+ln(Anno) ->
+ erl_anno:line(Anno).
+
bif_type(erlang, Name, Arity) ->
case erl_internal:guard_bif(Name, Arity) of
true ->
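new_map/3 above folds a map construction whose keys and values are all literals into a single {value,Line,Map} term at load time; if any field still involves a variable or call, map_literal/2 throws not_literal and the field list is kept for run-time evaluation. A condensed sketch of that decision (field tuples follow the {Line,KeyExpr,ValExpr} shape used above; the wrapper name is illustrative):

    %% All-literal fields collapse to a map value; anything else stays symbolic.
    classify_map_fields(Fs) ->
        try {literal, fold_literal(Fs, #{})}
        catch throw:not_literal -> {runtime, Fs}
        end.

    fold_literal([{_,{value,_,K},{value,_,V}}|T], M) -> fold_literal(T, maps:put(K, V, M));
    fold_literal([_|_], _)                           -> throw(not_literal);
    fold_literal([], M)                              -> M.

So #{a => 1, b => 2} becomes one literal when the module is loaded into the interpreter, while #{a => X} is still built each time the expression is evaluated.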
diff --git a/lib/debugger/src/dbg_wx_settings.erl b/lib/debugger/src/dbg_wx_settings.erl
index 20aac74c3d..2c332c0a54 100644
--- a/lib/debugger/src/dbg_wx_settings.erl
+++ b/lib/debugger/src/dbg_wx_settings.erl
@@ -65,14 +65,8 @@ open_win(Win, Pos, SFile, Str, What) ->
{style,What}]),
case wxFileDialog:showModal(FD) of
?wxID_OK ->
- case wxFileDialog:getPaths(FD) of
- [NewFile] ->
- wxFileDialog:destroy(FD),
- {ok, NewFile};
- _ ->
- wxFileDialog:destroy(FD),
- cancel
- end;
+ NewFile = wxFileDialog:getPath(FD),
+ {ok, NewFile};
_ ->
wxFileDialog:destroy(FD),
cancel
diff --git a/lib/debugger/src/dbg_wx_trace.erl b/lib/debugger/src/dbg_wx_trace.erl
index 4438466bb0..ffdfc46496 100644
--- a/lib/debugger/src/dbg_wx_trace.erl
+++ b/lib/debugger/src/dbg_wx_trace.erl
@@ -140,7 +140,7 @@ init(Pid, Parent, Meta, TraceWin, BackTrace, Strings) ->
int:meta(Meta, trace, State3#state.trace),
- gui_enable_updown(stack_trace, {1,1}),
+ gui_enable_updown(State3#state.stack_trace, {1,1}),
gui_enable_btrace(false, false),
dbg_wx_trace_win:display(Win,idle),
diff --git a/lib/debugger/src/debugger.app.src b/lib/debugger/src/debugger.app.src
index f102385d39..a013c5c11f 100644
--- a/lib/debugger/src/debugger.app.src
+++ b/lib/debugger/src/debugger.app.src
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -47,5 +47,5 @@
]},
{registered, [dbg_iserver, dbg_wx_mon, dbg_wx_winman]},
{applications, [kernel, stdlib]},
- {runtime_dependencies, ["wx-1.2","stdlib-2.0","kernel-3.0","erts-6.0",
+ {runtime_dependencies, ["wx-1.2","stdlib-2.5","kernel-3.0","erts-6.0",
"compiler-5.0"]}]}.
diff --git a/lib/debugger/src/int.erl b/lib/debugger/src/int.erl
index 908390ce50..6f84ca3bca 100644
--- a/lib/debugger/src/int.erl
+++ b/lib/debugger/src/int.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1998-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1998-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -365,7 +365,7 @@ stop() ->
%% function will receive the following messages:
%% {int, {interpret, Mod}}
%% {int, {no_interpret, Mod}}
-%% {int, {new_process, Pid, Function, Status, Info}}
+%% {int, {new_process, {Pid, Function, Status, Info}}}
%% {int, {new_status, Pid, Status, Info}}
%% {int, {new_break, {Point, Options}}}
%% {int, {delete_break, Point}}
diff --git a/lib/debugger/test/int_eval_SUITE.erl b/lib/debugger/test/int_eval_SUITE.erl
index 9d7ef238e3..cda5c97ec4 100644
--- a/lib/debugger/test/int_eval_SUITE.erl
+++ b/lib/debugger/test/int_eval_SUITE.erl
@@ -293,7 +293,7 @@ stacktrace(Config) when is_list(Config) ->
maps(Config) when is_list(Config) ->
Fun = fun () -> ?IM:empty_map_update([camembert]) end,
- {'EXIT',{{badarg,[camembert]},_}} = spawn_eval(Fun),
+ {'EXIT',{{badmap,[camembert]},_}} = spawn_eval(Fun),
[#{hello := 0, price := 0}] = spawn_eval(fun () -> ?IM:update_in_fun() end),
ok.
diff --git a/lib/debugger/test/map_SUITE.erl b/lib/debugger/test/map_SUITE.erl
index e525484a8e..74847e161f 100644
--- a/lib/debugger/test/map_SUITE.erl
+++ b/lib/debugger/test/map_SUITE.erl
@@ -24,12 +24,17 @@
]).
-export([
- t_build_and_match_literals/1,
- t_update_literals/1,t_match_and_update_literals/1,
+ t_build_and_match_literals/1, t_build_and_match_literals_large/1,
+ t_update_literals/1, t_update_literals_large/1,
+ t_match_and_update_literals/1, t_match_and_update_literals_large/1,
t_update_map_expressions/1,
- t_update_assoc/1,t_update_exact/1,
- t_guard_bifs/1, t_guard_sequence/1, t_guard_update/1,
- t_guard_receive/1, t_guard_fun/1,
+ t_update_assoc/1, t_update_assoc_large/1,
+ t_update_exact/1, t_update_exact_large/1,
+ t_guard_bifs/1,
+ t_guard_sequence/1, t_guard_sequence_large/1,
+ t_guard_update/1, t_guard_update_large/1,
+ t_guard_receive/1, t_guard_receive_large/1,
+ t_guard_fun/1,
t_list_comprehension/1,
t_map_sort_literals/1,
t_map_size/1,
@@ -90,12 +95,17 @@
suite() -> [].
all() -> [
- t_build_and_match_literals,
- t_update_literals, t_match_and_update_literals,
+ t_build_and_match_literals, t_build_and_match_literals_large,
+ t_update_literals, t_update_literals_large,
+ t_match_and_update_literals, t_match_and_update_literals_large,
t_update_map_expressions,
- t_update_assoc,t_update_exact,
- t_guard_bifs, t_guard_sequence, t_guard_update,
- t_guard_receive,t_guard_fun, t_list_comprehension,
+ t_update_assoc, t_update_assoc_large,
+ t_update_exact, t_update_exact_large,
+ t_guard_bifs,
+ t_guard_sequence, t_guard_sequence_large,
+ t_guard_update, t_guard_update_large,
+ t_guard_receive, t_guard_receive_large,
+ t_guard_fun, t_list_comprehension,
t_map_sort_literals,
t_build_and_match_aliasing,
@@ -189,6 +199,462 @@ t_build_and_match_literals(Config) when is_list(Config) ->
{'EXIT',{{badmatch,_},_}} = (catch (#{x:=3} = id(#{x=>"three"}))),
ok.
+t_build_and_match_literals_large(Config) when is_list(Config) ->
+ % normal non-repeating
+ M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" }),
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M0,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M0,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M0,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M0,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M0,
+
+ 60 = map_size(M0),
+ 60 = maps:size(M0),
+
+ % with repeating
+ M1 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 10=>na0,20=>nb0,30=>"nc0","40"=>"nd0",<<"50">>=>"ne0",{["00"]}=>"n10",
+ 11=>na1,21=>nb1,31=>"nc1","41"=>"nd1",<<"51">>=>"ne1",{["01"]}=>"n11",
+ 12=>na2,22=>nb2,32=>"nc2","42"=>"nd2",<<"52">>=>"ne2",{["02"]}=>"n12",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+
+ 13=>na3,23=>nb3,33=>"nc3","43"=>"nd3",<<"53">>=>"ne3",{["03"]}=>"n13",
+ 14=>na4,24=>nb4,34=>"nc4","44"=>"nd4",<<"54">>=>"ne4",{["04"]}=>"n14",
+
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" }),
+
+ #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1,
+ #{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1,
+ #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1,
+ #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1,
+ #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1,
+
+ 60 = map_size(M1),
+ 60 = maps:size(M1),
+
+ % with floats
+
+ M2 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9"}),
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2,
+
+ #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2,
+ #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2,
+ #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2,
+ #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2,
+ #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2,
+
+ 90 = map_size(M2),
+ 90 = maps:size(M2),
+
+ %% With bignum keys.
+ M3 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ 36893488147419103232=>big1, 73786976294838206464=>big2,
+ 147573952589676412928=>big3, 18446744073709551616=>big4,
+ 4294967296=>big5, 8589934592=>big6,
+ 4294967295=>big7, 67108863=>big8
+ }),
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3,
+
+ #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+
+ 98 = map_size(M3),
+ 98 = maps:size(M3),
+
+ %% With small and large maps as keys.
+
+ M4 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }),
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M4,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M4,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M4,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M4,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M4,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M4,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M4,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M4,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M4,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M4,
+
+ #{ #{ one => small, map => key } := "small map key 1",
+ #{ second => small, map => key } := "small map key 2",
+ #{ third => small, map => key } := "small map key 3" } = M4,
+
+ #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M4,
+
+
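+ %% Bind values through ':=' patterns, including patterns whose keys are themselves maps.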
+ #{ 15:=V1,25:=b5,35:=V2,"45":="d5",<<"55">>:=V3,{["05"]}:="15",
+ #{ one => small, map => key } := "small map key 1",
+ #{ second => small, map => key } := V4,
+ #{ third => small, map => key } := "small map key 3",
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := V5 } = M4,
+
+ a5 = V1,
+ "c5" = V2,
+ "e5" = V3,
+ "small map key 2" = V4,
+ "large map key 1" = V5,
+
+ 95 = map_size(M4),
+ 95 = maps:size(M4),
+
+ %% Values come from function calls rather than literals.
+
+ M5 = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10",
+ 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11",
+ 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12",
+ 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13",
+ 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14",
+
+ 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15",
+ 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16",
+ 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17",
+ 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18",
+ 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"),
+ 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"),
+ 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"),
+ 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"),
+ 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"),
+
+ 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"),
+ 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"),
+ 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"),
+ 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"),
+ 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"),
+
+ #{ one => small, map => key } => id("small map key 1"),
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }),
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M5,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M5,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M5,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M5,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M5,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M5,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M5,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M5,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M5,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M5,
+
+ #{ #{ one => small, map => key } := "small map key 1",
+ #{ second => small, map => key } := "small map key 2",
+ #{ third => small, map => key } := "small map key 3" } = M5,
+
+ #{ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 2" } = M5,
+
+ 95 = map_size(M5),
+ 95 = maps:size(M5),
+
+ %% The maps built earlier are still intact.
+
+ #{10:=a0,20:=b0,30:="c0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M0,
+ #{11:=a1,21:=b1,31:="c1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M0,
+ #{12:=a2,22:=b2,32:="c2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M0,
+ #{13:=a3,23:=b3,33:="c3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M0,
+ #{14:=a4,24:=b4,34:="c4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M0,
+
+ #{10:=na0,20:=nb0,30:="nc0","40":="nd0",<<"50">>:="ne0",{["00"]}:="n10"} = M1,
+ #{11:=na1,21:=nb1,31:="nc1","41":="nd1",<<"51">>:="ne1",{["01"]}:="n11"} = M1,
+ #{12:=na2,22:=nb2,32:="nc2","42":="nd2",<<"52">>:="ne2",{["02"]}:="n12"} = M1,
+ #{13:=na3,23:=nb3,33:="nc3","43":="nd3",<<"53">>:="ne3",{["03"]}:="n13"} = M1,
+ #{14:=na4,24:=nb4,34:="nc4","44":="nd4",<<"54">>:="ne4",{["04"]}:="n14"} = M1,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M1,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M1,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M1,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M1,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M1,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M2,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M2,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M2,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M2,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M2,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M2,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M2,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M2,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M2,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M2,
+
+ #{15:=a5,25:=b5,35:="c5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16:=a6,26:=b6,36:="c6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17:=a7,27:=b7,37:="c7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18:=a8,28:=b8,38:="c8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19:=a9,29:=b9,39:="c9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{10.0:=fa0,20.0:=fb0,30.0:="fc0","40":="d0",<<"50">>:="e0",{["00"]}:="10"} = M3,
+ #{11.0:=fa1,21.0:=fb1,31.0:="fc1","41":="d1",<<"51">>:="e1",{["01"]}:="11"} = M3,
+ #{12.0:=fa2,22.0:=fb2,32.0:="fc2","42":="d2",<<"52">>:="e2",{["02"]}:="12"} = M3,
+ #{13.0:=fa3,23.0:=fb3,33.0:="fc3","43":="d3",<<"53">>:="e3",{["03"]}:="13"} = M3,
+ #{14.0:=fa4,24.0:=fb4,34.0:="fc4","44":="d4",<<"54">>:="e4",{["04"]}:="14"} = M3,
+
+ #{15.0:=fa5,25.0:=fb5,35.0:="fc5","45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{16.0:=fa6,26.0:=fb6,36.0:="fc6","46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{17.0:=fa7,27.0:=fb7,37.0:="fc7","47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{18.0:=fa8,28.0:=fb8,38.0:="fc8","48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+ #{19.0:=fa9,29.0:=fb9,39.0:="fc9","49":="d9",<<"59">>:="e9",{["09"]}:="19"} = M3,
+
+ #{36893488147419103232:=big1,67108863:=big8,"45":="d5",<<"55">>:="e5",{["05"]}:="15"} = M3,
+ #{147573952589676412928:=big3,8589934592:=big6,"46":="d6",<<"56">>:="e6",{["06"]}:="16"} = M3,
+ #{4294967296:=big5,18446744073709551616:=big4,"47":="d7",<<"57">>:="e7",{["07"]}:="17"} = M3,
+ #{4294967295:=big7,73786976294838206464:=big2,"48":="d8",<<"58">>:="e8",{["08"]}:="18"} = M3,
+
+ ok.
+
+
t_map_size(Config) when is_list(Config) ->
0 = map_size(id(#{})),
1 = map_size(id(#{a=>1})),
@@ -205,9 +671,10 @@ t_map_size(Config) when is_list(Config) ->
false = map_is_size(M#{ "c" => 2}, 2),
%% Error cases.
- {'EXIT',{badarg,_}} = (catch map_size([])),
- {'EXIT',{badarg,_}} = (catch map_size(<<1,2,3>>)),
- {'EXIT',{badarg,_}} = (catch map_size(1)),
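+ %% do_badmap/1, defined elsewhere in this suite, calls the fun with a selection of non-map terms.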
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},_}} =
+ (catch map_size(T))
+ end),
ok.
map_is_size(M,N) when map_size(M) =:= N -> true;
@@ -221,13 +688,72 @@ t_update_literals(Config) when is_list(Config) ->
]),
ok.
+t_update_literals_large(Config) when is_list(Config) ->
+ Map = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10",
+ 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11",
+ 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12",
+ 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13",
+ 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14",
+
+ 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15",
+ 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16",
+ 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17",
+ 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18",
+ 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"),
+ 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"),
+ 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"),
+ 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"),
+ 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"),
+
+ 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"),
+ 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"),
+ 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"),
+ 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"),
+ 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"),
+
+ #{ one => small, map => key } => id("small map key 1"),
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }),
+
+ #{x:="d",q:="4"} = loop_update_literals_x_q(Map, [
+ {"a","1"},{"b","2"},{"c","3"},{"d","4"}
+ ]),
+ ok.
+
loop_update_literals_x_q(Map, []) -> Map;
loop_update_literals_x_q(Map, [{X,Q}|Vs]) ->
loop_update_literals_x_q(Map#{q=>Q,x=>X},Vs).
% test map updates with matching
t_match_and_update_literals(Config) when is_list(Config) ->
- Map = #{x=>0,y=>"untouched",z=>"also untouched",q=>1},
+ Map = #{ x=>0,y=>"untouched",z=>"also untouched",q=>1,
+ #{ "one" => small, map => key } => "small map key 1" },
#{x:=16,q:=21,y:="untouched",z:="also untouched"} = loop_match_and_update_literals_x_q(Map, [
{1,2},{3,4},{5,6},{7,8}
]),
@@ -241,8 +767,77 @@ t_match_and_update_literals(Config) when is_list(Config) ->
#{ 4 := another_number, int := 3 } = M2#{ 4 => another_number },
ok.
+t_match_and_update_literals_large(Config) when is_list(Config) ->
+ Map = id(#{ 10=>id(a0),20=>b0,30=>id("c0"),"40"=>"d0",<<"50">>=>id("e0"),{["00"]}=>"10",
+ 11=>id(a1),21=>b1,31=>id("c1"),"41"=>"d1",<<"51">>=>id("e1"),{["01"]}=>"11",
+ 12=>id(a2),22=>b2,32=>id("c2"),"42"=>"d2",<<"52">>=>id("e2"),{["02"]}=>"12",
+ 13=>id(a3),23=>b3,33=>id("c3"),"43"=>"d3",<<"53">>=>id("e3"),{["03"]}=>"13",
+ 14=>id(a4),24=>b4,34=>id("c4"),"44"=>"d4",<<"54">>=>id("e4"),{["04"]}=>"14",
+
+ 15=>id(a5),25=>b5,35=>id("c5"),"45"=>"d5",<<"55">>=>id("e5"),{["05"]}=>"15",
+ 16=>id(a6),26=>b6,36=>id("c6"),"46"=>"d6",<<"56">>=>id("e6"),{["06"]}=>"16",
+ 17=>id(a7),27=>b7,37=>id("c7"),"47"=>"d7",<<"57">>=>id("e7"),{["07"]}=>"17",
+ 18=>id(a8),28=>b8,38=>id("c8"),"48"=>"d8",<<"58">>=>id("e8"),{["08"]}=>"18",
+ 19=>id(a9),29=>b9,39=>id("c9"),"49"=>"d9",<<"59">>=>id("e9"),{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>id(fb0),30.0=>id("fc0"),
+ 11.0=>fa1,21.0=>id(fb1),31.0=>id("fc1"),
+ 12.0=>fa2,22.0=>id(fb2),32.0=>id("fc2"),
+ 13.0=>fa3,23.0=>id(fb3),33.0=>id("fc3"),
+ 14.0=>fa4,24.0=>id(fb4),34.0=>id("fc4"),
+
+ 15.0=>fa5,25.0=>id(fb5),35.0=>id("fc5"),
+ 16.0=>fa6,26.0=>id(fb6),36.0=>id("fc6"),
+ 17.0=>fa7,27.0=>id(fb7),37.0=>id("fc7"),
+ 18.0=>fa8,28.0=>id(fb8),38.0=>id("fc8"),
+ 19.0=>fa9,29.0=>id(fb9),39.0=>id("fc9"),
+
+ x=>0,y=>"untouched",z=>"also untouched",q=>1,
+
+ #{ "one" => small, map => key } => id("small map key 1"),
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => id("large map key 2") }),
+
+ #{x:=16,q:=21,y:="untouched",z:="also untouched"} = loop_match_and_update_literals_x_q(Map, [
+ {1,2},{3,4},{5,6},{7,8}
+ ]),
+ M0 = id(Map#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat}),
+ M1 = id(Map#{}),
+ M2 = M1#{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat},
+ M0 = M2,
+
+ #{ 4 := another_number, int := 3 } = M2#{ 4 => another_number },
+ ok.
+
loop_match_and_update_literals_x_q(Map, []) -> Map;
-loop_match_and_update_literals_x_q(#{q:=Q0,x:=X0} = Map, [{X,Q}|Vs]) ->
+loop_match_and_update_literals_x_q(#{ q:=Q0, x:=X0,
+ #{ "one" => small, map => key } := "small map key 1" } = Map, [{X,Q}|Vs]) ->
loop_match_and_update_literals_x_q(Map#{q=>Q0+Q,x=>X0+X},Vs).
@@ -255,9 +850,9 @@ t_update_map_expressions(Config) when is_list(Config) ->
#{ a :=42, b:=42, c:=42 } = (maps:from_list([{a,1},{b,2},{c,3}]))#{ a := 42, b := 42, c := 42 },
#{ "a" :=1, "b":=42, "c":=42 } = (maps:from_list([{"a",1},{"b",2}]))#{ "b" := 42, "c" => 42 },
- %% Error cases, FIXME: should be 'badmap'?
- {'EXIT',{{badarg,<<>>},_}} = (catch (id(<<>>))#{ a := 42, b => 2 }),
- {'EXIT',{{badarg,[]},_}} = (catch (id([]))#{ a := 42, b => 2 }),
+ %% Error cases.
+ {'EXIT',{{badmap,<<>>},_}} = (catch (id(<<>>))#{ a := 42, b => 2 }),
+ {'EXIT',{{badmap,[]},_}} = (catch (id([]))#{ a := 42, b => 2 }),
ok.
@@ -273,11 +868,80 @@ t_update_assoc(Config) when is_list(Config) ->
M2 = M0#{3.0:=wrong,3.0=>new},
%% Errors cases.
- BadMap = id(badmap),
- {'EXIT',{{badarg,BadMap},_}} = (catch BadMap#{nonexisting=>val}),
+ BadMap = id(not_a_good_map),
+ {'EXIT',{{badmap,BadMap},_}} = (catch BadMap#{nonexisting=>val}),
ok.
+t_update_assoc_large(Config) when is_list(Config) ->
+ M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }),
+
+
+ M1 = M0#{1=>42,2=>100,4=>[a,b,c]},
+ #{1:=42,2:=100,10.0:=fa0,4:=[a,b,c],25:=b5} = M1,
+ #{ 10:=43, 24:=b4, 15:=a5, 35:="c5", 2.0:=100, 13.0:=fa3, 4.0:=[a,b,c]} =
+ M0#{1.0=>float,10:=43,2.0=>wrong,2.0=>100,4.0=>[a,b,c]},
+
+ M2 = M0#{13.0=>new},
+ #{10:=a0,20:=b0,13.0:=new,"40":="d0",<<"50">>:="e0"} = M2,
+ M2 = M0#{13.0:=wrong,13.0=>new},
+
+ %% Error cases.
+ BadMap = id(a_bad_map),
+ {'EXIT',{{badmap,BadMap},_}} = (catch BadMap#{nonexisting=>M0}),
+ ok.
+
+
t_update_exact(Config) when is_list(Config) ->
M0 = id(#{1=>a,2=>b,3.0=>c,4=>d,5=>e}),
@@ -291,13 +955,110 @@ t_update_exact(Config) when is_list(Config) ->
%% M2 = M0#{3=>wrong,3.0:=new}, %% FIXME
%% Errors cases.
- {'EXIT',{badarg,_}} = (catch M0#{nonexisting:=val}),
- {'EXIT',{badarg,_}} = (catch M0#{1.0:=v,1.0=>v2}),
- {'EXIT',{badarg,_}} = (catch M0#{42.0:=v,42:=v2}),
- {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
+ do_badmap(fun(T) ->
+ {'EXIT',{{badmap,T},_}} =
+ (catch T#{nonexisting=>val})
+ end),
+ Empty = id(#{}),
+ {'EXIT',{{badkey,nonexisting},_}} = (catch Empty#{nonexisting:=val}),
+ {'EXIT',{{badkey,nonexisting},_}} = (catch M0#{nonexisting:=val}),
+ {'EXIT',{{badkey,1.0},_}} = (catch M0#{1.0:=v,1.0=>v2}),
+ {'EXIT',{{badkey,_},_}} = (catch M0#{42.0:=v,42:=v2}),
+ {'EXIT',{{badkey,_},_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
+
+ %% Evaluation order: key and value expressions are evaluated before the map argument is checked.
+ BadMap = id([no,map]),
+ {'EXIT',{blurf,_}} =
+ (catch BadMap#{whatever:=id(error(blurf))}),
+ {'EXIT',{blurf,_}} =
+ (catch BadMap#{id(error(blurf)):=whatever}),
+ {'EXIT',{{badmap,BadMap},_}} =
+ (catch BadMap#{nonexisting:=whatever}),
+ ok.
+
+t_update_exact_large(Config) when is_list(Config) ->
+ M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }),
+
+
+ M1 = M0#{10:=42,<<"55">>:=100,10.0:=[a,b,c]},
+ #{ 10:=42,<<"55">>:=100,{["05"]}:="15",10.0:=[a,b,c],
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } := "large map key 1" } = M1,
+
+ M1 = M0#{10:=wrong,10=>42,<<"55">>=>wrong,<<"55">>:=100,10.0:=[a,b,c]},
+
+ M2 = M0#{13.0:=new},
+ #{10:=a0,20:=b0,13.0:=new} = M2,
+ M2 = M0#{13.0=>wrong,13.0:=new},
+
+ %% Error cases.
+ {'EXIT',{{badkey,nonexisting},_}} = (catch M0#{nonexisting:=val}),
+ {'EXIT',{{badkey,1.0},_}} = (catch M0#{1.0:=v,1.0=>v2}),
+ {'EXIT',{{badkey,_},_}} = (catch M0#{42.0:=v,42:=v2}),
+ {'EXIT',{{badkey,_},_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
ok.
+
t_update_values(Config) when is_list(Config) ->
V0 = id(1337),
M0 = #{ a => 1, val => V0},
@@ -371,6 +1132,75 @@ t_guard_sequence(Config) when is_list(Config) ->
{'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(#{b=>5})),
ok.
+t_guard_sequence_large(Config) when is_list(Config) ->
+ M0 = id(#{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19",
+
+ 10.0=>fa0,20.0=>fb0,30.0=>"fc0",
+ 11.0=>fa1,21.0=>fb1,31.0=>"fc1",
+ 12.0=>fa2,22.0=>fb2,32.0=>"fc2",
+ 13.0=>fa3,23.0=>fb3,33.0=>"fc3",
+ 14.0=>fa4,24.0=>fb4,34.0=>"fc4",
+
+ 15.0=>fa5,25.0=>fb5,35.0=>"fc5",
+ 16.0=>fa6,26.0=>fb6,36.0=>"fc6",
+ 17.0=>fa7,27.0=>fb7,37.0=>"fc7",
+ 18.0=>fa8,28.0=>fb8,38.0=>"fc8",
+ 19.0=>fa9,29.0=>fb9,39.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }),
+
+ {1, "a"} = map_guard_sequence_1(M0#{seq=>1,val=>id("a")}),
+ {2, "b"} = map_guard_sequence_1(M0#{seq=>2,val=>id("b")}),
+ {3, "c"} = map_guard_sequence_1(M0#{seq=>3,val=>id("c")}),
+ {4, "d"} = map_guard_sequence_1(M0#{seq=>4,val=>id("d")}),
+ {5, "e"} = map_guard_sequence_1(M0#{seq=>5,val=>id("e")}),
+
+ {1,M1} = map_guard_sequence_2(M1 = id(M0#{a=>3})),
+ {2,M2} = map_guard_sequence_2(M2 = id(M0#{a=>4, b=>4})),
+ {3,gg,M3} = map_guard_sequence_2(M3 = id(M0#{a=>gg, b=>4})),
+ {4,sc,sc,M4} = map_guard_sequence_2(M4 = id(M0#{a=>sc, b=>3, c=>sc2})),
+ {5,kk,kk,M5} = map_guard_sequence_2(M5 = id(M0#{a=>kk, b=>other, c=>sc2})),
+
+ {'EXIT',{function_clause,_}} = (catch map_guard_sequence_1(M0#{seq=>6,val=>id("e")})),
+ {'EXIT',{function_clause,_}} = (catch map_guard_sequence_2(M0#{b=>5})),
+ ok.
+
map_guard_sequence_1(#{seq:=1=Seq, val:=Val}) -> {Seq,Val};
map_guard_sequence_1(#{seq:=2=Seq, val:=Val}) -> {Seq,Val};
map_guard_sequence_1(#{seq:=3=Seq, val:=Val}) -> {Seq,Val};
@@ -390,6 +1220,65 @@ t_guard_update(Config) when is_list(Config) ->
second = map_guard_update(#{y=>old}, #{x=>second,y=>old}),
ok.
+t_guard_update_large(Config) when is_list(Config) ->
+ M0 = id(#{ 70=>a0,80=>b0,90=>"c0","40"=>"d0",<<"50">>=>"e0",{["00",03]}=>"10",
+ 71=>a1,81=>b1,91=>"c1","41"=>"d1",<<"51">>=>"e1",{["01",03]}=>"11",
+ 72=>a2,82=>b2,92=>"c2","42"=>"d2",<<"52">>=>"e2",{["02",03]}=>"12",
+ 73=>a3,83=>b3,93=>"c3","43"=>"d3",<<"53">>=>"e3",{["03",03]}=>"13",
+ 74=>a4,84=>b4,94=>"c4","44"=>"d4",<<"54">>=>"e4",{["04",03]}=>"14",
+
+ 75=>a5,85=>b5,95=>"c5","45"=>"d5",<<"55">>=>"e5",{["05",03]}=>"15",
+ 76=>a6,86=>b6,96=>"c6","46"=>"d6",<<"56">>=>"e6",{["06",03]}=>"16",
+ 77=>a7,87=>b7,97=>"c7","47"=>"d7",<<"57">>=>"e7",{["07",03]}=>"17",
+ 78=>a8,88=>b8,98=>"c8","48"=>"d8",<<"58">>=>"e8",{["08",03]}=>"18",
+ 79=>a9,89=>b9,99=>"c9","49"=>"d9",<<"59">>=>"e9",{["09",03]}=>"19",
+
+ 70.0=>fa0,80.0=>fb0,90.0=>"fc0",
+ 71.0=>fa1,81.0=>fb1,91.0=>"fc1",
+ 72.0=>fa2,82.0=>fb2,92.0=>"fc2",
+ 73.0=>fa3,83.0=>fb3,93.0=>"fc3",
+ 74.0=>fa4,84.0=>fb4,94.0=>"fc4",
+
+ 75.0=>fa5,85.0=>fb5,95.0=>"fc5",
+ 76.0=>fa6,86.0=>fb6,96.0=>"fc6",
+ 77.0=>fa7,87.0=>fb7,97.0=>"fc7",
+ 78.0=>fa8,88.0=>fb8,98.0=>"fc8",
+ 79.0=>fa9,89.0=>fb9,99.0=>"fc9",
+
+ #{ one => small, map => key } => "small map key 1",
+ #{ second => small, map => key } => "small map key 2",
+ #{ third => small, map => key } => "small map key 3",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ 16=>a6,26=>b6,36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 1",
+
+ #{ 10=>a0,20=>b0,30=>"c0","40"=>"d0",<<"50">>=>"e0",{["00"]}=>"10",
+ 11=>a1,21=>b1,31=>"c1","41"=>"d1",<<"51">>=>"e1",{["01"]}=>"11",
+ 12=>a2,22=>b2,32=>"c2","42"=>"d2",<<"52">>=>"e2",{["02"]}=>"12",
+ 13=>a3,23=>b3,33=>"c3","43"=>"d3",<<"53">>=>"e3",{["03"]}=>"13",
+ 14=>a4,24=>b4,34=>"c4","44"=>"d4",<<"54">>=>"e4",{["04"]}=>"14",
+
+ 15=>a5,25=>b5,35=>"c5","45"=>"d5",<<"55">>=>"e5",{["05"]}=>"15",
+ k16=>a6,k26=>b6,k36=>"c6","46"=>"d6",<<"56">>=>"e6",{["06"]}=>"16",
+ 17=>a7,27=>b7,37=>"c7","47"=>"d7",<<"57">>=>"e7",{["07"]}=>"17",
+ 18=>a8,28=>b8,38=>"c8","48"=>"d8",<<"58">>=>"e8",{["08"]}=>"18",
+ 19=>a9,29=>b9,39=>"c9","49"=>"d9",<<"59">>=>"e9",{["09"]}=>"19" } => "large map key 2" }),
+
+
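+ %% map_guard_update/2 (below) adds the key x in a guard and compares the result with its second argument.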
+ error = map_guard_update(M0#{},M0#{}),
+ first = map_guard_update(M0#{},M0#{x=>first}),
+ second = map_guard_update(M0#{y=>old}, M0#{x=>second,y=>old}),
+ ok.
+
map_guard_update(M1, M2) when M1#{x=>first} =:= M2 -> first;
map_guard_update(M1, M2) when M1#{x=>second} =:= M2 -> second;
map_guard_update(_, _) -> error.
@@ -419,6 +1308,42 @@ t_guard_receive(Config) when is_list(Config) ->
done = call(Pid, done),
ok.
+-define(t_guard_receive_large_procs, 50).
+
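+%% Spawn a number of processes that exchange 'hello' messages; each pid's count is
+%% kept in the map under 'procs', and a process is told 'done' after 15 rounds.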
+t_guard_receive_large(Config) when is_list(Config) ->
+ M = lists:foldl(fun(_,#{procs := Ps } = M) ->
+ M#{ procs := Ps#{ spawn_link(fun() -> grecv_loop() end) => 0 }}
+ end, #{procs => #{}, done => 0}, lists:seq(1,?t_guard_receive_large_procs)),
+ lists:foreach(fun(Pid) ->
+ Pid ! {self(), hello}
+ end, maps:keys(maps:get(procs,M))),
+ ok = guard_receive_large_loop(M),
+ ok.
+
+guard_receive_large_loop(#{done := ?t_guard_receive_large_procs}) ->
+ ok;
+guard_receive_large_loop(M) ->
+ receive
+ #{pid := Pid, msg := hello} ->
+ case M of
+ #{done := Count, procs := #{Pid := 15}} ->
+ Pid ! {self(), done},
+ guard_receive_large_loop(M#{done := Count + 1});
+ #{procs := #{Pid := Count} = Ps} ->
+ Pid ! {self(), hello},
+ guard_receive_large_loop(M#{procs := Ps#{Pid := Count + 1}})
+ end
+ end.
+
+grecv_loop() ->
+ receive
+ {_, done} ->
+ ok;
+ {Pid, hello} ->
+ Pid ! #{pid=>self(), msg=>hello},
+ grecv_loop()
+ end.
+
call(Pid, M) ->
Pid ! {self(), M}, receive {Pid, Res} -> Res end.
@@ -449,6 +1374,14 @@ guard_receive_loop() ->
t_list_comprehension(Config) when is_list(Config) ->
[#{k:=1},#{k:=2},#{k:=3}] = [#{k=>I} || I <- [1,2,3]],
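+ %% Comprehensions building and matching maps with bitstring and list keys.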
+ Ls = id([#{<<2:301>> => I, "wat" => I + 1} || I <- [1,2,3]]),
+ [#{<<2:301>>:=1,"wat":=2},#{<<2:301>>:=2,"wat":=3},#{<<2:301>>:=3,"wat":=4}] = Ls,
+ [{1,2},{2,3},{3,4}] = id([{I2,I1} || #{"wat" := I1, <<2:301>> := I2} <- Ls]),
+
+ Ks = lists:seq($a,$z),
+ Ms = [#{[K1,K2]=>{K1,K2}} || K1 <- Ks, K2 <- Ks],
+ [#{"aa" := {$a,$a}},#{"ab":={$a,$b}}|_] = Ms,
+ [#{"zz" := {$z,$z}},#{"zy":={$z,$y}}|_] = lists:reverse(Ms),
ok.
t_guard_fun(Config) when is_list(Config) ->
@@ -494,7 +1427,7 @@ t_map_sort_literals(Config) when is_list(Config) ->
true = #{ c => 1, b => 1, a => 1 } < id(#{ b => 1, c => 1, d => 1}),
true = #{ "a" => 1 } < id(#{ <<"a">> => 1}),
false = #{ <<"a">> => 1 } < id(#{ "a" => 1}),
- false = #{ 1 => 1 } < id(#{ 1.0 => 1}),
+ true = #{ 1 => 1 } < id(#{ 1.0 => 1}),
false = #{ 1.0 => 1 } < id(#{ 1 => 1}),
%% value order
@@ -526,11 +1459,11 @@ t_bif_map_get(Config) when is_list(Config) ->
"v4" = maps:get(<<"k2">>, M#{ <<"k2">> => "v4" }),
%% error case
- {'EXIT',{badarg, [{maps,get,_,_}|_]}} = (catch maps:get(a,[])),
- {'EXIT',{badarg, [{maps,get,_,_}|_]}} = (catch maps:get(a,<<>>)),
- {'EXIT',{bad_key,[{maps,get,_,_}|_]}} = (catch maps:get({1,1}, #{{1,1.0} => "tuple"})),
- {'EXIT',{bad_key,[{maps,get,_,_}|_]}} = (catch maps:get(a,#{})),
- {'EXIT',{bad_key,[{maps,get,_,_}|_]}} = (catch maps:get(a,#{ b=>1, c=>2})),
+ {'EXIT',{{badmap,[]},[{maps,get,_,_}|_]}} = (catch maps:get(a, [])),
+ {'EXIT',{{badmap,<<>>},[{maps,get,_,_}|_]}} = (catch maps:get(a, <<>>)),
+ {'EXIT',{{badkey,{1,1}},[{maps,get,_,_}|_]}} = (catch maps:get({1,1}, #{{1,1.0} => "tuple"})),
+ {'EXIT',{{badkey,a},[{maps,get,_,_}|_]}} = (catch maps:get(a, #{})),
+ {'EXIT',{{badkey,a},[{maps,get,_,_}|_]}} = (catch maps:get(a, #{ b=>1, c=>2})),
ok.
t_bif_map_find(Config) when is_list(Config) ->
@@ -554,8 +1487,10 @@ t_bif_map_find(Config) when is_list(Config) ->
error = maps:find({1.0,1}, #{ a=>a, {1,1.0} => "tuple hi"}), % reverse types in tuple key
- {'EXIT',{badarg,[{maps,find,_,_}|_]}} = (catch maps:find(a,id([]))),
- {'EXIT',{badarg,[{maps,find,_,_}|_]}} = (catch maps:find(a,id(<<>>))),
+ {'EXIT',{{badmap,[]},[{maps,find,_,_}|_]}} =
+ (catch maps:find(a, id([]))),
+ {'EXIT',{{badmap,<<>>},[{maps,find,_,_}|_]}} =
+ (catch maps:find(a, id(<<>>))),
ok.
@@ -580,26 +1515,26 @@ t_bif_map_is_key(Config) when is_list(Config) ->
false = maps:is_key(1.0, maps:put(1, "number", M1)),
%% error case
- {'EXIT',{badarg,[{maps,is_key,_,_}|_]}} = (catch maps:is_key(a,id([]))),
- {'EXIT',{badarg,[{maps,is_key,_,_}|_]}} = (catch maps:is_key(a,id(<<>>))),
+ {'EXIT',{{badmap,[]},[{maps,is_key,_,_}|_]}} = (catch maps:is_key(a, id([]))),
+ {'EXIT',{{badmap,<<>>},[{maps,is_key,_,_}|_]}} = (catch maps:is_key(a, id(<<>>))),
ok.
t_bif_map_keys(Config) when is_list(Config) ->
[] = maps:keys(#{}),
- [1,2,3,4,5] = maps:keys(#{ 1 => a, 2 => b, 3 => c, 4 => d, 5 => e}),
- [1,2,3,4,5] = maps:keys(#{ 4 => d, 5 => e, 1 => a, 2 => b, 3 => c}),
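+ %% The order of maps:keys/1 is undefined, so sort before comparing.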
+ [1,2,3,4,5] = lists:sort(maps:keys(#{ 1 => a, 2 => b, 3 => c, 4 => d, 5 => e})),
+ [1,2,3,4,5] = lists:sort(maps:keys(#{ 4 => d, 5 => e, 1 => a, 2 => b, 3 => c})),
- % values in key order: [4,int,"hi",<<"key">>]
M1 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>, 4 => number},
- [4,int,"hi",<<"key">>] = maps:keys(M1),
+ [4,int,"hi",<<"key">>] = lists:sort(maps:keys(M1)),
%% error case
- {'EXIT',{badarg,[{maps,keys,_,_}|_]}} = (catch maps:keys(1 bsl 65 + 3)),
- {'EXIT',{badarg,[{maps,keys,_,_}|_]}} = (catch maps:keys(154)),
- {'EXIT',{badarg,[{maps,keys,_,_}|_]}} = (catch maps:keys(atom)),
- {'EXIT',{badarg,[{maps,keys,_,_}|_]}} = (catch maps:keys([])),
- {'EXIT',{badarg,[{maps,keys,_,_}|_]}} = (catch maps:keys(<<>>)),
+ BigNum = 1 bsl 65 + 3,
+ {'EXIT',{{badmap,BigNum},[{maps,keys,_,_}|_]}} = (catch maps:keys(BigNum)),
+ {'EXIT',{{badmap,154},[{maps,keys,_,_}|_]}} = (catch maps:keys(154)),
+ {'EXIT',{{badmap,atom},[{maps,keys,_,_}|_]}} = (catch maps:keys(atom)),
+ {'EXIT',{{badmap,[]},[{maps,keys,_,_}|_]}} = (catch maps:keys([])),
+ {'EXIT',{{badmap,<<>>},[{maps,keys,_,_}|_]}} = (catch maps:keys(<<>>)),
ok.
t_bif_map_new(Config) when is_list(Config) ->
@@ -628,93 +1563,64 @@ t_bif_map_merge(Config) when is_list(Config) ->
{1,2} := "tuple", "hi" := "hello again", <<"key">> := <<"value">>} = maps:merge(M0,M1),
%% error case
- {'EXIT',{badarg,[{maps,merge,_,_}|_]}} = (catch maps:merge((1 bsl 65 + 3), <<>>)),
- {'EXIT',{badarg,[{maps,merge,_,_}|_]}} = (catch maps:merge(<<>>, id(#{ a => 1}))),
- {'EXIT',{badarg,[{maps,merge,_,_}|_]}} = (catch maps:merge(id(#{ a => 2}), <<>> )),
+ BigNum = 1 bsl 65 + 3,
+ {'EXIT',{{badmap,BigNum},[{maps,merge,_,_}|_]}} = (catch maps:merge(BigNum, <<>>)),
+ {'EXIT',{{badmap,<<>>},[{maps,merge,_,_}|_]}} = (catch maps:merge(<<>>, id(#{ a => 1}))),
+ {'EXIT',{{badmap,<<>>},[{maps,merge,_,_}|_]}} = (catch maps:merge(id(#{ a => 2}), <<>> )),
ok.
-
t_bif_map_put(Config) when is_list(Config) ->
M0 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
4 => number, 18446744073709551629 => wat},
M1 = #{ "hi" := "hello"} = maps:put("hi", "hello", #{}),
- ["hi"] = maps:keys(M1),
- ["hello"] = maps:values(M1),
+ true = is_members(["hi"],maps:keys(M1)),
+ true = is_members(["hello"],maps:values(M1)),
M2 = #{ int := 3 } = maps:put(int, 3, M1),
- [int,"hi"] = maps:keys(M2),
- [3,"hello"] = maps:values(M2),
+ true = is_members([int,"hi"],maps:keys(M2)),
+ true = is_members([3,"hello"],maps:values(M2)),
M3 = #{ <<"key">> := <<"value">> } = maps:put(<<"key">>, <<"value">>, M2),
- [int,"hi",<<"key">>] = maps:keys(M3),
- [3,"hello",<<"value">>] = maps:values(M3),
+ true = is_members([int,"hi",<<"key">>],maps:keys(M3)),
+ true = is_members([3,"hello",<<"value">>],maps:values(M3)),
M4 = #{ 18446744073709551629 := wat } = maps:put(18446744073709551629, wat, M3),
- [18446744073709551629,int,"hi",<<"key">>] = maps:keys(M4),
- [wat,3,"hello",<<"value">>] = maps:values(M4),
+ true = is_members([18446744073709551629,int,"hi",<<"key">>],maps:keys(M4)),
+ true = is_members([wat,3,"hello",<<"value">>],maps:values(M4)),
M0 = #{ 4 := number } = M5 = maps:put(4, number, M4),
- [4,18446744073709551629,int,"hi",<<"key">>] = maps:keys(M5),
- [number,wat,3,"hello",<<"value">>] = maps:values(M5),
+ true = is_members([4,18446744073709551629,int,"hi",<<"key">>],maps:keys(M5)),
+ true = is_members([number,wat,3,"hello",<<"value">>],maps:values(M5)),
M6 = #{ <<"key">> := <<"other value">> } = maps:put(<<"key">>, <<"other value">>, M5),
- [4,18446744073709551629,int,"hi",<<"key">>] = maps:keys(M6),
- [number,wat,3,"hello",<<"other value">>] = maps:values(M6),
+ true = is_members([4,18446744073709551629,int,"hi",<<"key">>],maps:keys(M6)),
+ true = is_members([number,wat,3,"hello",<<"other value">>],maps:values(M6)),
%% error case
- {'EXIT',{badarg,[{maps,put,_,_}|_]}} = (catch maps:put(1,a,1 bsl 65 + 3)),
- {'EXIT',{badarg,[{maps,put,_,_}|_]}} = (catch maps:put(1,a,154)),
- {'EXIT',{badarg,[{maps,put,_,_}|_]}} = (catch maps:put(1,a,atom)),
- {'EXIT',{badarg,[{maps,put,_,_}|_]}} = (catch maps:put(1,a,[])),
- {'EXIT',{badarg,[{maps,put,_,_}|_]}} = (catch maps:put(1,a,<<>>)),
- ok.
-
-t_bif_map_remove(Config) when is_list(Config) ->
- 0 = erlang:map_size(maps:remove(some_key, #{})),
-
- M0 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
- 4 => number, 18446744073709551629 => wat},
-
- M1 = maps:remove("hi", M0),
- [4,18446744073709551629,int,<<"key">>] = maps:keys(M1),
- [number,wat,3,<<"value">>] = maps:values(M1),
-
- M2 = maps:remove(int, M1),
- [4,18446744073709551629,<<"key">>] = maps:keys(M2),
- [number,wat,<<"value">>] = maps:values(M2),
-
- M3 = maps:remove(<<"key">>, M2),
- [4,18446744073709551629] = maps:keys(M3),
- [number,wat] = maps:values(M3),
-
- M4 = maps:remove(18446744073709551629, M3),
- [4] = maps:keys(M4),
- [number] = maps:values(M4),
-
- M5 = maps:remove(4, M4),
- [] = maps:keys(M5),
- [] = maps:values(M5),
+ BigNum = 1 bsl 65 + 3,
+ {'EXIT',{{badmap,BigNum},[{maps,put,_,_}|_]}} = (catch maps:put(1, a, BigNum)),
+ {'EXIT',{{badmap,154},[{maps,put,_,_}|_]}} = (catch maps:put(1, a, 154)),
+ {'EXIT',{{badmap,atom},[{maps,put,_,_}|_]}} = (catch maps:put(1, a, atom)),
+ {'EXIT',{{badmap,[]},[{maps,put,_,_}|_]}} = (catch maps:put(1, a, [])),
+ {'EXIT',{{badmap,<<>>},[{maps,put,_,_}|_]}} = (catch maps:put(1, a, <<>>)),
+ ok.
- M0 = maps:remove(5,M0),
- M0 = maps:remove("hi there",M0),
+is_members(Ks,Ls) when length(Ks) =/= length(Ls) -> false;
+is_members(Ks,Ls) -> is_members_do(Ks,Ls).
- #{ "hi" := "hello", int := 3, 4 := number} = maps:remove(18446744073709551629,maps:remove(<<"key">>,M0)),
+is_members_do([],[]) -> true;
+is_members_do([],_) -> false;
+is_members_do([K|Ks],Ls) ->
+ is_members_do(Ks, lists:delete(K,Ls)).
- %% error case
- {'EXIT',{badarg,[{maps,remove,_,_}|_]}} = (catch maps:remove(a,1 bsl 65 + 3)),
- {'EXIT',{badarg,[{maps,remove,_,_}|_]}} = (catch maps:remove(1,154)),
- {'EXIT',{badarg,[{maps,remove,_,_}|_]}} = (catch maps:remove(a,atom)),
- {'EXIT',{badarg,[{maps,remove,_,_}|_]}} = (catch maps:remove(1,[])),
- {'EXIT',{badarg,[{maps,remove,_,_}|_]}} = (catch maps:remove(a,<<>>)),
- ok.
t_bif_map_update(Config) when is_list(Config) ->
M0 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
@@ -736,103 +1642,142 @@ t_bif_map_update(Config) when is_list(Config) ->
4 := number, 18446744073709551629 := wazzup} = maps:update(18446744073709551629, wazzup, M0),
%% error case
- {'EXIT',{badarg,[{maps,update,_,_}|_]}} = (catch maps:update(1,none,{})),
- {'EXIT',{badarg,[{maps,update,_,_}|_]}} = (catch maps:update(1,none,<<"value">>)),
- {'EXIT',{badarg,[{maps,update,_,_}|_]}} = (catch maps:update(5,none,M0)),
+ {'EXIT',{{badmap,{}},[{maps,update,_,_}|_]}} = (catch maps:update(1, none, {})),
+ {'EXIT',{{badmap,<<"value">>},[{maps,update,_,_}|_]}} =
+ (catch maps:update(1, none, <<"value">>)),
+ {'EXIT',{{badkey,5},[{maps,update,_,_}|_]}} = (catch maps:update(5, none, M0)),
ok.
+t_bif_map_remove(Config) when is_list(Config) ->
+ 0 = erlang:map_size(maps:remove(some_key, #{})),
+
+ M0 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>,
+ 4 => number, 18446744073709551629 => wat},
+
+ M1 = maps:remove("hi", M0),
+ true = is_members([4,18446744073709551629,int,<<"key">>],maps:keys(M1)),
+ true = is_members([number,wat,3,<<"value">>],maps:values(M1)),
+
+ M2 = maps:remove(int, M1),
+ true = is_members([4,18446744073709551629,<<"key">>],maps:keys(M2)),
+ true = is_members([number,wat,<<"value">>],maps:values(M2)),
+
+ M3 = maps:remove(<<"key">>, M2),
+ true = is_members([4,18446744073709551629],maps:keys(M3)),
+ true = is_members([number,wat],maps:values(M3)),
+
+ M4 = maps:remove(18446744073709551629, M3),
+ true = is_members([4],maps:keys(M4)),
+ true = is_members([number],maps:values(M4)),
+
+ M5 = maps:remove(4, M4),
+ [] = maps:keys(M5),
+ [] = maps:values(M5),
+
+ M0 = maps:remove(5,M0),
+ M0 = maps:remove("hi there",M0),
+
+ #{ "hi" := "hello", int := 3, 4 := number} = maps:remove(18446744073709551629,maps:remove(<<"key">>,M0)),
+
+ %% error case
+ BigNum = 1 bsl 65 + 3,
+ {'EXIT',{{badmap,BigNum},[{maps,remove,_,_}|_]}} = (catch maps:remove(a, BigNum)),
+ {'EXIT',{{badmap,154},[{maps,remove,_,_}|_]}} = (catch maps:remove(1, 154)),
+ {'EXIT',{{badmap,atom},[{maps,remove,_,_}|_]}} = (catch maps:remove(a, atom)),
+ {'EXIT',{{badmap,[]},[{maps,remove,_,_}|_]}} = (catch maps:remove(1, [])),
+ {'EXIT',{{badmap,<<>>},[{maps,remove,_,_}|_]}} = (catch maps:remove(a, <<>>)),
+ ok.
t_bif_map_values(Config) when is_list(Config) ->
[] = maps:values(#{}),
+ [1] = maps:values(#{a=>1}),
- [a,b,c,d,e] = maps:values(#{ 1 => a, 2 => b, 3 => c, 4 => d, 5 => e}),
- [a,b,c,d,e] = maps:values(#{ 4 => d, 5 => e, 1 => a, 2 => b, 3 => c}),
+ true = is_members([a,b,c,d,e],maps:values(#{ 1 => a, 2 => b, 3 => c, 4 => d, 5 => e})),
+ true = is_members([a,b,c,d,e],maps:values(#{ 4 => d, 5 => e, 1 => a, 2 => b, 3 => c})),
- % values in key order: [4,int,"hi",<<"key">>]
M1 = #{ "hi" => "hello", int => 3, <<"key">> => <<"value">>, 4 => number},
M2 = M1#{ "hi" => "hello2", <<"key">> => <<"value2">> },
- [number,3,"hello2",<<"value2">>] = maps:values(M2),
- [number,3,"hello",<<"value">>] = maps:values(M1),
+ true = is_members([number,3,"hello2",<<"value2">>],maps:values(M2)),
+ true = is_members([number,3,"hello",<<"value">>],maps:values(M1)),
%% error case
- {'EXIT',{badarg,[{maps,values,_,_}|_]}} = (catch maps:values(1 bsl 65 + 3)),
- {'EXIT',{badarg,[{maps,values,_,_}|_]}} = (catch maps:values(atom)),
- {'EXIT',{badarg,[{maps,values,_,_}|_]}} = (catch maps:values([])),
- {'EXIT',{badarg,[{maps,values,_,_}|_]}} = (catch maps:values(<<>>)),
+ BigNum = 1 bsl 65 + 3,
+ {'EXIT',{{badmap,BigNum},[{maps,values,_,_}|_]}} = (catch maps:values(BigNum)),
+ {'EXIT',{{badmap,atom},[{maps,values,_,_}|_]}} = (catch maps:values(atom)),
+ {'EXIT',{{badmap,[]},[{maps,values,_,_}|_]}} = (catch maps:values([])),
+ {'EXIT',{{badmap,<<>>},[{maps,values,_,_}|_]}} = (catch maps:values(<<>>)),
ok.
+
+
t_erlang_hash(Config) when is_list(Config) ->
ok = t_bif_erlang_phash2(),
ok = t_bif_erlang_phash(),
ok = t_bif_erlang_hash(),
-
ok.
t_bif_erlang_phash2() ->
-
39679005 = erlang:phash2(#{}),
- 78942764 = erlang:phash2(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 }),
- 37338230 = erlang:phash2(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} }),
- 14363616 = erlang:phash2(#{ 1 => a }),
- 51612236 = erlang:phash2(#{ a => 1 }),
+ 33667975 = erlang:phash2(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 }), % 78942764
+ 95332690 = erlang:phash2(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} }), % 37338230
+ 108954384 = erlang:phash2(#{ 1 => a }), % 14363616
+ 59617982 = erlang:phash2(#{ a => 1 }), % 51612236
- 37468437 = erlang:phash2(#{{} => <<>>}),
- 44049159 = erlang:phash2(#{<<>> => {}}),
+ 42770201 = erlang:phash2(#{{} => <<>>}), % 37468437
+ 71687700 = erlang:phash2(#{<<>> => {}}), % 44049159
M0 = #{ a => 1, "key" => <<"value">> },
M1 = maps:remove("key",M0),
M2 = M1#{ "key" => <<"value">> },
- 118679416 = erlang:phash2(M0),
- 51612236 = erlang:phash2(M1),
- 118679416 = erlang:phash2(M2),
+ 70249457 = erlang:phash2(M0), % 118679416
+ 59617982 = erlang:phash2(M1), % 51612236
+ 70249457 = erlang:phash2(M2), % 118679416
ok.
t_bif_erlang_phash() ->
Sz = 1 bsl 32,
- 268440612 = erlang:phash(#{},Sz),
- 1196461908 = erlang:phash(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 },Sz),
- 3944426064 = erlang:phash(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} },Sz),
- 1394238263 = erlang:phash(#{ 1 => a },Sz),
- 4066388227 = erlang:phash(#{ a => 1 },Sz),
+ 1113425985 = erlang:phash(#{},Sz), % 268440612
+ 1510068139 = erlang:phash(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 },Sz), % 1196461908
+ 3182345590 = erlang:phash(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} },Sz), % 3944426064
+ 2927531828 = erlang:phash(#{ 1 => a },Sz), % 1394238263
+ 1670235874 = erlang:phash(#{ a => 1 },Sz), % 4066388227
- 1578050717 = erlang:phash(#{{} => <<>>},Sz),
- 1578050717 = erlang:phash(#{<<>> => {}},Sz), % yep, broken
+ 3935089469 = erlang:phash(#{{} => <<>>},Sz), % 1578050717
+ 71692856 = erlang:phash(#{<<>> => {}},Sz), % 1578050717
M0 = #{ a => 1, "key" => <<"value">> },
M1 = maps:remove("key",M0),
M2 = M1#{ "key" => <<"value">> },
- 3590546636 = erlang:phash(M0,Sz),
- 4066388227 = erlang:phash(M1,Sz),
- 3590546636 = erlang:phash(M2,Sz),
+ 2620391445 = erlang:phash(M0,Sz), % 3590546636
+ 1670235874 = erlang:phash(M1,Sz), % 4066388227
+ 2620391445 = erlang:phash(M2,Sz), % 3590546636
ok.
t_bif_erlang_hash() ->
Sz = 1 bsl 27 - 1,
- 5158 = erlang:hash(#{},Sz),
- 71555838 = erlang:hash(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 },Sz),
- 5497225 = erlang:hash(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} },Sz),
- 126071654 = erlang:hash(#{ 1 => a },Sz),
- 126426236 = erlang:hash(#{ a => 1 },Sz),
+ 39684169 = erlang:hash(#{},Sz), % 5158
+ 33673142 = erlang:hash(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 },Sz), % 71555838
+ 95337869 = erlang:hash(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} },Sz), % 5497225
+ 108959561 = erlang:hash(#{ 1 => a },Sz), % 126071654
+ 59623150 = erlang:hash(#{ a => 1 },Sz), % 126426236
- 101655720 = erlang:hash(#{{} => <<>>},Sz),
- 101655720 = erlang:hash(#{<<>> => {}},Sz), % yep, broken
+ 42775386 = erlang:hash(#{{} => <<>>},Sz), % 101655720
+ 71692856 = erlang:hash(#{<<>> => {}},Sz), % 101655720
M0 = #{ a => 1, "key" => <<"value">> },
M1 = maps:remove("key",M0),
M2 = M1#{ "key" => <<"value">> },
- 38260486 = erlang:hash(M0,Sz),
- 126426236 = erlang:hash(M1,Sz),
- 38260486 = erlang:hash(M2,Sz),
+ 70254632 = erlang:hash(M0,Sz), % 38260486
+ 59623150 = erlang:hash(M1,Sz), % 126426236
+ 70254632 = erlang:hash(M2,Sz), % 38260486
ok.
-
t_map_encode_decode(Config) when is_list(Config) ->
<<131,116,0,0,0,0>> = erlang:term_to_binary(#{}),
Pairs = [
@@ -895,43 +1840,46 @@ t_map_encode_decode(Config) when is_list(Config) ->
map_encode_decode_and_match([{K,V}|Pairs], EncodedPairs, M0) ->
M1 = maps:put(K,V,M0),
B0 = erlang:term_to_binary(M1),
- Ls = lists:sort(fun(A,B) -> erts_internal:cmp_term(A,B) < 0 end, [{K, erlang:term_to_binary(K), erlang:term_to_binary(V)}|EncodedPairs]),
- %% sort Ks and Vs according to term spec, then match it
- KVbins = lists:foldr(fun({_,Kbin,Vbin}, Acc) -> [Kbin,Vbin | Acc] end, [], Ls),
- ok = match_encoded_map(B0, length(Ls), KVbins),
+ Ls = [{erlang:term_to_binary(K), erlang:term_to_binary(V)}|EncodedPairs],
+ ok = match_encoded_map(B0, length(Ls), Ls),
%% decode and match it
M1 = erlang:binary_to_term(B0),
map_encode_decode_and_match(Pairs,Ls,M1);
map_encode_decode_and_match([],_,_) -> ok.
match_encoded_map(<<131,116,Size:32,Encoded/binary>>,Size,Items) ->
- match_encoded_map(Encoded,Items);
+ match_encoded_map_stripped_size(Encoded,Items,Items);
match_encoded_map(_,_,_) -> no_match_size.
-match_encoded_map(<<>>,[]) -> ok;
-match_encoded_map(Bin,[<<131,Item/binary>>|Items]) ->
- Size = erlang:byte_size(Item),
- <<EncodedTerm:Size/binary, Bin1/binary>> = Bin,
- EncodedTerm = Item, %% Asssert
- match_encoded_map(Bin1,Items).
+match_encoded_map_stripped_size(<<>>,_,_) -> ok;
+match_encoded_map_stripped_size(B0,[{<<131,K/binary>>,<<131,V/binary>>}|Items],Ls) ->
+ Ksz = byte_size(K),
+ Vsz = byte_size(V),
+ case B0 of
+ <<K:Ksz/binary,V:Vsz/binary,B1/binary>> ->
+ match_encoded_map_stripped_size(B1,Ls,Ls);
+ _ ->
+ match_encoded_map_stripped_size(B0,Items,Ls)
+ end;
+match_encoded_map_stripped_size(_,[],_) -> fail.
t_bif_map_to_list(Config) when is_list(Config) ->
[] = maps:to_list(#{}),
- [{a,1},{b,2}] = maps:to_list(#{a=>1,b=>2}),
- [{a,1},{b,2},{c,3}] = maps:to_list(#{c=>3,a=>1,b=>2}),
- [{a,1},{b,2},{g,3}] = maps:to_list(#{g=>3,a=>1,b=>2}),
- [{a,1},{b,2},{g,3},{"c",4}] = maps:to_list(#{g=>3,a=>1,b=>2,"c"=>4}),
- [{3,v2},{hi,v4},{{hi,3},v5},{"hi",v3},{<<"hi">>,v1}] = maps:to_list(#{
- <<"hi">>=>v1,3=>v2,"hi"=>v3,hi=>v4,{hi,3}=>v5}),
+ [{a,1},{b,2}] = lists:sort(maps:to_list(#{a=>1,b=>2})),
+ [{a,1},{b,2},{c,3}] = lists:sort(maps:to_list(#{c=>3,a=>1,b=>2})),
+ [{a,1},{b,2},{g,3}] = lists:sort(maps:to_list(#{g=>3,a=>1,b=>2})),
+ [{a,1},{b,2},{g,3},{"c",4}] = lists:sort(maps:to_list(#{g=>3,a=>1,b=>2,"c"=>4})),
+ [{3,v2},{hi,v4},{{hi,3},v5},{"hi",v3},{<<"hi">>,v1}] =
+ lists:sort(maps:to_list(#{<<"hi">>=>v1,3=>v2,"hi"=>v3,hi=>v4,{hi,3}=>v5})),
- [{3,v7},{hi,v9},{{hi,3},v10},{"hi",v8},{<<"hi">>,v6}] = maps:to_list(#{
- <<"hi">>=>v1,3=>v2,"hi"=>v3,hi=>v4,{hi,3}=>v5,
- <<"hi">>=>v6,3=>v7,"hi"=>v8,hi=>v9,{hi,3}=>v10}),
+ [{3,v7},{hi,v9},{{hi,3},v10},{"hi",v8},{<<"hi">>,v6}] =
+ lists:sort(maps:to_list(#{<<"hi">>=>v1,3=>v2,"hi"=>v3,hi=>v4,{hi,3}=>v5,
+ <<"hi">>=>v6,3=>v7,"hi"=>v8,hi=>v9,{hi,3}=>v10})),
%% error cases
- {'EXIT', {badarg,_}} = (catch maps:to_list(id(a))),
- {'EXIT', {badarg,_}} = (catch maps:to_list(id(42))),
+ {'EXIT', {{badmap,a},_}} = (catch maps:to_list(id(a))),
+ {'EXIT', {{badmap,42},_}} = (catch maps:to_list(id(42))),
ok.
@@ -1147,9 +2095,9 @@ t_update_assoc_variables(Config) when is_list(Config) ->
#{ <<0:258>> := val } = id(M0#{<<0:258>> => val}), %% binary limitation
%% Errors cases.
- BadMap = id(badmap),
- {'EXIT',{{badarg,_},_}} = (catch BadMap#{nonexisting=>val}),
- {'EXIT',{{badarg,_},_}} = (catch <<>>#{nonexisting=>val}),
+ BadMap = id(a_bad_map),
+ {'EXIT',{{badmap,BadMap},_}} = (catch BadMap#{nonexisting=>val}),
+ {'EXIT',{{badmap,<<>>},_}} = (catch <<>>#{nonexisting=>val}),
ok.
t_update_exact_variables(Config) when is_list(Config) ->
@@ -1177,14 +2125,14 @@ t_update_exact_variables(Config) when is_list(Config) ->
1.0 => new_val4 },
%% Errors cases.
- {'EXIT',{{badarg,_},_}} = (catch ((id(nil))#{ a := b })),
- {'EXIT',{{badarg,_},_}} = (catch <<>>#{nonexisting:=val}),
-
- {'EXIT',{badarg,_}} = (catch M0#{nonexisting:=val}),
- {'EXIT',{badarg,_}} = (catch M0#{1.0:=v,1.0=>v2}),
- {'EXIT',{badarg,_}} = (catch M0#{42.0:=v,42:=v2}),
- {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
- {'EXIT',{badarg,_}} = (catch M0#{<<0:257>> := val}), %% limitation
+ {'EXIT',{{badmap,_},_}} = (catch ((id(nil))#{ a := b })),
+ {'EXIT',{{badmap,_},_}} = (catch <<>>#{nonexisting:=val}),
+
+ {'EXIT',{{badkey,nonexisting},_}} = (catch M0#{nonexisting:=val}),
+ {'EXIT',{{badkey,1.0},_}} = (catch M0#{1.0:=v,1.0=>v2}),
+ {'EXIT',{{badkey,_},_}} = (catch M0#{42.0:=v,42:=v2}),
+ {'EXIT',{{badkey,_},_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
+ {'EXIT',{{badkey,_},_}} = (catch M0#{<<0:257>> := val}), %% limitation
ok.
t_nested_pattern_expressions(Config) when is_list(Config) ->
@@ -1271,7 +2219,7 @@ map_guard_sequence_mixed(K1,K2,M) ->
t_frequency_table(Config) when is_list(Config) ->
random:seed({13,1337,54}), % pseudo random
- N = 100000,
+ N = 1000,
Ts = rand_terms(N),
#{ n:=N, tf := Tf } = frequency_table(Ts,#{ n=>0, tf => #{}}),
ok = check_frequency(Ts,Tf),
@@ -1435,6 +2383,11 @@ t_build_and_match_structure(Config) when is_list(Config) ->
ok.
+do_badmap(Test) ->
+ Terms = [Test,fun erlang:abs/1,make_ref(),self(),0.0/id(-1),
+ <<1:1>>,<<>>,<<1,2,3>>,
+ [],{a,b,c},[a,b],atom,10.0,42,(1 bsl 65) + 3],
+ [Test(T) || T <- Terms].
%% Use this function to avoid compile-time evaluation of an expression.
id(I) -> I.
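
The rewritten map_SUITE assertions above capture the new error terms for the map BIFs: a non-map argument now exits with {badmap,Term} and a missing key with {badkey,Key} instead of a bare badarg. A minimal sketch of branching on these from calling code (hypothetical module and function names, not part of the patch):

%% Hypothetical helper for illustration only; assumes an ERTS with the
%% {badmap,_}/{badkey,_} errors exercised by the tests above.
-module(map_error_demo).
-export([classify/1]).

classify(Fun) when is_function(Fun, 0) ->
    try
        {ok, Fun()}
    catch
        error:{badmap, NotAMap} -> {bad_map, NotAMap}; % e.g. maps:remove(a, [])
        error:{badkey, Key}     -> {bad_key, Key}      % e.g. maps:update(k, v, #{})
    end.

For instance, classify(fun() -> maps:update(k, v, #{}) end) returns {bad_key,k}, while classify(fun() -> maps:remove(a, []) end) returns {bad_map,[]}.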
diff --git a/lib/debugger/vsn.mk b/lib/debugger/vsn.mk
index 38c19be93e..b6fd4e8e44 100644
--- a/lib/debugger/vsn.mk
+++ b/lib/debugger/vsn.mk
@@ -1 +1 @@
-DEBUGGER_VSN = 4.0.2
+DEBUGGER_VSN = 4.1
diff --git a/lib/dialyzer/doc/src/dialyzer.xml b/lib/dialyzer/doc/src/dialyzer.xml
index e482b1e6f8..fc076c24a6 100644
--- a/lib/dialyzer/doc/src/dialyzer.xml
+++ b/lib/dialyzer/doc/src/dialyzer.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2006</year><year>2014</year>
+ <year>2006</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -70,7 +70,7 @@
[--build_plt] [--add_to_plt] [--remove_from_plt]
[--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
[--dump_callgraph file] [--no_native] [--fullpath]
- [--statistics]</code>
+ [--statistics] [--no_native_cache]</code>
<p>Options:</p>
<taglist>
<tag><c><![CDATA[files_or_dirs]]></c> (for backwards compatibility also
@@ -139,7 +139,11 @@
<tag><c><![CDATA[-Wwarn]]></c></tag>
<item>A family of options which selectively turn on/off warnings
(for help on the names of warnings use
- <c><![CDATA[dialyzer -Whelp]]></c>).</item>
+ <c><![CDATA[dialyzer -Whelp]]></c>).
+ Note that the options can also be given in the file with a
+ <c>-dialyzer()</c> attribute. See <seealso
+ marker="#suppression">Requesting or Suppressing Warnings in
+ Source Files</seealso> below for details.</item>
<tag><c><![CDATA[--shell]]></c></tag>
<item>Do not disable the Erlang shell while running the GUI.</item>
<tag><c><![CDATA[--version]]></c> (or <c><![CDATA[-v]]></c>)</tag>
@@ -194,6 +198,11 @@
heuristically performs when dialyzing many files; this avoids the
compilation time but it may result in (much) longer analysis
time.</item>
+ <tag><c><![CDATA[--no_native_cache]]></c></tag>
+ <item>By default, Dialyzer caches the results of native compilation in the
+ <c>$XDG_CACHE_HOME/erlang/dialyzer_hipe_cache</c> directory.
+ <c>XDG_CACHE_HOME</c> defaults to <c>$HOME/.cache</c>.
+ Use this option to disable caching.</item>
<tag><c><![CDATA[--fullpath]]></c></tag>
<item>Display the full path names of files for which warnings are emitted.</item>
<tag><c><![CDATA[--gui]]></c></tag>
@@ -227,6 +236,8 @@
<tag><c><![CDATA[-Wno_behaviours]]></c></tag>
<item>Suppress warnings about behaviour callbacks which drift from the
published recommended interfaces.</item>
+ <tag><c><![CDATA[-Wno_missing_calls]]></c></tag>
+ <item>Suppress warnings about calls to missing functions.</item>
<tag><c><![CDATA[-Wno_undefined_callbacks]]></c></tag>
<item>Suppress warnings about behaviours that have no
<c>-callback</c> attributes for their callbacks.</item>
@@ -242,9 +253,16 @@
analysis that finds data races performs intra-procedural data flow analysis
and can sometimes explode in time. Enable it at your own risk.
</item>
- <tag><c><![CDATA[-Wunderspecs]]></c>***</tag>
+ <tag><c><![CDATA[-Wunderspecs]]></c>***</tag>
<item>Warn about underspecified functions
(the -spec is strictly more allowing than the success typing).</item>
+ <tag><c><![CDATA[-Wunknown]]></c>***</tag>
+ <item>Let warnings about unknown functions and types affect the
+ exit status of the command line version. The default is to ignore
+ warnings about unknown functions and types when setting the exit
+ status. When using the Dialyzer from Erlang, warnings about unknown
+ functions and types are returned; the default is not to return
+ these warnings.</item>
</taglist>
<p>The following options are also available but their use is not
recommended: (they are mostly for Dialyzer developers and internal
@@ -269,6 +287,71 @@
given from the command line, so please refer to the sections above for
a description of these.</p>
</section>
+
+ <section>
+ <marker id="suppression"></marker>
+ <title>Requesting or Suppressing Warnings in Source Files</title>
+ <p>
+ The <c>-dialyzer()</c> attribute can be used for turning off
+ warnings in a module by specifying functions or warning options.
+ For example, to turn off all warnings for the function
+ <c>f/0</c>, include the following line:
+ </p>
+<code type="none">
+-dialyzer({nowarn_function, f/0}).
+</code>
+ <p>To turn off warnings for improper lists, add the following line
+ to the source file:
+ </p>
+<code type="none">
+-dialyzer(no_improper_lists).
+</code>
+ <p>The <c>-dialyzer()</c> attribute is allowed after function
+ declarations. Lists of warning options or functions are allowed:
+ </p>
+<code type="none">
+-dialyzer([{nowarn_function, [f/0]}, no_improper_lists]).
+</code>
+ <p>
+ Warning options can be restricted to functions:
+ </p>
+<code type="none">
+-dialyzer({no_improper_lists, g/0}).
+</code>
+<code type="none">
+-dialyzer({[no_return, no_match], [g/0, h/0]}).
+</code>
+ <p>
+ For help on the warning options use <c>dialyzer -Whelp</c>. The
+ options are also enumerated <seealso
+ marker="#gui/1">below</seealso> (<c>WarnOpts</c>).
+ </p>
+ <note>
+ <p>
+ The <c>-dialyzer()</c> attribute is not checked by the Erlang
+ Compiler, but by the Dialyzer itself.
+ </p>
+ </note>
+ <note>
+ <p>
+ The warning option <c>-Wrace_conditions</c> has no effect when
+ set in source files.
+ </p>
+ </note>
+ <p>
+ The <c>-dialyzer()</c> attribute can also be used for turning on
+ warnings. For instance, if a module has been fixed regarding
+ unmatched returns, adding the line
+ </p>
+<code type="none">
+-dialyzer(unmatched_returns).
+</code>
+ <p>
+ can help in assuring that no new unmatched return warnings are
+ introduced.
+ </p>
+ </section>
+
<funcs>
<func>
<name>gui() -> ok | {error, Msg}</name>
@@ -283,13 +366,14 @@
OptList :: [Option]
Option :: {files, [Filename :: string()]}
| {files_rec, [DirName :: string()]}
- | {defines, [{Macro: atom(), Value : term()}]}
+ | {defines, [{Macro :: atom(), Value :: term()}]}
| {from, src_code | byte_code} %% Defaults to byte_code
| {init_plt, FileName :: string()} %% If changed from default
| {plts, [FileName :: string()]} %% If changed from default
| {include_dirs, [DirName :: string()]}
| {output_file, FileName :: string()}
| {output_plt, FileName :: string()}
+  | {check_plt, boolean()}
| {analysis_type, 'succ_typings' |
'plt_add' |
'plt_build' |
@@ -313,7 +397,8 @@ WarnOpts :: no_return
| race_conditions
| overspecs
| underspecs
- | specdiffs</code>
+ | specdiffs
+ | unknown</code>
</desc>
</func>
<func>
@@ -347,6 +432,7 @@ Tag :: 'warn_behaviour'
| 'warn_return_only_exit'
| 'warn_umatched_return'
| 'warn_undefined_callbacks'
+ | 'warn_unknown'
Id = {File :: string(), Line :: integer()}
Msg = msg() -- Undefined</code>
</desc>
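
To complement the new "Requesting or Suppressing Warnings in Source Files" section added above, a sketch of a complete module combining the attribute forms it documents (module and function names are hypothetical, not taken from the patch):

%% Hypothetical example module for illustration only.
-module(suppress_demo).
-export([f/0, g/0]).

%% Turn off all warnings for f/0.
-dialyzer({nowarn_function, f/0}).

f() -> list_to_atom("f").

%% The attribute is also allowed after function declarations, and a
%% warning option can be restricted to particular functions.
-dialyzer({no_improper_lists, g/0}).

g() -> [a | b].   % improper list construction; the warning is suppressed for g/0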
diff --git a/lib/dialyzer/doc/src/notes.xml b/lib/dialyzer/doc/src/notes.xml
index 4020165697..8976679c1d 100644
--- a/lib/dialyzer/doc/src/notes.xml
+++ b/lib/dialyzer/doc/src/notes.xml
@@ -31,6 +31,21 @@
<p>This document describes the changes made to the Dialyzer
application.</p>
+<section><title>Dialyzer 2.7.4</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p> A bug concerning <c>map()</c> types has been fixed.
+ </p>
+ <p>
+ Own Id: OTP-12472</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Dialyzer 2.7.3</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/dialyzer/src/dialyzer.app.src b/lib/dialyzer/src/dialyzer.app.src
index 1756800c4f..b6b9173a84 100644
--- a/lib/dialyzer/src/dialyzer.app.src
+++ b/lib/dialyzer/src/dialyzer.app.src
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -45,6 +45,6 @@
{registered, []},
{applications, [compiler, gs, hipe, kernel, stdlib, wx]},
{env, []},
- {runtime_dependencies, ["wx-1.2","syntax_tools-1.6.14","stdlib-2.0",
- "kernel-3.0","hipe-3.10.3","erts-6.0",
+ {runtime_dependencies, ["wx-1.2","syntax_tools-1.6.14","stdlib-2.5",
+ "kernel-3.0","hipe-3.10.3","erts-7.0",
"compiler-5.0"]}]}.
diff --git a/lib/dialyzer/src/dialyzer.erl b/lib/dialyzer/src/dialyzer.erl
index cec94a49fd..c8537e3bd8 100644
--- a/lib/dialyzer/src/dialyzer.erl
+++ b/lib/dialyzer/src/dialyzer.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -162,14 +162,7 @@ run(Opts) ->
{error, Msg} ->
throw({dialyzer_error, Msg});
OptsRecord ->
- case OptsRecord#options.check_plt of
- true ->
- case cl_check_init(OptsRecord) of
- {ok, ?RET_NOTHING_SUSPICIOUS} -> ok;
- {error, ErrorMsg1} -> throw({dialyzer_error, ErrorMsg1})
- end;
- false -> ok
- end,
+ ok = check_init(OptsRecord),
case dialyzer_cl:start(OptsRecord) of
{?RET_DISCREPANCIES, Warnings} -> Warnings;
{?RET_NOTHING_SUSPICIOUS, _} -> []
@@ -179,6 +172,16 @@ run(Opts) ->
erlang:error({dialyzer_error, lists:flatten(ErrorMsg)})
end.
+check_init(#options{analysis_type = plt_check}) ->
+ ok;
+check_init(#options{check_plt = true} = OptsRecord) ->
+ case cl_check_init(OptsRecord) of
+ {ok, _} -> ok;
+ {error, Msg} -> throw({dialyzer_error, Msg})
+ end;
+check_init(#options{check_plt = false}) ->
+ ok.
+
internal_gui(OptsRecord) ->
F = fun() ->
dialyzer_gui_wx:start(OptsRecord),
@@ -199,17 +202,13 @@ gui(Opts) ->
throw({dialyzer_error, Msg});
OptsRecord ->
ok = check_gui_options(OptsRecord),
- case cl_check_init(OptsRecord) of
- {ok, ?RET_NOTHING_SUSPICIOUS} ->
- F = fun() ->
- dialyzer_gui_wx:start(OptsRecord)
- end,
- case doit(F) of
- {ok, _} -> ok;
- {error, Msg} -> throw({dialyzer_error, Msg})
- end;
- {error, ErrorMsg1} ->
- throw({dialyzer_error, ErrorMsg1})
+ ok = check_init(OptsRecord),
+ F = fun() ->
+ dialyzer_gui_wx:start(OptsRecord)
+ end,
+ case doit(F) of
+ {ok, _} -> ok;
+ {error, Msg} -> throw({dialyzer_error, Msg})
end
catch
throw:{dialyzer_error, ErrorMsg} ->
@@ -282,15 +281,17 @@ cl_check_log(none) ->
cl_check_log(Output) ->
io:format(" Check output file `~s' for details\n", [Output]).
--spec format_warning(dial_warning()) -> string().
+-spec format_warning(raw_warning()) -> string().
format_warning(W) ->
format_warning(W, basename).
--spec format_warning(dial_warning(), fopt()) -> string().
+-spec format_warning(raw_warning() | dial_warning(), fopt()) -> string().
+format_warning({Tag, {File, Line, _MFA}, Msg}, FOpt) ->
+ format_warning({Tag, {File, Line}, Msg}, FOpt);
format_warning({_Tag, {File, Line}, Msg}, FOpt) when is_list(File),
- is_integer(Line) ->
+ is_integer(Line) ->
F = case FOpt of
fullpath -> File;
basename -> filename:basename(File)
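
The new format_warning/2 clause above accepts the three-element warning_info() location and simply drops the module/MFA component before formatting. A small sketch with made-up warning data (file name, line, MFA and message contents are hypothetical):

%% Hypothetical raw warning in the post-change shape; only the location
%% triple {File, Line, MFA} matters for the new clause.
RawWarning = {warn_matching,
              {"demo.erl", 7, {demo, f, 0}},
              {pattern_match, ["pattern 'ok'", "'error'"]}},
io:put_chars(dialyzer:format_warning(RawWarning, basename)).

The call falls through to the existing {Tag, {File, Line}, Msg} clause once the MFA has been dropped, so it prints an ordinary file:line warning string.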
diff --git a/lib/dialyzer/src/dialyzer.hrl b/lib/dialyzer/src/dialyzer.hrl
index 9a25f86512..90addc35a8 100644
--- a/lib/dialyzer/src/dialyzer.hrl
+++ b/lib/dialyzer/src/dialyzer.hrl
@@ -84,6 +84,15 @@
-type dial_warning() :: {dial_warn_tag(), file_line(), {atom(), [term()]}}.
%%
+%% This is the representation of each warning before suppressions have
+%% been applied
+%%
+-type m_or_mfa() :: module() % warnings not associated with any function
+ | mfa().
+-type warning_info() :: {file:filename(), non_neg_integer(), m_or_mfa()}.
+-type raw_warning() :: {dial_warn_tag(), warning_info(), {atom(), [term()]}}.
+
+%%
%% This is the representation of dialyzer's internal errors
%%
-type dial_error() :: any(). %% XXX: underspecified
@@ -103,6 +112,7 @@
-type fopt() :: 'basename' | 'fullpath'.
-type format() :: 'formatted' | 'raw'.
-type label() :: non_neg_integer().
+-type dial_warn_tags():: ordsets:ordset(dial_warn_tag()).
-type rep_mode() :: 'quiet' | 'normal' | 'verbose'.
-type start_from() :: 'byte_code' | 'src_code'.
-type mfa_or_funlbl() :: label() | mfa().
@@ -138,7 +148,7 @@
init_plts = [] :: [file:filename()],
include_dirs = [] :: [file:filename()],
output_plt = none :: 'none' | file:filename(),
- legal_warnings = ordsets:new() :: ordsets:ordset(dial_warn_tag()),
+ legal_warnings = ordsets:new() :: dial_warn_tags(),
report_mode = normal :: rep_mode(),
erlang_mode = false :: boolean(),
use_contracts = true :: boolean(),
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
index af1c2b7e3a..dbfe680345 100644
--- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
@@ -2,7 +2,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -39,8 +39,6 @@
one_file_result/0,
compile_result/0]).
--export_type([no_warn_unused/0]).
-
-include("dialyzer.hrl").
-record(analysis_state,
@@ -50,8 +48,9 @@
defines = [] :: [dial_define()],
doc_plt :: dialyzer_plt:plt(),
include_dirs = [] :: [file:filename()],
- no_warn_unused :: no_warn_unused(),
parent :: pid(),
+ legal_warnings :: % command line options
+ [dial_warn_tag()],
plt :: dialyzer_plt:plt(),
start_from = byte_code :: start_from(),
use_contracts = true :: boolean(),
@@ -59,9 +58,10 @@
solvers :: [solver()]
}).
--record(server_state, {parent :: pid(), legal_warnings :: [dial_warn_tag()]}).
-
--type no_warn_unused() :: sets:set(mfa()).
+-record(server_state,
+ {
+ parent :: pid()
+ }).
%%--------------------------------------------------------------------
%% Main
@@ -75,24 +75,24 @@ start(Parent, LegalWarnings, Analysis) ->
Analysis0 =
Analysis#analysis{race_detection = RacesOn, timing_server = TimingServer},
Analysis1 = expand_files(Analysis0),
- Analysis2 = run_analysis(Analysis1),
- State = #server_state{parent = Parent, legal_warnings = LegalWarnings},
+ Analysis2 = run_analysis(Analysis1, LegalWarnings),
+ State = #server_state{parent = Parent},
loop(State, Analysis2, none),
dialyzer_timing:stop(TimingServer).
-run_analysis(Analysis) ->
+run_analysis(Analysis, LegalWarnings) ->
Self = self(),
- Fun = fun() -> analysis_start(Self, Analysis) end,
+ Fun = fun() -> analysis_start(Self, Analysis, LegalWarnings) end,
Analysis#analysis{analysis_pid = spawn_link(Fun)}.
-loop(#server_state{parent = Parent, legal_warnings = LegalWarnings} = State,
+loop(#server_state{parent = Parent} = State,
#analysis{analysis_pid = AnalPid} = Analysis, ExtCalls) ->
receive
{AnalPid, log, LogMsg} ->
send_log(Parent, LogMsg),
loop(State, Analysis, ExtCalls);
{AnalPid, warnings, Warnings} ->
- case filter_warnings(LegalWarnings, Warnings) of
+ case Warnings of
[] -> ok;
SendWarnings ->
send_warnings(Parent, SendWarnings)
@@ -129,7 +129,7 @@ loop(#server_state{parent = Parent, legal_warnings = LegalWarnings} = State,
%% The Analysis
%%--------------------------------------------------------------------
-analysis_start(Parent, Analysis) ->
+analysis_start(Parent, Analysis, LegalWarnings) ->
CServer = dialyzer_codeserver:new(),
Plt = Analysis#analysis.plt,
State = #analysis_state{codeserver = CServer,
@@ -139,13 +139,14 @@ analysis_start(Parent, Analysis) ->
include_dirs = Analysis#analysis.include_dirs,
plt = Plt,
parent = Parent,
+ legal_warnings = LegalWarnings,
start_from = Analysis#analysis.start_from,
use_contracts = Analysis#analysis.use_contracts,
timing_server = Analysis#analysis.timing_server,
solvers = Analysis#analysis.solvers
},
Files = ordsets:from_list(Analysis#analysis.files),
- {Callgraph, NoWarn, TmpCServer0} = compile_and_store(Files, State),
+ {Callgraph, TmpCServer0} = compile_and_store(Files, State),
%% Remote type postprocessing
NewCServer =
try
@@ -177,7 +178,6 @@ analysis_start(Parent, Analysis) ->
State0 = State#analysis_state{plt = NewPlt1},
dump_callgraph(Callgraph, State0, Analysis),
State1 = State0#analysis_state{codeserver = NewCServer},
- State2 = State1#analysis_state{no_warn_unused = NoWarn},
%% Remove all old versions of the files being analyzed
AllNodes = dialyzer_callgraph:all_nodes(Callgraph),
Plt1 = dialyzer_plt:delete_list(NewPlt1, AllNodes),
@@ -187,14 +187,14 @@ analysis_start(Parent, Analysis) ->
true -> dialyzer_callgraph:put_race_detection(true, Callgraph);
false -> Callgraph
end,
- State3 = analyze_callgraph(NewCallgraph, State2#analysis_state{plt = Plt1}),
+ State2 = analyze_callgraph(NewCallgraph, State1#analysis_state{plt = Plt1}),
dialyzer_callgraph:dispose_race_server(NewCallgraph),
rcv_and_send_ext_types(Parent),
NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
NonExportsList = sets:to_list(NonExports),
- Plt2 = dialyzer_plt:delete_list(State3#analysis_state.plt, NonExportsList),
- send_codeserver_plt(Parent, CServer, State3#analysis_state.plt),
- send_analysis_done(Parent, Plt2, State3#analysis_state.doc_plt).
+ Plt2 = dialyzer_plt:delete_list(State2#analysis_state.plt, NonExportsList),
+ send_codeserver_plt(Parent, CServer, State2#analysis_state.plt),
+ send_analysis_done(Parent, Plt2, State2#analysis_state.doc_plt).
analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
doc_plt = DocPlt,
@@ -210,11 +210,11 @@ analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
TimingServer, Solvers, Parent),
{NewPlt0, DocPlt};
succ_typings ->
- NoWarn = State#analysis_state.no_warn_unused,
{Warnings, NewPlt0, NewDocPlt0} =
dialyzer_succ_typings:get_warnings(Callgraph, Plt, DocPlt, Codeserver,
- NoWarn, TimingServer, Solvers, Parent),
- send_warnings(State#analysis_state.parent, Warnings),
+ TimingServer, Solvers, Parent),
+ Warnings1 = filter_warnings(Warnings, Codeserver),
+ send_warnings(State#analysis_state.parent, Warnings1),
{NewPlt0, NewDocPlt0}
end,
dialyzer_callgraph:delete(Callgraph),
@@ -230,19 +230,22 @@ analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
defines = [] :: [dial_define()],
include_dirs = [] :: [file:filename()],
start_from = byte_code :: start_from(),
- use_contracts = true :: boolean()
+ use_contracts = true :: boolean(),
+ legal_warnings :: [dial_warn_tag()]
}).
make_compile_init(#analysis_state{codeserver = Codeserver,
defines = Defs,
include_dirs = Dirs,
use_contracts = UseContracts,
+ legal_warnings = LegalWarnings,
start_from = StartFrom}, Callgraph) ->
#compile_init{callgraph = Callgraph,
codeserver = Codeserver,
defines = [{d, Macro, Val} || {Macro, Val} <- Defs],
include_dirs = [{i, D} || D <- Dirs],
use_contracts = UseContracts,
+ legal_warnings = LegalWarnings,
start_from = StartFrom}.
compile_and_store(Files, #analysis_state{codeserver = CServer,
@@ -252,7 +255,7 @@ compile_and_store(Files, #analysis_state{codeserver = CServer,
{T1, _} = statistics(wall_clock),
Callgraph = dialyzer_callgraph:new(),
CompileInit = make_compile_init(State, Callgraph),
- {{Failed, NoWarn, Modules}, NextLabel} =
+ {{Failed, Modules}, NextLabel} =
?timing(Timing, "compile", _C1,
dialyzer_coordinator:parallel_job(compile, Files,
CompileInit, Timing)),
@@ -281,34 +284,34 @@ compile_and_store(Files, #analysis_state{codeserver = CServer,
{T3, _} = statistics(wall_clock),
Msg2 = io_lib:format("done in ~.2f secs\n", [(T3-T2)/1000]),
send_log(Parent, Msg2),
- {Callgraph, sets:from_list(NoWarn), CServer2}.
+ {Callgraph, CServer2}.
-type compile_init_data() :: #compile_init{}.
-type error_reason() :: string().
--type compile_result() :: {[{file:filename(), error_reason()}], [mfa()],
+-type compile_result() :: {[{file:filename(), error_reason()}],
[module()]}. %%opaque
-type one_file_result() :: {error, error_reason()} |
{ok, [dialyzer_callgraph:callgraph_edge()],
- [mfa_or_funlbl()], [mfa()], module()}. %%opaque
--type compile_mid_data() :: {module(), cerl:cerl(), [mfa()],
+ [mfa_or_funlbl()], module()}. %%opaque
+-type compile_mid_data() :: {module(), cerl:cerl(),
dialyzer_callgraph:callgraph(),
dialyzer_codeserver:codeserver()}.
-spec compile_init_result() -> compile_result().
-compile_init_result() -> {[], [], []}.
+compile_init_result() -> {[], []}.
-spec add_to_result(file:filename(), one_file_result(), compile_result(),
compile_init_data()) -> compile_result().
-add_to_result(File, NewData, {Failed, NoWarn, Mods}, InitData) ->
+add_to_result(File, NewData, {Failed, Mods}, InitData) ->
case NewData of
{error, Reason} ->
- {[{File, Reason}|Failed], NoWarn, Mods};
- {ok, V, E, NewNoWarn, Mod} ->
+ {[{File, Reason}|Failed], Mods};
+ {ok, V, E, Mod} ->
Callgraph = InitData#compile_init.callgraph,
dialyzer_callgraph:add_edges(E, V, Callgraph),
- {Failed, NewNoWarn ++ NoWarn, [Mod|Mods]}
+ {Failed, [Mod|Mods]}
end.
-spec start_compilation(file:filename(), compile_init_data()) ->
@@ -318,12 +321,14 @@ start_compilation(File,
#compile_init{callgraph = Callgraph, codeserver = Codeserver,
defines = Defines, include_dirs = IncludeD,
use_contracts = UseContracts,
+ legal_warnings = LegalWarnings,
start_from = StartFrom}) ->
case StartFrom of
src_code ->
- compile_src(File, IncludeD, Defines, Callgraph, Codeserver, UseContracts);
+ compile_src(File, IncludeD, Defines, Callgraph, Codeserver,
+ UseContracts, LegalWarnings);
byte_code ->
- compile_byte(File, Callgraph, Codeserver, UseContracts)
+ compile_byte(File, Callgraph, Codeserver, UseContracts, LegalWarnings)
end.
cleanup_callgraph(#analysis_state{plt = InitPlt, parent = Parent,
@@ -357,88 +362,86 @@ cleanup_callgraph(#analysis_state{plt = InitPlt, parent = Parent,
end,
Callgraph1.
-compile_src(File, Includes, Defines, Callgraph, CServer, UseContracts) ->
+compile_src(File, Includes, Defines, Callgraph, CServer, UseContracts,
+ LegalWarnings) ->
DefaultIncludes = default_includes(filename:dirname(File)),
SrcCompOpts = dialyzer_utils:src_compiler_opts(),
CompOpts = SrcCompOpts ++ Includes ++ Defines ++ DefaultIncludes,
case dialyzer_utils:get_abstract_code_from_src(File, CompOpts) of
{error, _Msg} = Error -> Error;
{ok, AbstrCode} ->
- compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts)
+ compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
+ UseContracts, LegalWarnings)
end.
-compile_byte(File, Callgraph, CServer, UseContracts) ->
+compile_byte(File, Callgraph, CServer, UseContracts, LegalWarnings) ->
case dialyzer_utils:get_abstract_code_from_beam(File) of
error ->
{error, " Could not get abstract code for: " ++ File ++ "\n" ++
" Recompile with +debug_info or analyze starting from source code"};
{ok, AbstrCode} ->
- compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts)
+ compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts,
+ LegalWarnings)
end.
-compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts) ->
+compile_byte(File, AbstrCode, Callgraph, CServer, UseContracts,
+ LegalWarnings) ->
case dialyzer_utils:get_compile_options_from_beam(File) of
error ->
{error, " Could not get compile options for: " ++ File ++ "\n" ++
" Recompile or analyze starting from source code"};
{ok, CompOpts} ->
- compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts)
+ compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
+ UseContracts, LegalWarnings)
end.
-compile_common(File, AbstrCode, CompOpts, Callgraph, CServer, UseContracts) ->
+compile_common(File, AbstrCode, CompOpts, Callgraph, CServer,
+ UseContracts, LegalWarnings) ->
case dialyzer_utils:get_core_from_abstract_code(AbstrCode, CompOpts) of
error -> {error, " Could not get core Erlang code for: " ++ File};
{ok, Core} ->
Mod = cerl:concrete(cerl:module_name(Core)),
- NoWarn = abs_get_nowarn(AbstrCode, Mod),
case dialyzer_utils:get_record_and_type_info(AbstrCode) of
{error, _} = Error -> Error;
{ok, RecInfo} ->
CServer1 =
dialyzer_codeserver:store_temp_records(Mod, RecInfo, CServer),
+ MetaFunInfo =
+ dialyzer_utils:get_fun_meta_info(Mod, AbstrCode, LegalWarnings),
+ CServer2 =
+ dialyzer_codeserver:insert_fun_meta_info(MetaFunInfo, CServer1),
case UseContracts of
true ->
case dialyzer_utils:get_spec_info(Mod, AbstrCode, RecInfo) of
{error, _} = Error -> Error;
{ok, SpecInfo, CallbackInfo} ->
- CServer2 =
+ CServer3 =
dialyzer_codeserver:store_temp_contracts(Mod, SpecInfo,
CallbackInfo,
- CServer1),
- store_core(Mod, Core, NoWarn, Callgraph, CServer2)
+ CServer2),
+ store_core(Mod, Core, Callgraph, CServer3)
end;
false ->
- store_core(Mod, Core, NoWarn, Callgraph, CServer1)
+ store_core(Mod, Core, Callgraph, CServer2)
end
end
end.
-store_core(Mod, Core, NoWarn, Callgraph, CServer) ->
+store_core(Mod, Core, Callgraph, CServer) ->
Exp = get_exports_from_core(Core),
ExpTypes = get_exported_types_from_core(Core),
CServer = dialyzer_codeserver:insert_exports(Exp, CServer),
CServer = dialyzer_codeserver:insert_temp_exported_types(ExpTypes, CServer),
CoreTree = cerl:from_records(Core),
- {ok, cerl_trees:size(CoreTree), {Mod, CoreTree, NoWarn, Callgraph, CServer}}.
+ CoreSize = cerl_trees:size(CoreTree),
+ {ok, CoreSize, {Mod, CoreTree, Callgraph, CServer}}.
-spec continue_compilation(integer(), compile_mid_data()) -> one_file_result().
-continue_compilation(NextLabel, {Mod, CoreTree, NoWarn, Callgraph, CServer}) ->
+continue_compilation(NextLabel, {Mod, CoreTree, Callgraph, CServer}) ->
{LabeledTree, _NewNextLabel} = cerl_trees:label(CoreTree, NextLabel),
LabeledCore = cerl:to_records(LabeledTree),
- store_code_and_build_callgraph(Mod, LabeledCore, Callgraph, NoWarn, CServer).
-
-abs_get_nowarn(Abs, M) ->
- Opts = lists:flatten([C || {attribute, _, compile, C} <- Abs]),
- Warn = erl_lint:bool_option(warn_unused_function, nowarn_unused_function,
- true, Opts),
- case Warn of
- false ->
- [{M, F, A} || {function, _, F, A, _} <- Abs]; % all functions
- true ->
- [{M, F, A} || {nowarn_unused_function, FAs} <- Opts,
- {F, A} <- lists:flatten([FAs])]
- end.
+ store_code_and_build_callgraph(Mod, LabeledCore, Callgraph, CServer).
get_exported_types_from_core(Core) ->
Attrs = cerl:module_attrs(Core),
@@ -456,11 +459,11 @@ get_exports_from_core(Core) ->
M = cerl:atom_val(cerl:module_name(Tree)),
[{M, F, A} || {F, A} <- Exports2].
-store_code_and_build_callgraph(Mod, Core, Callgraph, NoWarn, CServer) ->
+store_code_and_build_callgraph(Mod, Core, Callgraph, CServer) ->
CoreTree = cerl:from_records(Core),
{Vertices, Edges} = dialyzer_callgraph:scan_core_tree(CoreTree, Callgraph),
CServer = dialyzer_codeserver:insert(Mod, CoreTree, CServer),
- {ok, Vertices, Edges, NoWarn, Mod}.
+ {ok, Vertices, Edges, Mod}.
%%--------------------------------------------------------------------
%% Utilities
@@ -548,10 +551,19 @@ send_warnings(Parent, Warnings) ->
Parent ! {self(), warnings, Warnings},
ok.
-filter_warnings(LegalWarnings, Warnings) ->
- [TIW || {Tag, _Id, _Warning} = TIW <- Warnings,
- ordsets:is_element(Tag, LegalWarnings)].
+filter_warnings(Warnings, Codeserver) ->
+ [TWW || {Tag, WarningInfo, _Warning} = TWW <- Warnings,
+ is_ok_fun(WarningInfo, Codeserver),
+ is_ok_tag(Tag, WarningInfo, Codeserver)].
+
+is_ok_fun({_F, _L, Module}, _Codeserver) when is_atom(Module) ->
+ true;
+is_ok_fun({_Filename, _Line, {_M, _F, _A} = MFA}, Codeserver) ->
+ not dialyzer_utils:is_suppressed_fun(MFA, Codeserver).
+is_ok_tag(Tag, {_F, _L, MorMFA}, Codeserver) ->
+ not dialyzer_utils:is_suppressed_tag(MorMFA, Tag, Codeserver).
+
send_analysis_done(Parent, Plt, DocPlt) ->
Parent ! {self(), done, Plt, DocPlt},
ok.
@@ -573,7 +585,9 @@ send_codeserver_plt(Parent, CServer, Plt ) ->
ok.
send_bad_calls(Parent, BadCalls, CodeServer) ->
- send_warnings(Parent, format_bad_calls(BadCalls, CodeServer, [])).
+ FormatedBadCalls = format_bad_calls(BadCalls, CodeServer, []),
+ Warnings = filter_warnings(FormatedBadCalls, CodeServer),
+ send_warnings(Parent, Warnings).
send_mod_deps(Parent, ModuleDeps) ->
Parent ! {self(), mod_deps, ModuleDeps},
@@ -585,8 +599,9 @@ format_bad_calls([{{_, _, _}, {_, module_info, A}}|Left], CodeServer, Acc)
format_bad_calls([{FromMFA, {M, F, A} = To}|Left], CodeServer, Acc) ->
{_Var, FunCode} = dialyzer_codeserver:lookup_mfa_code(FromMFA, CodeServer),
Msg = {call_to_missing, [M, F, A]},
- FileLine = find_call_file_and_line(FunCode, To),
- NewAcc = [{?WARN_CALLGRAPH, FileLine, Msg}|Acc],
+ {File, Line} = find_call_file_and_line(FunCode, To),
+ WarningInfo = {File, Line, FromMFA},
+ NewAcc = [{?WARN_CALLGRAPH, WarningInfo, Msg}|Acc],
format_bad_calls(Left, CodeServer, NewAcc);
format_bad_calls([], _CodeServer, Acc) ->
Acc.
diff --git a/lib/dialyzer/src/dialyzer_behaviours.erl b/lib/dialyzer/src/dialyzer_behaviours.erl
index bbedd3201e..19b63bd2c8 100644
--- a/lib/dialyzer/src/dialyzer_behaviours.erl
+++ b/lib/dialyzer/src/dialyzer_behaviours.erl
@@ -52,7 +52,7 @@
-spec check_callbacks(module(), [{cerl:cerl(), cerl:cerl()}], rectab(),
dialyzer_plt:plt(),
- dialyzer_codeserver:codeserver()) -> [dial_warning()].
+ dialyzer_codeserver:codeserver()) -> [raw_warning()].
check_callbacks(Module, Attrs, Records, Plt, Codeserver) ->
{Behaviours, BehLines} = get_behaviours(Attrs),
@@ -65,7 +65,7 @@ check_callbacks(Module, Attrs, Records, Plt, Codeserver) ->
State = #state{plt = Plt, filename = File, behlines = BehLines,
codeserver = Codeserver, records = Records},
Warnings = get_warnings(Module, Behaviours, State),
- [add_tag_file_line(Module, W, State) || W <- Warnings]
+ [add_tag_warning_info(Module, W, State) || W <- Warnings]
end.
%%--------------------------------------------------------------------
@@ -193,7 +193,7 @@ find_mismatching_args(Kind, [Type|Rest], [CbType|CbRest], Behaviour,
Arity, Records, N+1, NewAcc)
end.
-add_tag_file_line(_Module, {Tag, [B|_R]} = Warn, State)
+add_tag_warning_info(Module, {Tag, [B|_R]} = Warn, State)
when Tag =:= callback_missing;
Tag =:= callback_info_missing ->
{B, Line} = lists:keyfind(B, 1, State#state.behlines),
@@ -202,18 +202,18 @@ add_tag_file_line(_Module, {Tag, [B|_R]} = Warn, State)
callback_missing -> ?WARN_BEHAVIOUR;
callback_info_missing -> ?WARN_UNDEFINED_CALLBACK
end,
- {Category, {State#state.filename, Line}, Warn};
-add_tag_file_line(_Module, {Tag, [File, Line|R]}, _State)
+ {Category, {State#state.filename, Line, Module}, Warn};
+add_tag_warning_info(Module, {Tag, [File, Line|R]}, _State)
when Tag =:= callback_spec_type_mismatch;
Tag =:= callback_spec_arg_type_mismatch ->
- {?WARN_BEHAVIOUR, {File, Line}, {Tag, R}};
-add_tag_file_line(Module, {_Tag, [_B, Fun, Arity|_R]} = Warn, State) ->
+ {?WARN_BEHAVIOUR, {File, Line, Module}, {Tag, R}};
+add_tag_warning_info(Module, {_Tag, [_B, Fun, Arity|_R]} = Warn, State) ->
{_A, FunCode} =
dialyzer_codeserver:lookup_mfa_code({Module, Fun, Arity},
State#state.codeserver),
Anns = cerl:get_ann(FunCode),
- FileLine = {get_file(Anns), get_line(Anns)},
- {?WARN_BEHAVIOUR, FileLine, Warn}.
+ WarningInfo = {get_file(Anns), get_line(Anns), {Module, Fun, Arity}},
+ {?WARN_BEHAVIOUR, WarningInfo, Warn}.
get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index 3e7d9dfa99..55fcd15641 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -2,7 +2,7 @@
%%-------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -48,7 +48,7 @@
plt_info = none :: 'none' | dialyzer_plt:plt_info(),
report_mode = normal :: rep_mode(),
return_status= ?RET_NOTHING_SUSPICIOUS :: dial_ret(),
- stored_warnings = [] :: [dial_warning()],
+ stored_warnings = [] :: [raw_warning()],
unknown_behaviours = [] :: [dialyzer_behaviours:behaviour()]
}).
@@ -469,7 +469,7 @@ expand_dependent_modules(Md5, DiffMd5, ModDeps) ->
Mod = list_to_atom(filename:basename(File, ".beam")),
sets:is_element(Mod, AnalyzeMods)
end,
- {[F || {F, _} <- Md5, FilterFun(F)], RemovedMods, NewModDeps}.
+ {[F || {F, _} <- Md5, FilterFun(F)], BigSet, NewModDeps}.
expand_dependent_modules_1([Mod|Mods], Included, ModDeps) ->
case dict:find(Mod, ModDeps) of
@@ -512,32 +512,82 @@ hipe_compile(Files, #options{erlang_mode = ErlangMode} = Options) ->
dialyzer_worker],
report_native_comp(Options),
{T1, _} = statistics(wall_clock),
- native_compile(Mods),
+ Cache = (get(dialyzer_options_native_cache) =/= false),
+ native_compile(Mods, Cache),
{T2, _} = statistics(wall_clock),
report_elapsed_time(T1, T2, Options)
end
end.
-native_compile(Mods) ->
+native_compile(Mods, Cache) ->
case dialyzer_utils:parallelism() > ?MIN_PARALLELISM of
true ->
Parent = self(),
- Pids = [spawn(fun () -> Parent ! {self(), hc(M)} end) || M <- Mods],
+ Pids = [spawn(fun () -> Parent ! {self(), hc(M, Cache)} end) || M <- Mods],
lists:foreach(fun (Pid) -> receive {Pid, Res} -> Res end end, Pids);
false ->
- lists:foreach(fun (Mod) -> hc(Mod) end, Mods)
+ lists:foreach(fun (Mod) -> hc(Mod, Cache) end, Mods)
end.
-hc(Mod) ->
+hc(Mod, Cache) ->
{module, Mod} = code:ensure_loaded(Mod),
case code:is_module_native(Mod) of
true -> ok;
false ->
%% io:format(" ~w", [Mod]),
- {ok, Mod} = hipe:c(Mod),
- ok
+ case Cache of
+ false ->
+ {ok, Mod} = hipe:c(Mod),
+ ok;
+ true ->
+ hc_cache(Mod)
+ end
end.
+hc_cache(Mod) ->
+ CacheBase = cache_base_dir(),
+ %% Use HiPE architecture and version in directory name, to avoid
+ %% clashes between incompatible binaries.
+ HipeArchVersion =
+ lists:concat(
+ [erlang:system_info(hipe_architecture), "-",
+ hipe:version(), "-",
+ hipe_bifs:system_crc()]),
+ CacheDir = filename:join(CacheBase, HipeArchVersion),
+ OrigBeamFile = code:which(Mod),
+ {ok, {Mod, <<Checksum:128>>}} = beam_lib:md5(OrigBeamFile),
+ CachedBeamFile = filename:join(CacheDir, lists:concat([Mod, "-", Checksum, ".beam"])),
+ ok = filelib:ensure_dir(CachedBeamFile),
+ ModBin =
+ case filelib:is_file(CachedBeamFile) of
+ true ->
+ {ok, BinFromFile} = file:read_file(CachedBeamFile),
+ BinFromFile;
+ false ->
+ {ok, Mod, CompiledBin} = compile:file(OrigBeamFile, [from_beam, native, binary]),
+ ok = file:write_file(CachedBeamFile, CompiledBin),
+ CompiledBin
+ end,
+ code:unstick_dir(filename:dirname(OrigBeamFile)),
+ {module, Mod} = code:load_binary(Mod, CachedBeamFile, ModBin),
+ true = code:is_module_native(Mod),
+ ok.
+
+cache_base_dir() ->
+ %% http://standards.freedesktop.org/basedir-spec/basedir-spec-0.7.html
+ %% If XDG_CACHE_HOME is set to an absolute path, use it as base.
+ XdgCacheHome = os:getenv("XDG_CACHE_HOME"),
+ CacheHome =
+ case is_list(XdgCacheHome) andalso filename:pathtype(XdgCacheHome) =:= absolute of
+ true ->
+ XdgCacheHome;
+ false ->
+ %% Otherwise, the default is $HOME/.cache.
+ {ok, [[Home]]} = init:get_argument(home),
+ filename:join(Home, ".cache")
+ end,
+ filename:join([CacheHome, "dialyzer_hipe_cache"]).
+
new_state() ->
#cl_state{}.
@@ -627,7 +677,7 @@ format_log_cache(LogCache) ->
Str = lists:append(lists:reverse(LogCache)),
string:join(string:tokens(Str, "\n"), "\n ").
--spec store_warnings(#cl_state{}, [dial_warning()]) -> #cl_state{}.
+-spec store_warnings(#cl_state{}, [raw_warning()]) -> #cl_state{}.
store_warnings(#cl_state{stored_warnings = StoredWarnings} = St, Warnings) ->
St#cl_state{stored_warnings = StoredWarnings ++ Warnings}.
@@ -656,15 +706,15 @@ return_value(State = #cl_state{erlang_mode = ErlangMode,
mod_deps = ModDeps,
output_plt = OutputPlt,
plt_info = PltInfo,
- stored_warnings = StoredWarnings,
- legal_warnings = LegalWarnings},
+ stored_warnings = StoredWarnings},
Plt) ->
case OutputPlt =:= none of
true -> ok;
false -> dialyzer_plt:to_file(OutputPlt, Plt, ModDeps, PltInfo)
end,
+ UnknownWarnings = unknown_warnings(State),
RetValue =
- case StoredWarnings =:= [] of
+ case StoredWarnings =:= [] andalso UnknownWarnings =:= [] of
true -> ?RET_NOTHING_SUSPICIOUS;
false -> ?RET_DISCREPANCIES
end,
@@ -677,33 +727,37 @@ return_value(State = #cl_state{erlang_mode = ErlangMode,
maybe_close_output_file(State),
{RetValue, []};
true ->
- Unknown =
- case ordsets:is_element(?WARN_UNKNOWN, LegalWarnings) of
- true ->
- unknown_functions(State) ++
- unknown_types(State) ++
- unknown_behaviours(State);
- false -> []
- end,
- UnknownWarnings =
- [{?WARN_UNKNOWN, {_Filename = "", _Line = 0}, W} || W <- Unknown],
AllWarnings =
UnknownWarnings ++ process_warnings(StoredWarnings),
- {RetValue, AllWarnings}
+ {RetValue, set_warning_id(AllWarnings)}
end.
+unknown_warnings(State = #cl_state{legal_warnings = LegalWarnings}) ->
+ Unknown = case ordsets:is_element(?WARN_UNKNOWN, LegalWarnings) of
+ true ->
+ unknown_functions(State) ++
+ unknown_types(State) ++
+ unknown_behaviours(State);
+ false -> []
+ end,
+ WarningInfo = {_Filename = "", _Line = 0, _MorMFA = ''},
+ [{?WARN_UNKNOWN, WarningInfo, W} || W <- Unknown].
+
unknown_functions(#cl_state{external_calls = Calls}) ->
[{unknown_function, MFA} || MFA <- Calls].
+set_warning_id(Warnings) ->
+ lists:map(fun({Tag, {File, Line, _MorMFA}, Msg}) ->
+ {Tag, {File, Line}, Msg}
+ end, Warnings).
+
print_ext_calls(#cl_state{report_mode = quiet}) ->
ok;
print_ext_calls(#cl_state{output = Output,
external_calls = Calls,
stored_warnings = Warnings,
- output_format = Format,
- legal_warnings = LegalWarnings}) ->
- case not ordsets:is_element(?WARN_UNKNOWN, LegalWarnings)
- orelse Calls =:= [] of
+ output_format = Format}) ->
+ case Calls =:= [] of
true -> ok;
false ->
case Warnings =:= [] of
@@ -735,10 +789,8 @@ print_ext_types(#cl_state{output = Output,
external_calls = Calls,
external_types = Types,
stored_warnings = Warnings,
- output_format = Format,
- legal_warnings = LegalWarnings}) ->
- case not ordsets:is_element(?WARN_UNKNOWN, LegalWarnings)
- orelse Types =:= [] of
+ output_format = Format}) ->
+ case Types =:= [] of
true -> ok;
false ->
case Warnings =:= [] andalso Calls =:= [] of
@@ -817,15 +869,16 @@ print_warnings(#cl_state{output = Output,
formatted ->
[dialyzer:format_warning(W, FOpt) || W <- PrWarnings];
raw ->
- [io_lib:format("~p. \n", [W]) || W <- PrWarnings]
+ [io_lib:format("~p. \n",
+ [W]) || W <- set_warning_id(PrWarnings)]
end,
io:format(Output, "\n~s", [S])
end.
--spec process_warnings([dial_warning()]) -> [dial_warning()].
+-spec process_warnings([raw_warning()]) -> [raw_warning()].
process_warnings(Warnings) ->
- Warnings1 = lists:keysort(2, Warnings), %% Sort on file/line
+ Warnings1 = lists:keysort(2, Warnings), %% Sort on file/line (and m/mfa..)
remove_duplicate_warnings(Warnings1, []).
remove_duplicate_warnings([Duplicate, Duplicate|Left], Acc) ->
diff --git a/lib/dialyzer/src/dialyzer_cl_parse.erl b/lib/dialyzer/src/dialyzer_cl_parse.erl
index 04ce0e8bc3..fae88ed6e8 100644
--- a/lib/dialyzer/src/dialyzer_cl_parse.erl
+++ b/lib/dialyzer/src/dialyzer_cl_parse.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -75,6 +75,9 @@ cl(["-nn"|T]) ->
cl(["--no_native"|T]) ->
put(dialyzer_options_native, false),
cl(T);
+cl(["--no_native_cache"|T]) ->
+ put(dialyzer_options_native_cache, false),
+ cl(T);
cl(["--plt_info"|T]) ->
put(dialyzer_options_analysis_type, plt_info),
cl(T);
@@ -363,7 +366,7 @@ help_message() ->
[--build_plt] [--add_to_plt] [--remove_from_plt]
[--check_plt] [--no_check_plt] [--plt_info] [--get_warnings]
[--dump_callgraph file] [--no_native] [--fullpath]
- [--statistics]
+ [--statistics] [--no_native_cache]
Options:
files_or_dirs (for backwards compatibility also as: -c files_or_dirs)
Use Dialyzer from the command line to detect defects in the
@@ -468,6 +471,11 @@ Options:
Bypass the native code compilation of some key files that Dialyzer
heuristically performs when dialyzing many files; this avoids the
compilation time but it may result in (much) longer analysis time.
+ --no_native_cache
+ By default, Dialyzer caches the results of native compilation in the
+ $XDG_CACHE_HOME/erlang/dialyzer_hipe_cache directory.
+ XDG_CACHE_HOME defaults to $HOME/.cache. Use this option to disable
+ caching.
--fullpath
Display the full path names of files for which warnings are emitted.
--gui
@@ -509,6 +517,8 @@ warning_options_msg() ->
-Wno_behaviours
Suppress warnings about behaviour callbacks which drift from the published
recommended interfaces.
+ -Wno_missing_calls
+ Suppress warnings about calls to missing functions.
-Wno_undefined_callbacks
Suppress warnings about behaviours that have no -callback attributes for
their callbacks.
@@ -522,6 +532,13 @@ warning_options_msg() ->
-Wunderspecs ***
Warn about underspecified functions
(those whose -spec is strictly more allowing than the success typing).
+ -Wunknown ***
+ Let warnings about unknown functions and types affect the
+ exit status of the command line version. The default is to ignore
+ warnings about unknown functions and types when setting the exit
+ status. When using the Dialyzer from Erlang, warnings about unknown
+ functions and types are returned; the default is not to return
+ such warnings.
The following options are also available but their use is not recommended:
(they are mostly for Dialyzer developers and internal debugging)
diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl
index 593e71f30b..e0add00061 100644
--- a/lib/dialyzer/src/dialyzer_codeserver.erl
+++ b/lib/dialyzer/src/dialyzer_codeserver.erl
@@ -43,19 +43,21 @@
insert/3,
insert_exports/2,
insert_temp_exported_types/2,
+ insert_fun_meta_info/2,
is_exported/2,
lookup_mod_code/2,
lookup_mfa_code/2,
lookup_mod_records/2,
lookup_mod_contracts/2,
lookup_mfa_contract/2,
+ lookup_meta_info/2,
new/0,
set_next_core_label/2,
set_temp_records/2,
store_temp_records/3,
store_temp_contracts/4]).
--export_type([codeserver/0]).
+-export_type([codeserver/0, fun_meta_info/0]).
-include("dialyzer.hrl").
@@ -70,12 +72,19 @@
-type contracts() :: dict:dict(mfa(),dialyzer_contracts:file_contract()).
-type mod_contracts() :: dict:dict(module(), contracts()).
+%% A property-list of data compiled from -compile and -dialyzer attributes.
+-type meta_info() :: [{{'nowarn_function' | dial_warn_tag()},
+ 'mod' | 'func'}].
+-type fun_meta_info() :: [{mfa(), meta_info()}
+ | {module(), [dial_warn_tag()]}].
+
-record(codeserver, {next_core_label = 0 :: label(),
code :: dict_ets(),
exported_types :: set_ets(), % set(mfa())
records :: dict_ets(),
contracts :: dict_ets(),
callbacks :: dict_ets(),
+ fun_meta_info :: dict_ets(), % {mfa(), meta_info()}
exports :: 'clean' | set_ets(), % set(mfa())
temp_exported_types :: 'clean' | set_ets(), % set(mfa())
temp_records :: 'clean' | dict_ets(),
@@ -129,14 +138,17 @@ new() ->
CodeOptions = [compressed, public, {read_concurrency, true}],
Code = ets:new(dialyzer_codeserver_code, CodeOptions),
TempOptions = [public, {write_concurrency, true}],
- [Exports, TempExportedTypes, TempRecords, TempContracts, TempCallbacks] =
+ [Exports, FunMetaInfo, TempExportedTypes, TempRecords, TempContracts,
+ TempCallbacks] =
[ets:new(Name, TempOptions) ||
Name <-
- [dialyzer_codeserver_exports, dialyzer_codeserver_temp_exported_types,
+ [dialyzer_codeserver_exports, dialyzer_codeserver_fun_meta_info,
+ dialyzer_codeserver_temp_exported_types,
dialyzer_codeserver_temp_records, dialyzer_codeserver_temp_contracts,
dialyzer_codeserver_temp_callbacks]],
#codeserver{code = Code,
exports = Exports,
+ fun_meta_info = FunMetaInfo,
temp_exported_types = TempExportedTypes,
temp_records = TempRecords,
temp_contracts = TempContracts,
@@ -184,6 +196,12 @@ insert_exports(List, #codeserver{exports = Exports} = CS) ->
true = ets_set_insert_list(List, Exports),
CS.
+-spec insert_fun_meta_info(fun_meta_info(), codeserver()) -> codeserver().
+
+insert_fun_meta_info(List, #codeserver{fun_meta_info = FunMetaInfo} = CS) ->
+ true = ets:insert(FunMetaInfo, List),
+ CS.
+
-spec is_exported(mfa(), codeserver()) -> boolean().
is_exported(MFA, #codeserver{exports = Exports}) ->
@@ -290,6 +308,14 @@ get_file_contract(Key, ContDict) ->
lookup_mfa_contract(MFA, #codeserver{contracts = ContDict}) ->
ets_dict_find(MFA, ContDict).
+-spec lookup_meta_info(module() | mfa(), codeserver()) -> meta_info().
+
+lookup_meta_info(MorMFA, #codeserver{fun_meta_info = FunMetaInfo}) ->
+ case ets_dict_find(MorMFA, FunMetaInfo) of
+ error -> [];
+ {ok, PropList} -> PropList
+ end.
+
-spec get_contracts(codeserver()) -> mod_contracts().
get_contracts(#codeserver{contracts = ContDict}) ->
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index ee147ca102..4a1ba9c539 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -351,7 +351,7 @@ solve_constraints(Contract, Call, Constraints) ->
%% Checks the contracts for functions that are not implemented
-spec contracts_without_fun(contracts(), [_], dialyzer_callgraph:callgraph()) ->
- [dial_warning()].
+ [raw_warning()].
contracts_without_fun(Contracts, AllFuns0, Callgraph) ->
AllFuns1 = [{dialyzer_callgraph:lookup_name(Label, Callgraph), Arity}
@@ -362,8 +362,9 @@ contracts_without_fun(Contracts, AllFuns0, Callgraph) ->
[warn_spec_missing_fun(MFA, Contracts) || MFA <- ErrorContractMFAs].
warn_spec_missing_fun({M, F, A} = MFA, Contracts) ->
- {FileLine, _Contract, _Xtra} = dict:fetch(MFA, Contracts),
- {?WARN_CONTRACT_SYNTAX, FileLine, {spec_missing_fun, [M, F, A]}}.
+ {{File, Line}, _Contract, _Xtra} = dict:fetch(MFA, Contracts),
+ WarningInfo = {File, Line, MFA},
+ {?WARN_CONTRACT_SYNTAX, WarningInfo, {spec_missing_fun, [M, F, A]}}.
%% This treats the "when" constraints. It will be extended, we hope.
insert_constraints([{subtype, Type1, Type2}|Left], Dict) ->
@@ -393,21 +394,22 @@ insert_constraints([], Dict) -> Dict.
store_tmp_contract(MFA, FileLine, {TypeSpec, Xtra}, SpecDict, RecordsDict) ->
%% io:format("contract from form: ~p\n", [TypeSpec]),
- TmpContract = contract_from_form(TypeSpec, RecordsDict, FileLine),
+ {Module, _, _} = MFA,
+ TmpContract = contract_from_form(TypeSpec, Module, RecordsDict, FileLine),
%% io:format("contract: ~p\n", [TmpContract]),
dict:store(MFA, {FileLine, TmpContract, Xtra}, SpecDict).
-contract_from_form(Forms, RecDict, FileLine) ->
- {CFuns, Forms1} = contract_from_form(Forms, RecDict, FileLine, [], []),
+contract_from_form(Forms, Module, RecDict, FileLine) ->
+ {CFuns, Forms1} = contract_from_form(Forms, Module, RecDict, FileLine, [], []),
#tmp_contract{contract_funs = CFuns, forms = Forms1}.
-contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], RecDict,
+contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], Module, RecDict,
FileLine, TypeAcc, FormAcc) ->
TypeFun =
fun(ExpTypes, AllRecords) ->
- Type =
+ NewType =
try
- erl_types:t_from_form(Form, RecDict)
+ erl_types:t_from_form(Form, ExpTypes, Module, AllRecords)
catch
throw:{error, Msg} ->
{File, Line} = FileLine,
@@ -415,61 +417,60 @@ contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], RecDict,
Line, Msg]),
throw({error, NewMsg})
end,
- NewType = erl_types:t_solve_remote(Type, ExpTypes, AllRecords),
NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType),
{NewTypeNoVars, []}
end,
NewTypeAcc = [TypeFun | TypeAcc],
NewFormAcc = [{Form, []} | FormAcc],
- contract_from_form(Left, RecDict, FileLine, NewTypeAcc, NewFormAcc);
+ contract_from_form(Left, Module, RecDict, FileLine, NewTypeAcc, NewFormAcc);
contract_from_form([{type, _L1, bounded_fun,
[{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left],
- RecDict, FileLine, TypeAcc, FormAcc) ->
+ Module, RecDict, FileLine, TypeAcc, FormAcc) ->
TypeFun =
fun(ExpTypes, AllRecords) ->
{Constr1, VarDict} =
- process_constraints(Constr, RecDict, ExpTypes, AllRecords),
- Type = erl_types:t_from_form(Form, RecDict, VarDict),
- NewType = erl_types:t_solve_remote(Type, ExpTypes, AllRecords),
+ process_constraints(Constr, Module, RecDict, ExpTypes, AllRecords),
+ NewType = erl_types:t_from_form(Form, ExpTypes, Module, AllRecords,
+ VarDict),
NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType),
{NewTypeNoVars, Constr1}
end,
NewTypeAcc = [TypeFun | TypeAcc],
NewFormAcc = [{Form, Constr} | FormAcc],
- contract_from_form(Left, RecDict, FileLine, NewTypeAcc, NewFormAcc);
-contract_from_form([], _RecDict, _FileLine, TypeAcc, FormAcc) ->
+ contract_from_form(Left, Module, RecDict, FileLine, NewTypeAcc, NewFormAcc);
+contract_from_form([], _Module, _RecDict, _FileLine, TypeAcc, FormAcc) ->
{lists:reverse(TypeAcc), lists:reverse(FormAcc)}.
-process_constraints(Constrs, RecDict, ExpTypes, AllRecords) ->
- Init0 = initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords),
+process_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords) ->
+ Init0 = initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords),
Init = remove_cycles(Init0),
- constraints_fixpoint(Init, RecDict, ExpTypes, AllRecords).
+ constraints_fixpoint(Init, Module, RecDict, ExpTypes, AllRecords).
-initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords) ->
- initialize_constraints(Constrs, RecDict, ExpTypes, AllRecords, []).
+initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords) ->
+ initialize_constraints(Constrs, Module, RecDict, ExpTypes, AllRecords, []).
-initialize_constraints([], _RecDict, _ExpTypes, _AllRecords, Acc) ->
+initialize_constraints([], _Module, _RecDict, _ExpTypes, _AllRecords, Acc) ->
Acc;
-initialize_constraints([Constr|Rest], RecDict, ExpTypes, AllRecords, Acc) ->
+initialize_constraints([Constr|Rest], Module, RecDict, ExpTypes, AllRecords, Acc) ->
case Constr of
{type, _, constraint, [{atom, _, is_subtype}, [Type1, Type2]]} ->
- T1 = final_form(Type1, RecDict, ExpTypes, AllRecords, dict:new()),
+ T1 = final_form(Type1, Module, ExpTypes, AllRecords, dict:new()),
Entry = {T1, Type2},
- initialize_constraints(Rest, RecDict, ExpTypes, AllRecords, [Entry|Acc]);
+ initialize_constraints(Rest, Module, RecDict, ExpTypes, AllRecords, [Entry|Acc]);
{type, _, constraint, [{atom,_,Name}, List]} ->
N = length(List),
throw({error,
io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])})
end.
-constraints_fixpoint(Constrs, RecDict, ExpTypes, AllRecords) ->
+constraints_fixpoint(Constrs, Module, RecDict, ExpTypes, AllRecords) ->
VarDict =
- constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, dict:new()),
- constraints_fixpoint(VarDict, Constrs, RecDict, ExpTypes, AllRecords).
+ constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, dict:new()),
+ constraints_fixpoint(VarDict, Module, Constrs, RecDict, ExpTypes, AllRecords).
-constraints_fixpoint(OldVarDict, Constrs, RecDict, ExpTypes, AllRecords) ->
+constraints_fixpoint(OldVarDict, Module, Constrs, RecDict, ExpTypes, AllRecords) ->
NewVarDict =
- constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, OldVarDict),
+ constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, OldVarDict),
case NewVarDict of
OldVarDict ->
DictFold =
@@ -479,25 +480,24 @@ constraints_fixpoint(OldVarDict, Constrs, RecDict, ExpTypes, AllRecords) ->
FinalConstrs = dict:fold(DictFold, [], NewVarDict),
{FinalConstrs, NewVarDict};
_Other ->
- constraints_fixpoint(NewVarDict, Constrs, RecDict, ExpTypes, AllRecords)
+ constraints_fixpoint(NewVarDict, Module, Constrs, RecDict, ExpTypes, AllRecords)
end.
-final_form(Form, RecDict, ExpTypes, AllRecords, VarDict) ->
- T1 = erl_types:t_from_form(Form, RecDict, VarDict),
- erl_types:t_solve_remote(T1, ExpTypes, AllRecords).
+final_form(Form, Module, ExpTypes, AllRecords, VarDict) ->
+ erl_types:t_from_form(Form, ExpTypes, Module, AllRecords, VarDict).
-constraints_to_dict(Constrs, RecDict, ExpTypes, AllRecords, VarDict) ->
+constraints_to_dict(Constrs, Module, RecDict, ExpTypes, AllRecords, VarDict) ->
Subtypes =
- constraints_to_subs(Constrs, RecDict, ExpTypes, AllRecords, VarDict, []),
+ constraints_to_subs(Constrs, Module, RecDict, ExpTypes, AllRecords, VarDict, []),
insert_constraints(Subtypes, dict:new()).
-constraints_to_subs([], _RecDict, _ExpTypes, _AllRecords, _VarDict, Acc) ->
+constraints_to_subs([], _Module, _RecDict, _ExpTypes, _AllRecords, _VarDict, Acc) ->
Acc;
-constraints_to_subs([C|Rest], RecDict, ExpTypes, AllRecords, VarDict, Acc) ->
+constraints_to_subs([C|Rest], Module, RecDict, ExpTypes, AllRecords, VarDict, Acc) ->
{T1, Form2} = C,
- T2 = final_form(Form2, RecDict, ExpTypes, AllRecords, VarDict),
+ T2 = final_form(Form2, Module, ExpTypes, AllRecords, VarDict),
NewAcc = [{subtype, T1, T2}|Acc],
- constraints_to_subs(Rest, RecDict, ExpTypes, AllRecords, VarDict, NewAcc).
+ constraints_to_subs(Rest, Module, RecDict, ExpTypes, AllRecords, VarDict, NewAcc).
%% Replaces variables with '_' when necessary to break up cycles among
%% the constraints.
@@ -585,7 +585,7 @@ general_domain([], AccSig) ->
-spec get_invalid_contract_warnings([module()],
dialyzer_codeserver:codeserver(),
dialyzer_plt:plt(),
- opaques_fun()) -> [dial_warning()].
+ opaques_fun()) -> [raw_warning()].
get_invalid_contract_warnings(Modules, CodeServer, Plt, FindOpaques) ->
get_invalid_contract_warnings_modules(Modules, CodeServer, Plt, FindOpaques, []).
@@ -609,12 +609,14 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
Sig = erl_types:t_fun(Args, Ret),
{M, _F, _A} = MFA,
Opaques = FindOpaques(M),
+ {File, Line} = FileLine,
+ WarningInfo = {File, Line, MFA},
NewAcc =
case check_contract(Contract, Sig, Opaques) of
{error, invalid_contract} ->
- [invalid_contract_warning(MFA, FileLine, Sig, RecDict)|Acc];
+ [invalid_contract_warning(MFA, WarningInfo, Sig, RecDict)|Acc];
{error, {overlapping_contract, []}} ->
- [overlapping_contract_warning(MFA, FileLine)|Acc];
+ [overlapping_contract_warning(MFA, WarningInfo)|Acc];
{error, {extra_range, ExtraRanges, STRange}} ->
Warn =
case t_from_forms_without_remote(Contract#contract.forms,
@@ -627,12 +629,12 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
end,
case Warn of
true ->
- [extra_range_warning(MFA, FileLine, ExtraRanges, STRange)|Acc];
+ [extra_range_warning(MFA, WarningInfo, ExtraRanges, STRange)|Acc];
false ->
Acc
end;
{error, Msg} ->
- [{?WARN_CONTRACT_SYNTAX, FileLine, Msg}|Acc];
+ [{?WARN_CONTRACT_SYNTAX, WarningInfo, Msg}|Acc];
ok ->
{M, F, A} = MFA,
CSig0 = get_contract_signature(Contract),
@@ -646,14 +648,14 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
BifSig = erl_types:t_fun(BifArgs, BifRet),
case check_contract(Contract, BifSig, Opaques) of
{error, _} ->
- [invalid_contract_warning(MFA, FileLine, BifSig, RecDict)
+ [invalid_contract_warning(MFA, WarningInfo, BifSig, RecDict)
|Acc];
ok ->
- picky_contract_check(CSig, BifSig, MFA, FileLine,
+ picky_contract_check(CSig, BifSig, MFA, WarningInfo,
Contract, RecDict, Acc)
end;
false ->
- picky_contract_check(CSig, Sig, MFA, FileLine, Contract,
+ picky_contract_check(CSig, Sig, MFA, WarningInfo, Contract,
RecDict, Acc)
end
end,
@@ -662,20 +664,20 @@ get_invalid_contract_warnings_funs([{MFA, {FileLine, Contract, _Xtra}}|Left],
get_invalid_contract_warnings_funs([], _Plt, _RecDict, _FindOpaques, Acc) ->
Acc.
-invalid_contract_warning({M, F, A}, FileLine, SuccType, RecDict) ->
+invalid_contract_warning({M, F, A}, WarningInfo, SuccType, RecDict) ->
SuccTypeStr = dialyzer_utils:format_sig(SuccType, RecDict),
- {?WARN_CONTRACT_TYPES, FileLine, {invalid_contract, [M, F, A, SuccTypeStr]}}.
+ {?WARN_CONTRACT_TYPES, WarningInfo, {invalid_contract, [M, F, A, SuccTypeStr]}}.
-overlapping_contract_warning({M, F, A}, FileLine) ->
- {?WARN_CONTRACT_TYPES, FileLine, {overlapping_contract, [M, F, A]}}.
+overlapping_contract_warning({M, F, A}, WarningInfo) ->
+ {?WARN_CONTRACT_TYPES, WarningInfo, {overlapping_contract, [M, F, A]}}.
-extra_range_warning({M, F, A}, FileLine, ExtraRanges, STRange) ->
+extra_range_warning({M, F, A}, WarningInfo, ExtraRanges, STRange) ->
ERangesStr = erl_types:t_to_string(ExtraRanges),
STRangeStr = erl_types:t_to_string(STRange),
- {?WARN_CONTRACT_SUPERTYPE, FileLine,
+ {?WARN_CONTRACT_SUPERTYPE, WarningInfo,
{extra_range, [M, F, A, ERangesStr, STRangeStr]}}.
-picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) ->
+picky_contract_check(CSig0, Sig0, MFA, WarningInfo, Contract, RecDict, Acc) ->
CSig = erl_types:t_abstract_records(CSig0, RecDict),
Sig = erl_types:t_abstract_records(Sig0, RecDict),
case erl_types:t_is_equal(CSig, Sig) of
@@ -685,7 +687,7 @@ picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) ->
erl_types:t_is_unit(erl_types:t_fun_range(CSig))) of
true -> Acc;
false ->
- case extra_contract_warning(MFA, FileLine, Contract,
+ case extra_contract_warning(MFA, WarningInfo, Contract,
CSig0, Sig0, RecDict) of
no_warning -> Acc;
{warning, Warning} -> [Warning|Acc]
@@ -693,7 +695,7 @@ picky_contract_check(CSig0, Sig0, MFA, FileLine, Contract, RecDict, Acc) ->
end
end.
-extra_contract_warning({M, F, A}, FileLine, Contract, CSig, Sig, RecDict) ->
+extra_contract_warning({M, F, A}, WarningInfo, Contract, CSig, Sig, RecDict) ->
%% We do not want to depend upon erl_types:t_to_string() possibly
%% hiding the contents of opaque types.
SigUnopaque = erl_types:t_unopaque(Sig),
@@ -724,7 +726,7 @@ extra_contract_warning({M, F, A}, FileLine, Contract, CSig, Sig, RecDict) ->
{?WARN_CONTRACT_NOT_EQUAL,
{contract_diff, [M, F, A, ContractString, SigString]}}
end,
- {warning, {Tag, FileLine, Msg}}
+ {warning, {Tag, WarningInfo, Msg}}
end.
is_remote_types_related(Contract, CSig, Sig, RecDict) ->
@@ -751,8 +753,7 @@ is_remote_types_related(Contract, CSig, Sig, RecDict) ->
end.
t_from_forms_without_remote([{FType, []}], RecDict) ->
- Type0 = erl_types:t_from_form(FType, RecDict),
- Type1 = erl_types:subst_all_remote(Type0, erl_types:t_none()),
+ Type1 = erl_types:t_from_form_without_remote(FType, RecDict),
{ok, erl_types:subst_all_vars_to_any(Type1)};
t_from_forms_without_remote([{_FType, _Constrs}], _RecDict) ->
%% 'When' constraints
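
To make the reshaped warnings concrete (file name, line and strings below are made up): a raw_warning() now records the function a warning belongs to in its middle element, e.g.

    %% {Tag, WarningInfo, Msg} with WarningInfo = {File, Line, MFA}
    {?WARN_CONTRACT_TYPES,
     {"demo.erl", 17, {demo, f, 1}},
     {invalid_contract, [demo, f, 1, "(atom()) -> atom()"]}}

whereas the previous dial_warning() carried only the {File, Line} pair.
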
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index 46467a1303..48d1ab9ebc 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -2,7 +2,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,14 +28,15 @@
-module(dialyzer_dataflow).
--export([get_fun_types/4, get_warnings/5, format_args/3]).
+-export([get_fun_types/5, get_warnings/5, format_args/3]).
%% Data structure interfaces.
-export([state__add_warning/2, state__cleanup/1,
state__duplicate/1, dispose_state/1,
state__get_callgraph/1, state__get_races/1,
state__get_records/1, state__put_callgraph/2,
- state__put_races/2, state__records_only/1]).
+ state__put_races/2, state__records_only/1,
+ state__find_function/2]).
-export_type([state/0]).
@@ -89,6 +90,8 @@
-type type() :: erl_types:erl_type().
-type types() :: erl_types:type_table().
+-type curr_fun() :: 'undefined' | 'top' | mfa_or_funlbl().
+
-define(no_arg, no_arg).
-define(TYPE_LIMIT, 3).
@@ -96,17 +99,20 @@
-define(BITS, 128).
-record(state, {callgraph :: dialyzer_callgraph:callgraph(),
+ codeserver :: dialyzer_codeserver:codeserver(),
envs :: env_tab(),
fun_tab :: fun_tab(),
+ fun_homes :: dict:dict(label(), mfa()),
plt :: dialyzer_plt:plt(),
opaques :: [type()],
races = dialyzer_races:new() :: dialyzer_races:races(),
records = dict:new() :: types(),
tree_map :: dict:dict(label(), cerl:cerl()),
warning_mode = false :: boolean(),
- warnings = [] :: [dial_warning()],
+ warnings = [] :: [raw_warning()],
work :: {[_], [_], sets:set()},
- module :: module()
+ module :: module(),
+ curr_fun :: curr_fun()
}).
-record(map, {dict = dict:new() :: type_tab(),
@@ -115,7 +121,6 @@
modified_stack = [] :: [{[Key :: term()],reference()}],
ref = undefined :: reference() | undefined}).
--type nowarn() :: dialyzer_analysis_callgraph:no_warn_unused().
-type env_tab() :: dict:dict(label(), #map{}).
-type fun_entry() :: {Args :: [type()], RetType :: type()}.
-type fun_tab() :: dict:dict('top' | label(),
@@ -133,22 +138,24 @@
-type fun_types() :: dict:dict(label(), type()).
-spec get_warnings(cerl:c_module(), dialyzer_plt:plt(),
- dialyzer_callgraph:callgraph(), types(), nowarn()) ->
- {[dial_warning()], fun_types()}.
-
-get_warnings(Tree, Plt, Callgraph, Records, NoWarnUnused) ->
- State1 = analyze_module(Tree, Plt, Callgraph, Records, true),
- State2 =
- state__renew_warnings(state__get_warnings(State1, NoWarnUnused), State1),
+ dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver(),
+ types()) ->
+ {[raw_warning()], fun_types()}.
+
+get_warnings(Tree, Plt, Callgraph, Codeserver, Records) ->
+ State1 = analyze_module(Tree, Plt, Callgraph, Codeserver, Records, true),
+ State2 = state__renew_warnings(state__get_warnings(State1), State1),
State3 = state__get_race_warnings(State2),
{State3#state.warnings, state__all_fun_types(State3)}.
-spec get_fun_types(cerl:c_module(), dialyzer_plt:plt(),
dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver(),
types()) -> fun_types().
-get_fun_types(Tree, Plt, Callgraph, Records) ->
- State = analyze_module(Tree, Plt, Callgraph, Records, false),
+get_fun_types(Tree, Plt, Callgraph, Codeserver, Records) ->
+ State = analyze_module(Tree, Plt, Callgraph, Codeserver, Records, false),
state__all_fun_types(State).
%%% ===========================================================================
@@ -157,11 +164,11 @@ get_fun_types(Tree, Plt, Callgraph, Records) ->
%%%
%%% ===========================================================================
-analyze_module(Tree, Plt, Callgraph, Records, GetWarnings) ->
+analyze_module(Tree, Plt, Callgraph, Codeserver, Records, GetWarnings) ->
debug_pp(Tree, false),
Module = cerl:atom_val(cerl:module_name(Tree)),
TopFun = cerl:ann_c_fun([{label, top}], [], Tree),
- State = state__new(Callgraph, TopFun, Plt, Module, Records),
+ State = state__new(Callgraph, Codeserver, TopFun, Plt, Module, Records),
State1 = state__race_analysis(not GetWarnings, State),
State2 = analyze_loop(State1),
case GetWarnings of
@@ -175,25 +182,26 @@ analyze_module(Tree, Plt, Callgraph, Records, GetWarnings) ->
analyze_loop(State) ->
case state__get_work(State) of
- none -> State;
- {Fun, NewState1} ->
+ none -> state__set_curr_fun(undefined, State);
+ {Fun, NewState0} ->
+ NewState1 = state__set_curr_fun(get_label(Fun), NewState0),
{ArgTypes, IsCalled} = state__get_args_and_status(Fun, NewState1),
case not IsCalled of
true ->
?debug("Not handling (not called) ~w: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(t_product(ArgTypes))]),
analyze_loop(NewState1);
false ->
case state__fun_env(Fun, NewState1) of
none ->
?debug("Not handling (no env) ~w: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(t_product(ArgTypes))]),
analyze_loop(NewState1);
Map ->
?debug("Handling fun ~p: ~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(state__fun_type(Fun, NewState1))]),
Vars = cerl:fun_vars(Fun),
Map1 = enter_type_lists(Vars, ArgTypes, Map),
@@ -212,7 +220,7 @@ analyze_loop(State) ->
{NewState4, _Map2, BodyType} =
traverse(Body, Map1, NewState3),
?debug("Done analyzing: ~w:~s\n",
- [state__lookup_name(get_label(Fun), State),
+ [NewState1#state.curr_fun,
t_to_string(t_fun(ArgTypes, BodyType))]),
NewState5 =
case IsRaceAnalysisEnabled of
@@ -2780,9 +2788,9 @@ filter_match_fail([]) ->
%%%
%%% ===========================================================================
-state__new(Callgraph, Tree, Plt, Module, Records) ->
+state__new(Callgraph, Codeserver, Tree, Plt, Module, Records) ->
Opaques = erl_types:t_opaque_from_records(Records),
- TreeMap = build_tree_map(Tree),
+ {TreeMap, FunHomes} = build_tree_map(Tree, Callgraph),
Funs = dict:fetch_keys(TreeMap),
FunTab = init_fun_tab(Funs, dict:new(), TreeMap, Callgraph, Plt),
ExportedFuns =
@@ -2790,7 +2798,8 @@ state__new(Callgraph, Tree, Plt, Module, Records) ->
Work = init_work(ExportedFuns),
Env = lists:foldl(fun(Fun, Env) -> dict:store(Fun, map__new(), Env) end,
dict:new(), Funs),
- #state{callgraph = Callgraph, envs = Env, fun_tab = FunTab, opaques = Opaques,
+ #state{callgraph = Callgraph, codeserver = Codeserver,
+ envs = Env, fun_tab = FunTab, fun_homes = FunHomes, opaques = Opaques,
plt = Plt, races = dialyzer_races:new(), records = Records,
warning_mode = false, warnings = [], work = Work, tree_map = TreeMap,
module = Module}.
@@ -2829,7 +2838,7 @@ state__renew_race_list(RaceList, RaceListSize,
state__renew_warnings(Warnings, State) ->
State#state{warnings = Warnings}.
--spec state__add_warning(dial_warning(), state()) -> state().
+-spec state__add_warning(raw_warning(), state()) -> state().
state__add_warning(Warn, #state{warnings = Warnings} = State) ->
State#state{warnings = [Warn|Warnings]}.
@@ -2844,29 +2853,45 @@ state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
Ann = cerl:get_ann(Tree),
case Force of
true ->
- Warn = {Tag, {get_file(Ann), abs(get_line(Ann))}, Msg},
+ WarningInfo = {get_file(Ann),
+ abs(get_line(Ann)),
+ State#state.curr_fun},
+ Warn = {Tag, WarningInfo, Msg},
?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]),
State#state{warnings = [Warn|Warnings]};
false ->
case is_compiler_generated(Ann) of
- true -> State;
- false ->
- Warn = {Tag, {get_file(Ann), get_line(Ann)}, Msg},
+ true -> State;
+ false ->
+ WarningInfo = {get_file(Ann), get_line(Ann), State#state.curr_fun},
+ Warn = {Tag, WarningInfo, Msg},
?debug("MSG ~s\n", [dialyzer:format_warning(Warn)]),
- State#state{warnings = [Warn|Warnings]}
+ State#state{warnings = [Warn|Warnings]}
end
end.
+-spec state__set_curr_fun(curr_fun(), state()) -> state().
+
+state__set_curr_fun(undefined, State) ->
+ State#state{curr_fun = undefined};
+state__set_curr_fun(FunLbl, State) ->
+ State#state{curr_fun = find_function(FunLbl, State)}.
+
+-spec state__find_function(mfa_or_funlbl(), state()) -> mfa_or_funlbl().
+
+state__find_function(FunLbl, State) ->
+ find_function(FunLbl, State).
+
state__get_race_warnings(#state{races = Races} = State) ->
{Races1, State1} = dialyzer_races:get_race_warnings(Races, State),
State1#state{races = Races1}.
state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
- callgraph = Callgraph, plt = Plt} = State,
- NoWarnUnused) ->
+ callgraph = Callgraph, plt = Plt} = State) ->
FoldFun =
fun({top, _}, AccState) -> AccState;
({FunLbl, Fun}, AccState) ->
+ AccState1 = state__set_curr_fun(FunLbl, AccState),
{NotCalled, Ret} =
case dict:fetch(get_label(Fun), FunTab) of
{not_handled, {_Args0, Ret0}} -> {true, Ret0};
@@ -2874,17 +2899,12 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
end,
case NotCalled of
true ->
- {Warn, Msg} =
- case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
- error -> {false, {}};
- {ok, {_M, F, A} = MFA} ->
- {not sets:is_element(MFA, NoWarnUnused),
- {unused_fun, [F, A]}}
- end,
- case Warn of
- true -> state__add_warning(AccState, ?WARN_NOT_CALLED, Fun, Msg);
- false -> AccState
- end;
+ case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
+ error -> AccState1;
+ {ok, {_M, F, A}} ->
+ Msg = {unused_fun, [F, A]},
+ state__add_warning(AccState1, ?WARN_NOT_CALLED, Fun, Msg)
+ end;
false ->
{Name, Contract} =
case dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
@@ -2897,7 +2917,7 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
%% Check if the function has a contract that allows this.
Warn =
case Contract of
- none -> not parent_allows_this(FunLbl, State);
+ none -> not parent_allows_this(FunLbl, AccState1);
{value, C} ->
GenRet = dialyzer_contracts:get_contract_return(C),
not t_is_unit(GenRet)
@@ -2907,19 +2927,19 @@ state__get_warnings(#state{tree_map = TreeMap, fun_tab = FunTab,
case classify_returns(Fun) of
no_match ->
Msg = {no_return, [no_match|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
Fun, Msg);
only_explicit ->
Msg = {no_return, [only_explicit|Name]},
- state__add_warning(AccState, ?WARN_RETURN_ONLY_EXIT,
+ state__add_warning(AccState1, ?WARN_RETURN_ONLY_EXIT,
Fun, Msg);
only_normal ->
Msg = {no_return, [only_normal|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
Fun, Msg);
both ->
Msg = {no_return, [both|Name]},
- state__add_warning(AccState, ?WARN_RETURN_NO_RETURN,
+ state__add_warning(AccState1, ?WARN_RETURN_NO_RETURN,
Fun, Msg)
end;
false ->
@@ -2957,8 +2977,10 @@ state__lookup_name(Fun, #state{callgraph = Callgraph}) ->
state__lookup_record(Tag, Arity, #state{records = Records}) ->
case erl_types:lookup_record(Tag, Arity, Records) of
{ok, Fields} ->
- {ok, t_tuple([t_atom(Tag)|
- [FieldType || {_FieldName, FieldType} <- Fields]])};
+ RecType =
+ t_tuple([t_atom(Tag)|
+ [FieldType || {_FieldName, _Abstr, FieldType} <- Fields]]),
+ {ok, RecType};
error ->
error
end.
@@ -2970,17 +2992,31 @@ state__get_args_and_status(Tree, #state{fun_tab = FunTab}) ->
{ok, {ArgTypes, _}} -> {ArgTypes, true}
end.
-build_tree_map(Tree) ->
+build_tree_map(Tree, Callgraph) ->
Fun =
- fun(T, Dict) ->
+ fun(T, {Dict, Homes, FunLbls} = Acc) ->
case cerl:is_c_fun(T) of
true ->
- dict:store(get_label(T), T, Dict);
+ FunLbl = get_label(T),
+ Dict1 = dict:store(FunLbl, T, Dict),
+ case catch dialyzer_callgraph:lookup_name(FunLbl, Callgraph) of
+ {ok, MFA} ->
+ F2 =
+ fun(Lbl, Dict0) ->
+ dict:store(Lbl, MFA, Dict0)
+ end,
+ Homes1 = lists:foldl(F2, Homes, [FunLbl|FunLbls]),
+ {Dict1, Homes1, []};
+ _ ->
+ {Dict1, Homes, [FunLbl|FunLbls]}
+ end;
false ->
- Dict
+ Acc
end
end,
- cerl_trees:fold(Fun, dict:new(), Tree).
+ Dict0 = dict:new(),
+ {Dict, Homes, _} = cerl_trees:fold(Fun, {Dict0, Dict0, []}, Tree),
+ {Dict, Homes}.
init_fun_tab([top|Left], Dict, TreeMap, Callgraph, Plt) ->
NewDict = dict:store(top, {[], t_none()}, Dict),
@@ -3438,6 +3474,13 @@ parent_allows_this(FunLbl, #state{callgraph = Callgraph, plt = Plt} =State) ->
end
end.
+find_function({_, _, _} = MFA, _State) ->
+ MFA;
+find_function(top, _State) ->
+ top;
+find_function(FunLbl, #state{fun_homes = Homes}) ->
+ dict:fetch(FunLbl, Homes).
+
classify_returns(Tree) ->
case find_terminals(cerl:fun_body(Tree)) of
{false, false} -> no_match;
@@ -3476,6 +3519,7 @@ find_terminals(Tree) ->
'let' -> find_terminals(cerl:let_body(Tree));
letrec -> find_terminals(cerl:letrec_body(Tree));
literal -> {false, true};
+ map -> {false, true};
primop -> {false, false}; %% match_fail, etc. are not explicit exits.
'receive' ->
Timeout = cerl:receive_timeout(Tree),
diff --git a/lib/dialyzer/src/dialyzer_options.erl b/lib/dialyzer/src/dialyzer_options.erl
index a92b8b1958..81907f7995 100644
--- a/lib/dialyzer/src/dialyzer_options.erl
+++ b/lib/dialyzer/src/dialyzer_options.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,7 +28,7 @@
-module(dialyzer_options).
--export([build/1]).
+-export([build/1, build_warnings/2]).
-include("dialyzer.hrl").
@@ -46,13 +46,11 @@ build(Opts) ->
?WARN_CALLGRAPH,
?WARN_FAILING_CALL,
?WARN_BIN_CONSTRUCTION,
- ?WARN_CALLGRAPH,
?WARN_CONTRACT_RANGE,
?WARN_CONTRACT_TYPES,
?WARN_CONTRACT_SYNTAX,
?WARN_BEHAVIOUR,
- ?WARN_UNDEFINED_CALLBACK,
- ?WARN_UNKNOWN],
+ ?WARN_UNDEFINED_CALLBACK],
DefaultWarns1 = ordsets:from_list(DefaultWarns),
InitPlt = dialyzer_plt:get_default_plt(),
DefaultOpts = #options{},
@@ -270,7 +268,7 @@ assert_solvers([v2|Terms]) ->
assert_solvers([Term|_]) ->
bad_option("Illegal value for solver", Term).
--spec build_warnings([atom()], [dial_warning()]) -> [dial_warning()].
+-spec build_warnings([atom()], dial_warn_tags()) -> dial_warn_tags().
build_warnings([Opt|Opts], Warnings) ->
NewWarnings =
@@ -302,6 +300,8 @@ build_warnings([Opt|Opts], Warnings) ->
ordsets:add_element(?WARN_RETURN_ONLY_EXIT, Warnings);
race_conditions ->
ordsets:add_element(?WARN_RACE_CONDITION, Warnings);
+ no_missing_calls ->
+ ordsets:del_element(?WARN_CALLGRAPH, Warnings);
specdiffs ->
S = ordsets:from_list([?WARN_CONTRACT_SUBTYPE,
?WARN_CONTRACT_SUPERTYPE,
@@ -311,8 +311,8 @@ build_warnings([Opt|Opts], Warnings) ->
ordsets:add_element(?WARN_CONTRACT_SUBTYPE, Warnings);
underspecs ->
ordsets:add_element(?WARN_CONTRACT_SUPERTYPE, Warnings);
- no_unknown ->
- ordsets:del_element(?WARN_UNKNOWN, Warnings);
+ unknown ->
+ ordsets:add_element(?WARN_UNKNOWN, Warnings);
OtherAtom ->
bad_option("Unknown dialyzer warning option", OtherAtom)
end,
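
A small usage sketch (the file list and option mix are illustrative): since ?WARN_UNKNOWN is no longer in the default set, a caller who wants warnings about unknown functions and types now requests them with the unknown flag, for example

    dialyzer:run([{analysis_type, succ_typings},
                  {files, Files},
                  {from, byte_code},
                  {warnings, [unknown, no_improper_lists]}]).
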
diff --git a/lib/dialyzer/src/dialyzer_races.erl b/lib/dialyzer/src/dialyzer_races.erl
index 2a8aba5d8f..48eb331239 100644
--- a/lib/dialyzer/src/dialyzer_races.erl
+++ b/lib/dialyzer/src/dialyzer_races.erl
@@ -85,9 +85,9 @@
-type race_tag() :: 'whereis_register' | 'whereis_unregister'
| 'ets_lookup_insert' | 'mnesia_dirty_read_write'.
-%% The following type is similar to the dial_warning() type but has a
+%% The following type is similar to the raw_warning() type but has a
%% tag which is local to this module and is not propagated to outside
--type dial_race_warning() :: {race_warn_tag(), file_line(), {atom(), [term()]}}.
+-type dial_race_warning() :: {race_warn_tag(), warning_info(), {atom(), [term()]}}.
-type race_warn_tag() :: ?WARN_WHEREIS_REGISTER | ?WARN_WHEREIS_UNREGISTER
| ?WARN_ETS_LOOKUP_INSERT | ?WARN_MNESIA_DIRTY_READ_WRITE.
@@ -312,10 +312,13 @@ race(State) ->
DepList = fixup_race_list(RaceWarnTag, VarArgs, State1),
{State2, RaceWarn} =
get_race_warn(Fun, Args, ArgTypes, DepList, State),
+ {File, Line} = FileLine,
+ CurrMFA = dialyzer_dataflow:state__find_function(CurrFun, State),
+ WarningInfo = {File, Line, CurrMFA},
race(
state__add_race_warning(
state__renew_race_tags(T, State2), RaceWarn, RaceWarnTag,
- FileLine))
+ WarningInfo))
end,
state__renew_race_tags([], RetState).
@@ -2324,7 +2327,7 @@ get_race_warnings_helper(Warnings, State) ->
[] ->
{dialyzer_dataflow:state__get_races(State), State};
[H|T] ->
- {RaceWarnTag, FileLine, {race_condition, [M, F, A, AT, S, DepList]}} = H,
+ {RaceWarnTag, WarningInfo, {race_condition, [M, F, A, AT, S, DepList]}} = H,
Reason =
case RaceWarnTag of
?WARN_WHEREIS_REGISTER ->
@@ -2347,7 +2350,7 @@ get_race_warnings_helper(Warnings, State) ->
"caused by its combination with ")
end,
W =
- {?WARN_RACE_CONDITION, FileLine,
+ {?WARN_RACE_CONDITION, WarningInfo,
{race_condition,
[M, F, dialyzer_dataflow:format_args(A, AT, S), Reason]}},
get_race_warnings_helper(T,
@@ -2377,12 +2380,12 @@ get_reason(DependencyList, Reason) ->
end
end.
-state__add_race_warning(State, RaceWarn, RaceWarnTag, FileLine) ->
+state__add_race_warning(State, RaceWarn, RaceWarnTag, WarningInfo) ->
case RaceWarn of
no_race -> State;
_Else ->
Races = dialyzer_dataflow:state__get_races(State),
- Warn = {RaceWarnTag, FileLine, RaceWarn},
+ Warn = {RaceWarnTag, WarningInfo, RaceWarn},
dialyzer_dataflow:state__put_races(add_race_warning(Warn, Races), State)
end.
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl
index 6dc4285194..7ceb19e30a 100644
--- a/lib/dialyzer/src/dialyzer_succ_typings.erl
+++ b/lib/dialyzer/src/dialyzer_succ_typings.erl
@@ -29,7 +29,7 @@
-export([analyze_callgraph/3,
analyze_callgraph/6,
- get_warnings/8
+ get_warnings/7
]).
-export([
@@ -69,10 +69,8 @@
-type scc() :: [mfa_or_funlbl()] | [module()].
-
-record(st, {callgraph :: dialyzer_callgraph:callgraph(),
codeserver :: dialyzer_codeserver:codeserver(),
- no_warn_unused :: sets:set(mfa()),
parent = none :: parent(),
timing_server :: dialyzer_timing:timing_server(),
solvers :: [solver()],
@@ -137,18 +135,17 @@ get_refined_success_typings(SCCs, #st{callgraph = Callgraph,
-type doc_plt() :: 'undefined' | dialyzer_plt:plt().
-spec get_warnings(dialyzer_callgraph:callgraph(), dialyzer_plt:plt(),
- doc_plt(), dialyzer_codeserver:codeserver(), sets:set(mfa()),
+ doc_plt(), dialyzer_codeserver:codeserver(),
dialyzer_timing:timing_server(), [solver()], pid()) ->
- {[dial_warning()], dialyzer_plt:plt(), doc_plt()}.
+ {[raw_warning()], dialyzer_plt:plt(), doc_plt()}.
get_warnings(Callgraph, Plt, DocPlt, Codeserver,
- NoWarnUnused, TimingServer, Solvers, Parent) ->
+ TimingServer, Solvers, Parent) ->
InitState =
init_state_and_get_success_typings(Callgraph, Plt, Codeserver,
TimingServer, Solvers, Parent),
- NewState = InitState#st{no_warn_unused = NoWarnUnused},
- Mods = dialyzer_callgraph:modules(NewState#st.callgraph),
- MiniPlt = NewState#st.plt,
+ Mods = dialyzer_callgraph:modules(InitState#st.callgraph),
+ MiniPlt = InitState#st.plt,
FindOpaques = lookup_and_find_opaques_fun(Codeserver),
CWarns =
dialyzer_contracts:get_invalid_contract_warnings(Mods, Codeserver,
@@ -156,31 +153,30 @@ get_warnings(Callgraph, Plt, DocPlt, Codeserver,
MiniDocPlt = dialyzer_plt:get_mini_plt(DocPlt),
ModWarns =
?timing(TimingServer, "warning",
- get_warnings_from_modules(Mods, NewState, MiniDocPlt)),
+ get_warnings_from_modules(Mods, InitState, MiniDocPlt)),
{postprocess_warnings(CWarns ++ ModWarns, Codeserver),
dialyzer_plt:restore_full_plt(MiniPlt, Plt),
dialyzer_plt:restore_full_plt(MiniDocPlt, DocPlt)}.
get_warnings_from_modules(Mods, State, DocPlt) ->
#st{callgraph = Callgraph, codeserver = Codeserver,
- no_warn_unused = NoWarnUnused, plt = Plt,
- timing_server = TimingServer} = State,
- Init = {Codeserver, Callgraph, NoWarnUnused, Plt, DocPlt},
+ plt = Plt, timing_server = TimingServer} = State,
+ Init = {Codeserver, Callgraph, Plt, DocPlt},
dialyzer_coordinator:parallel_job(warnings, Mods, Init, TimingServer).
--spec collect_warnings(module(), warnings_init_data()) -> [dial_warning()].
+-spec collect_warnings(module(), warnings_init_data()) -> [raw_warning()].
-collect_warnings(M, {Codeserver, Callgraph, NoWarnUnused, Plt, DocPlt}) ->
+collect_warnings(M, {Codeserver, Callgraph, Plt, DocPlt}) ->
ModCode = dialyzer_codeserver:lookup_mod_code(M, Codeserver),
Records = dialyzer_codeserver:lookup_mod_records(M, Codeserver),
Contracts = dialyzer_codeserver:lookup_mod_contracts(M, Codeserver),
AllFuns = collect_fun_info([ModCode]),
%% Check if there are contracts for functions that do not exist
- Warnings1 =
+ Warnings1 =
dialyzer_contracts:contracts_without_fun(Contracts, AllFuns, Callgraph),
{Warnings2, FunTypes} =
- dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph,
- Records, NoWarnUnused),
+ dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph, Codeserver,
+ Records),
Attrs = cerl:module_attrs(ModCode),
Warnings3 =
dialyzer_behaviours:check_callbacks(M, Attrs, Records, Plt, Codeserver),
@@ -197,17 +193,19 @@ postprocess_warnings(RawWarnings, Codeserver) ->
postprocess_dataflow_warns([], _Callgraph, WAcc, Acc) ->
lists:reverse(Acc, WAcc);
-postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, {CallF, CallL}, Msg}|Rest],
+postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, WarningInfo, Msg}|Rest],
Codeserver, WAcc, Acc) ->
+ {CallF, CallL, _CallMFA} = WarningInfo,
{contract_range, [Contract, M, F, A, ArgStrings, CRet]} = Msg,
case dialyzer_codeserver:lookup_mfa_contract({M,F,A}, Codeserver) of
- {ok, {{ContrF, _ContrL} = FileLine, _C, _X}} ->
+ {ok, {{ContrF, ContrL}, _C, _X}} ->
case CallF =:= ContrF of
true ->
NewMsg = {contract_range, [Contract, M, F, ArgStrings, CallL, CRet]},
- W = {?WARN_CONTRACT_RANGE, FileLine, NewMsg},
+ WarningInfo2 = {ContrF, ContrL, {M, F, A}},
+ W = {?WARN_CONTRACT_RANGE, WarningInfo2, NewMsg},
Filter =
- fun({?WARN_CONTRACT_TYPES, FL, _}) when FL =:= FileLine -> false;
+ fun({?WARN_CONTRACT_TYPES, WI, _}) when WI =:= WarningInfo2 -> false;
(_) -> true
end,
FilterWAcc = lists:filter(Filter, WAcc),
@@ -219,7 +217,7 @@ postprocess_dataflow_warns([{?WARN_CONTRACT_RANGE, {CallF, CallL}, Msg}|Rest],
%% The contract is not in a module that is currently under analysis.
%% We display the warning in the file/line of the call.
NewMsg = {contract_range, [Contract, M, F, ArgStrings, CallL, CRet]},
- W = {?WARN_CONTRACT_RANGE, {CallF, CallL}, NewMsg},
+ W = {?WARN_CONTRACT_RANGE, WarningInfo, NewMsg},
postprocess_dataflow_warns(Rest, Codeserver, WAcc, [W|Acc])
end.
@@ -262,7 +260,7 @@ refine_one_module(M, {CodeServer, Callgraph, Plt, _Solvers}) ->
Records = dialyzer_codeserver:lookup_mod_records(M, CodeServer),
FunTypes = get_fun_types_from_plt(AllFuns, Callgraph, Plt),
NewFunTypes =
- dialyzer_dataflow:get_fun_types(ModCode, Plt, Callgraph, Records),
+ dialyzer_dataflow:get_fun_types(ModCode, Plt, Callgraph, CodeServer, Records),
Contracts1 = dialyzer_codeserver:lookup_mod_contracts(M, CodeServer),
Contracts = orddict:from_list(dict:to_list(Contracts1)),
FindOpaques = find_opaques_fun(Records),
diff --git a/lib/dialyzer/src/dialyzer_timing.erl b/lib/dialyzer/src/dialyzer_timing.erl
index b1a4bdc07c..759d49abc8 100644
--- a/lib/dialyzer/src/dialyzer_timing.erl
+++ b/lib/dialyzer/src/dialyzer_timing.erl
@@ -38,7 +38,7 @@ init(Active) ->
case Active of
true ->
io:format("\n"),
- spawn_link(fun() -> loop(now(), 0, "") end);
+ spawn_link(fun() -> loop(erlang:monotonic_time(), 0, "") end);
debug ->
io:format("\n"),
spawn_link(fun() -> debug_loop("") end);
@@ -105,14 +105,14 @@ debug_loop(Phase) ->
start_stamp(none, _) -> ok;
start_stamp(Pid, Msg) ->
- Pid ! {stamp, Msg, now()},
+ Pid ! {stamp, Msg, erlang:monotonic_time()},
ok.
-spec end_stamp(timing_server()) -> ok.
end_stamp(none) -> ok;
end_stamp(Pid) ->
- Pid ! {stamp, now()},
+ Pid ! {stamp, erlang:monotonic_time()},
ok.
-spec send_size_info(timing_server(), integer(), string()) -> ok.
@@ -126,8 +126,8 @@ send_size_info(Pid, Size, Unit) ->
stop(none) -> ok;
stop(Pid) ->
- Pid ! {self(), stop, now()},
+ Pid ! {self(), stop, erlang:monotonic_time()},
receive ok -> ok end.
diff(T2, T1) ->
- timer:now_diff(T2,T1) / 1000000.
+ (T2-T1) / erlang:convert_time_unit(1, seconds, native).
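
The same pattern in isolation, for reference (variable names and do_work/0 are placeholders): erlang:monotonic_time/0 returns native time units, so dividing the difference by the number of native units per second gives the elapsed time in seconds:

    T1 = erlang:monotonic_time(),
    do_work(),
    T2 = erlang:monotonic_time(),
    ElapsedSeconds = (T2 - T1) / erlang:convert_time_unit(1, seconds, native).
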
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index 3d03ed3ab3..6c14860d7d 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1579,11 +1579,11 @@ get_bif_constr({M, F, A} = _BIF, Dst, Args, _State) ->
eval_inv_arith('+', _Pos, Dst, Arg) ->
bif_return(erlang, '-', 2, [Dst, Arg]);
eval_inv_arith('*', _Pos, Dst, Arg) ->
- case t_number_vals(Arg) of
- [0] -> t_integer();
- _ ->
+ Zero = t_from_term(0),
+ case t_is_none(t_inf(Arg, Zero)) of
+ false -> t_integer();
+ true ->
TmpRet = bif_return(erlang, 'div', 2, [Dst, Arg]),
- Zero = t_from_term(0),
%% If 0 is not part of the result, it cannot be part of the argument.
case t_is_subtype(Zero, Dst) of
false -> t_subtract(TmpRet, Zero);
@@ -3264,7 +3264,7 @@ lookup_record(Records, Tag, Arity) ->
{ok, Fields} ->
RecType =
t_tuple([t_from_term(Tag)|
- [FieldType || {_FieldName, FieldType} <- Fields]]),
+ [FieldType || {_FieldName, _Abstr, FieldType} <- Fields]]),
{ok, RecType};
error ->
error
@@ -3275,7 +3275,7 @@ is_literal_record(Tree) ->
lists:member(record, Ann).
family(L) ->
- sofs:to_external(sofs:rel2fam(sofs:relation(L))).
+ dialyzer_utils:family(L).
%% ============================================================================
%%
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index e5f5c69d45..e29fc3ba8b 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -40,12 +40,16 @@
get_core_from_src/2,
get_record_and_type_info/1,
get_spec_info/3,
+ get_fun_meta_info/3,
+ is_suppressed_fun/2,
+ is_suppressed_tag/3,
merge_records/2,
pp_hook/0,
process_record_remote_types/1,
sets_filter/2,
src_compiler_opts/0,
- parallelism/0
+ parallelism/0,
+ family/1
]).
-include("dialyzer.hrl").
@@ -59,13 +63,13 @@ print_types(RecDict) ->
print_types1([], _) ->
ok;
-print_types1([{type, _Name} = Key|T], RecDict) ->
- {ok, {_Mod, Form, _Args}} = dict:find(Key, RecDict),
- io:format("\n~w: ~w\n", [Key, erl_types:t_from_form(Form, RecDict)]),
+print_types1([{type, _Name, _NArgs} = Key|T], RecDict) ->
+ {ok, {{_Mod, _Form, _Args}, Type}} = dict:find(Key, RecDict),
+ io:format("\n~w: ~w\n", [Key, Type]),
print_types1(T, RecDict);
-print_types1([{opaque, _Name} = Key|T], RecDict) ->
- {ok, {_Mod, Form, _Args}} = dict:find(Key, RecDict),
- io:format("\n~w: ~w\n", [Key, erl_types:t_from_form(Form, RecDict)]),
+print_types1([{opaque, _Name, _NArgs} = Key|T], RecDict) ->
+ {ok, {{_Mod, _Form, _Args}, Type}} = dict:find(Key, RecDict),
+ io:format("\n~w: ~w\n", [Key, Type]),
print_types1(T, RecDict);
print_types1([{record, _Name} = Key|T], RecDict) ->
{ok, [{_Arity, _Fields} = AF]} = dict:find(Key, RecDict),
@@ -80,7 +84,9 @@ print_types1([{record, _Name} = Key|T], RecDict) ->
-type abstract_code() :: [tuple()]. %% XXX: import from somewhere
-type comp_options() :: [compile:option()].
--type mod_or_fname() :: atom() | file:filename().
+-type mod_or_fname() :: module() | file:filename().
+-type fa() :: {atom(), arity()}.
+-type codeserver() :: dialyzer_codeserver:codeserver().
%% ============================================================================
%%
@@ -215,28 +221,29 @@ get_record_and_type_info([{attribute, _, type, {{record, Name}, Fields0, []}}
get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm}}|Left],
Module, Records, RecDict) when Attr =:= 'type';
Attr =:= 'opaque' ->
- try
- NewRecDict = add_new_type(Attr, Name, TypeForm, [], Module, RecDict),
- get_record_and_type_info(Left, Module, Records, NewRecDict)
+ try add_new_type(Attr, Name, TypeForm, [], Module, RecDict) of
+ NewRecDict ->
+ get_record_and_type_info(Left, Module, Records, NewRecDict)
catch
throw:{error, _} = Error -> Error
end;
get_record_and_type_info([{attribute, _, Attr, {Name, TypeForm, Args}}|Left],
Module, Records, RecDict) when Attr =:= 'type';
Attr =:= 'opaque' ->
- try
- NewRecDict = add_new_type(Attr, Name, TypeForm, Args, Module, RecDict),
- get_record_and_type_info(Left, Module, Records, NewRecDict)
+ try add_new_type(Attr, Name, TypeForm, Args, Module, RecDict) of
+ NewRecDict ->
+ get_record_and_type_info(Left, Module, Records, NewRecDict)
catch
throw:{error, _} = Error -> Error
end;
get_record_and_type_info([_Other|Left], Module, Records, RecDict) ->
get_record_and_type_info(Left, Module, Records, RecDict);
get_record_and_type_info([], _Module, Records, RecDict) ->
- case type_record_fields(lists:reverse(Records), RecDict) of
- {ok, _NewRecDict} = Ok ->
- ?debug(_NewRecDict),
- Ok;
+ case
+ check_type_of_record_fields(lists:reverse(Records), RecDict)
+ of
+ ok ->
+ {ok, RecDict};
{error, Name, Error} ->
{error, flat_format(" Error while parsing #~w{}: ~s\n", [Name, Error])}
end.
@@ -248,20 +255,21 @@ add_new_type(TypeOrOpaque, Name, TypeForm, ArgForms, Module, RecDict) ->
Msg = flat_format("Type ~s/~w already defined\n", [Name, Arity]),
throw({error, Msg});
false ->
- ArgTypes = [erl_types:t_from_form(X) || X <- ArgForms],
- case lists:all(fun erl_types:t_is_var/1, ArgTypes) of
- true ->
- ArgNames = [erl_types:t_var_name(X) || X <- ArgTypes],
+ try erl_types:t_var_names(ArgForms) of
+ ArgNames ->
dict:store({TypeOrOpaque, Name, Arity},
- {Module, TypeForm, ArgNames}, RecDict);
- false ->
+ {{Module, TypeForm, ArgNames},
+ erl_types:t_any()}, RecDict)
+ catch
+ _:_ ->
throw({error, flat_format("Type declaration for ~w does not "
"have variables as parameters", [Name])})
end
end.
get_record_fields(Fields, RecDict) ->
- get_record_fields(Fields, RecDict, []).
+ Fs = get_record_fields(Fields, RecDict, []),
+ {ok, [{Name, Form, erl_types:t_any()} || {Name, Form} <- Fs]}.
get_record_fields([{typed_record_field, OrdRecField, TypeForm}|Left],
RecDict, Acc) ->
@@ -270,7 +278,7 @@ get_record_fields([{typed_record_field, OrdRecField, TypeForm}|Left],
{record_field, _Line, Name0} -> erl_parse:normalise(Name0);
{record_field, _Line, Name0, _Init} -> erl_parse:normalise(Name0)
end,
- get_record_fields(Left, RecDict, [{Name, TypeForm}|Acc]);
+ get_record_fields(Left, RecDict, [{Name, TypeForm}|Acc]);
get_record_fields([{record_field, _Line, Name}|Left], RecDict, Acc) ->
NewAcc = [{erl_parse:normalise(Name), {var, -1, '_'}}|Acc],
get_record_fields(Left, RecDict, NewAcc);
@@ -278,54 +286,66 @@ get_record_fields([{record_field, _Line, Name, _Init}|Left], RecDict, Acc) ->
NewAcc = [{erl_parse:normalise(Name), {var, -1, '_'}}|Acc],
get_record_fields(Left, RecDict, NewAcc);
get_record_fields([], _RecDict, Acc) ->
- {ok, lists:reverse(Acc)}.
+ lists:reverse(Acc).
-type_record_fields([], RecDict) ->
- {ok, RecDict};
-type_record_fields([RecKey|Recs], RecDict) ->
- {ok, [{Arity, Fields}]} = dict:find(RecKey, RecDict),
+%% Just check the local types. process_record_remote_types will add
+%% the types later.
+check_type_of_record_fields([], _RecDict) ->
+ ok;
+check_type_of_record_fields([RecKey|Recs], RecDict) ->
+ {ok, [{_Arity, Fields}]} = dict:find(RecKey, RecDict),
try
- TypedFields =
- [{FieldName, erl_types:t_from_form(FieldTypeForm, RecDict)}
- || {FieldName, FieldTypeForm} <- Fields],
- RecDict1 = dict:store(RecKey, [{Arity, TypedFields}], RecDict),
- Fun = fun(OldOrdDict) ->
- orddict:store(Arity, TypedFields, OldOrdDict)
- end,
- RecDict2 = dict:update(RecKey, Fun, RecDict1),
- type_record_fields(Recs, RecDict2)
+ [erl_types:t_from_form_without_remote(FieldTypeForm, RecDict)
+ || {_FieldName, FieldTypeForm, _} <- Fields]
+ of
+ L when is_list(L) ->
+ check_type_of_record_fields(Recs, RecDict)
catch
throw:{error, Error} ->
{record, Name} = RecKey,
{error, Name, Error}
end.
--spec process_record_remote_types(dialyzer_codeserver:codeserver()) -> dialyzer_codeserver:codeserver().
+-spec process_record_remote_types(codeserver()) -> codeserver().
+%% The field types are cached. Used during analysis when handling records.
process_record_remote_types(CServer) ->
TempRecords = dialyzer_codeserver:get_temp_records(CServer),
TempExpTypes = dialyzer_codeserver:get_temp_exported_types(CServer),
- RecordFun =
- fun(Key, Value) ->
- case Key of
- {record, _Name} ->
- FieldFun =
- fun(_Arity, Fields) ->
- [{Name, erl_types:t_solve_remote(Field, TempExpTypes,
- TempRecords)}
- || {Name, Field} <- Fields]
- end,
- orddict:map(FieldFun, Value);
- _Other -> Value
- end
- end,
ModuleFun =
- fun(_Module, Record) ->
+ fun(Module, Record) ->
+ RecordFun =
+ fun(Key, Value) ->
+ case Key of
+ {record, _Name} ->
+ FieldFun =
+ fun(_Arity, Fields) ->
+ [{Name, Field,
+ erl_types:t_from_form(Field,
+ TempExpTypes,
+ Module,
+ TempRecords)}
+ || {Name, Field, _} <- Fields]
+ end,
+ orddict:map(FieldFun, Value);
+ {opaque, _, _} ->
+ {{_Module, Form, _ArgNames}=F, _Type} = Value,
+ Type = erl_types:t_from_form(Form, TempExpTypes, Module,
+ TempRecords),
+ {F, Type};
+ _Other -> Value
+ end
+ end,
dict:map(RecordFun, Record)
end,
- NewRecords = dict:map(ModuleFun, TempRecords),
- CServer1 = dialyzer_codeserver:finalize_records(NewRecords, CServer),
- dialyzer_codeserver:finalize_exported_types(TempExpTypes, CServer1).
+ try dict:map(ModuleFun, TempRecords) of
+ NewRecords ->
+ CServer1 = dialyzer_codeserver:finalize_records(NewRecords, CServer),
+ dialyzer_codeserver:finalize_exported_types(TempExpTypes, CServer1)
+ catch
+ throw:{error, _RecName, _Error} = Error->
+ Error
+ end.
-spec merge_records(dict:dict(), dict:dict()) -> dict:dict().
@@ -341,7 +361,7 @@ merge_records(NewRecords, OldRecords) ->
-type spec_dict() :: dict:dict().
-type callback_dict() :: dict:dict().
--spec get_spec_info(atom(), abstract_code(), dict:dict()) ->
+-spec get_spec_info(module(), abstract_code(), dict:dict()) ->
{'ok', spec_dict(), callback_dict()} | {'error', string()}.
get_spec_info(ModName, AbstractCode, RecordsDict) ->
@@ -359,23 +379,17 @@ get_optional_callbacks(Abs) ->
is_fa_list(O)],
lists:append(L).
-is_fa_list([{FuncName, Arity}|L])
- when is_atom(FuncName), is_integer(Arity), Arity >= 0 ->
- is_fa_list(L);
-is_fa_list([]) -> true;
-is_fa_list(_) -> false.
-
-
%% TypeSpec is a list of conditional contracts for a function.
%% Each contract is of the form {[Argument], Range, [Constraint]} where
%% - Argument and Range are in erl_types:erl_type() format and
%% - Constraint is of the form {subtype, T1, T2} where T1 and T2
%% are erl_types:erl_type()
-get_spec_info([{attribute, Ln, Contract, {Id, TypeSpec}}|Left],
+get_spec_info([{attribute, Attr, Contract, {Id, TypeSpec}}|Left],
SpecDict, CallbackDict, RecordsDict, ModName, OptCb, File)
when ((Contract =:= 'spec') or (Contract =:= 'callback')),
is_list(TypeSpec) ->
+ Ln = erl_anno:line(Attr),
MFA = case Id of
{_, _, _} = T -> T;
{F, A} -> {ModName, F, A}
@@ -422,6 +436,126 @@ get_spec_info([], SpecDict, CallbackDict,
_RecordsDict, _ModName, _OptCb, _File) ->
{ok, SpecDict, CallbackDict}.
+-spec get_fun_meta_info(module(), abstract_code(), [dial_warn_tag()]) ->
+ dialyzer_codeserver:fun_meta_info().
+
+get_fun_meta_info(M, Abs, LegalWarnings) ->
+ NoWarn = get_nowarn_unused_function(M, Abs),
+ FuncSupp = get_func_suppressions(M, Abs),
+ Warnings0 = get_options(Abs, LegalWarnings),
+ Warnings = ordsets:to_list(Warnings0),
+ ModuleWarnings = [{M, W} || W <- Warnings],
+ RawProps = lists:append([NoWarn, FuncSupp, ModuleWarnings]),
+ process_options(dialyzer_utils:family(RawProps), Warnings0).
+
+process_options([{M, _}=Mod|Left], Warnings) when is_atom(M) ->
+ [Mod|process_options(Left, Warnings)];
+process_options([{{_M, _F, _A}=MFA, Opts}|Left], Warnings) ->
+ WL = case lists:member(nowarn_function, Opts) of
+ true -> [{nowarn_function, func}]; % takes precedence
+ false ->
+ Ws = dialyzer_options:build_warnings(Opts, Warnings),
+ ModOnly = [{W, mod} || W <- ordsets:subtract(Warnings, Ws)],
+ FunOnly = [{W, func} || W <- ordsets:subtract(Ws, Warnings)],
+ ordsets:union(ModOnly, FunOnly)
+ end,
+ case WL of
+ [] -> process_options(Left, Warnings);
+ _ -> [{MFA, WL}|process_options(Left, Warnings)]
+ end;
+process_options([], _Warnings) -> [].
+
+-spec get_nowarn_unused_function(module(), abstract_code()) ->
+ [{mfa(), 'no_unused'}].
+
+get_nowarn_unused_function(M, Abs) ->
+ Opts = get_options_with_tag(compile, Abs),
+ Warn = erl_lint:bool_option(warn_unused_function, nowarn_unused_function,
+ true, Opts),
+ Functions = [{F, A} || {function, _, F, A, _} <- Abs],
+ AttrFile = collect_attribute(Abs, compile),
+ TagsFaList = check_fa_list(AttrFile, nowarn_unused_function, Functions),
+ FAs = case Warn of
+ false -> Functions;
+ true ->
+ [FA || {{nowarn_unused_function,_L,_File}, FA} <- TagsFaList]
+ end,
+ [{{M, F, A}, no_unused} || {F, A} <- FAs].
+
+-spec get_func_suppressions(module(), abstract_code()) ->
+ [{mfa(), 'nowarn_function' | dial_warn_tag()}].
+
+get_func_suppressions(M, Abs) ->
+ Functions = [{F, A} || {function, _, F, A, _} <- Abs],
+ AttrFile = collect_attribute(Abs, dialyzer),
+ TagsFAs = check_fa_list(AttrFile, '*', Functions),
+ %% Check the options:
+ Fun = fun({{nowarn_function, _L, _File}, _FA}) -> ok;
+ ({OptLFile, _FA}) ->
+ _ = get_options1([OptLFile], ordsets:new())
+ end,
+ lists:foreach(Fun, TagsFAs),
+ [{{M, F, A}, W} || {{W, _L, _File}, {F, A}} <- TagsFAs].
+
+-spec get_options(abstract_code(), [dial_warn_tag()]) ->
+ ordsets:ordset(dial_warn_tag()).
+
+get_options(Abs, LegalWarnings) ->
+ AttrFile = collect_attribute(Abs, dialyzer),
+ get_options1(AttrFile, LegalWarnings).
+
+get_options1([{Args, L, File}|Left], Warnings) ->
+ Opts = [O ||
+ O <- lists:flatten([Args]),
+ is_atom(O)],
+ try dialyzer_options:build_warnings(Opts, Warnings) of
+ NewWarnings ->
+ get_options1(Left, NewWarnings)
+ catch
+ throw:{dialyzer_options_error, Msg} ->
+ Msg1 = flat_format(" ~s:~w: ~s", [File, L, Msg]),
+ throw({error, Msg1})
+ end;
+get_options1([], Warnings) ->
+ Warnings.
+
+-type collected_attribute() ::
+ {Args :: term(), erl_anno:line(), file:filename()}.
+
+collect_attribute(Abs, Tag) ->
+ collect_attribute(Abs, Tag, "nofile").
+
+collect_attribute([{attribute, L, Tag, Args}|Left], Tag, File) ->
+ CollAttr = {Args, L, File},
+ [CollAttr | collect_attribute(Left, Tag, File)];
+collect_attribute([{attribute, _, file, {IncludeFile, _}}|Left], Tag, _) ->
+ collect_attribute(Left, Tag, IncludeFile);
+collect_attribute([_Other|Left], Tag, File) ->
+ collect_attribute(Left, Tag, File);
+collect_attribute([], _Tag, _File) -> [].
+
+-spec is_suppressed_fun(mfa(), codeserver()) -> boolean().
+
+is_suppressed_fun(MFA, CodeServer) ->
+ lookup_fun_property(MFA, nowarn_function, CodeServer).
+
+-spec is_suppressed_tag(mfa() | module(), dial_warn_tag(), codeserver()) ->
+ boolean().
+
+is_suppressed_tag(MorMFA, Tag, Codeserver) ->
+ not lookup_fun_property(MorMFA, Tag, Codeserver).
+
+lookup_fun_property({M, _F, _A}=MFA, Property, CodeServer) ->
+ MFAPropList = dialyzer_codeserver:lookup_meta_info(MFA, CodeServer),
+ case proplists:get_value(Property, MFAPropList, no) of
+ mod -> false; % suppressed in function
+ func -> true; % requested in function
+ no -> lookup_fun_property(M, Property, CodeServer)
+ end;
+lookup_fun_property(M, Property, CodeServer) when is_atom(M) ->
+ MPropList = dialyzer_codeserver:lookup_meta_info(M, CodeServer),
+ proplists:is_defined(Property, MPropList).
+
%% ============================================================================
%%
%% Exported types
@@ -503,6 +637,57 @@ format_sig(Type, RecDict) ->
flat_format(Fmt, Lst) ->
lists:flatten(io_lib:format(Fmt, Lst)).
+-spec get_options_with_tag(atom(), abstract_code()) -> [term()].
+
+get_options_with_tag(Tag, Abs) ->
+ lists:flatten([O || {attribute, _, Tag0, O} <- Abs, Tag =:= Tag0]).
+
+%% Check F/A, and collect (unchecked) warning tags with line and file.
+-spec check_fa_list([collected_attribute()], atom(), [fa()]) ->
+ [{{atom(), erl_anno:line(), file:filename()},fa()}].
+
+check_fa_list(AttrFile, Tag, Functions) ->
+ FuncTab = gb_sets:from_list(Functions),
+ check_fa_list1(AttrFile, Tag, FuncTab).
+
+check_fa_list1([{Args, L, File}|Left], Tag, Funcs) ->
+ TermsL = [{{Tag0, L, File}, Term} ||
+ {Tags, Terms0} <- lists:flatten([Args]),
+ Tag0 <- lists:flatten([Tags]),
+ Tag =:= '*' orelse Tag =:= Tag0,
+ Term <- lists:flatten([Terms0])],
+ case lists:dropwhile(fun({_, T}) -> is_fa(T) end, TermsL) of
+ [] -> ok;
+ [{_, Bad}|_] ->
+ Msg1 = flat_format(" Bad function ~w in line ~s:~w",
+ [Bad, File, L]),
+ throw({error, Msg1})
+ end,
+ case lists:dropwhile(fun({_, FA}) -> is_known(FA, Funcs) end, TermsL) of
+ [] -> ok;
+ [{_, {F, A}}|_] ->
+ Msg2 = flat_format(" Unknown function ~w/~w in line ~s:~w",
+ [F, A, File, L]),
+ throw({error, Msg2})
+ end,
+ TermsL ++ check_fa_list1(Left, Tag, Funcs);
+check_fa_list1([], _Tag, _Funcs) -> [].
+
+is_known(FA, Funcs) ->
+ gb_sets:is_element(FA, Funcs).
+
+-spec is_fa_list(term()) -> boolean().
+
+is_fa_list([E|L]) -> is_fa(E) andalso is_fa_list(L);
+is_fa_list([]) -> true;
+is_fa_list(_) -> false.
+
+-spec is_fa(term()) -> boolean().
+
+is_fa({FuncName, Arity})
+ when is_atom(FuncName), is_integer(Arity), Arity >= 0 -> true;
+is_fa(_) -> false.
+
%%-------------------------------------------------------------------
%% Author : Per Gustafsson <[email protected]>
%% Description : Provides better printing of binaries.
@@ -607,3 +792,8 @@ parallelism() ->
CPUs = erlang:system_info(logical_processors_available),
Schedulers = erlang:system_info(schedulers),
min(CPUs, Schedulers).
+
+-spec family([{K,V}]) -> [{K,[V]}].
+
+family(L) ->
+ sofs:to_external(sofs:rel2fam(sofs:relation(L))).
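
For orientation, the attributes that get_fun_meta_info/3 collects are written in user modules roughly as below (module and function names are invented; the forms follow the -dialyzer attribute documentation):

    -module(demo).
    -export([f/0, g/0]).

    %% Suppress every warning for f/0; takes precedence over other tags.
    -dialyzer({nowarn_function, f/0}).
    %% Suppress one warning class for selected functions only.
    -dialyzer({no_return, [g/0]}).
    %% A module-wide option, validated via dialyzer_options:build_warnings/2.
    -dialyzer(no_improper_lists).
    %% Unused-function suppression is still read from the compile attribute.
    -compile({nowarn_unused_function, [{helper, 0}]}).

    f() -> ok.
    g() -> exit(oops).
    helper() -> ok.
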
diff --git a/lib/dialyzer/test/dialyzer_SUITE.erl b/lib/dialyzer/test/dialyzer_SUITE.erl
index 8507525597..f625d12b45 100644
--- a/lib/dialyzer/test/dialyzer_SUITE.erl
+++ b/lib/dialyzer/test/dialyzer_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2014. All Rights Reserved.
+%% Copyright Ericsson AB 2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -30,12 +30,12 @@
-export([init_per_testcase/2, end_per_testcase/2]).
%% Test cases must be exported.
--export([app_test/1, appup_test/1, beam_tests/1]).
+-export([app_test/1, appup_test/1]).
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [app_test, appup_test, beam_tests].
+ [app_test, appup_test].
groups() ->
[].
@@ -75,38 +75,3 @@ app_test(Config) when is_list(Config) ->
%% Test that the .appup file does not contain any `basic' errors
appup_test(Config) when is_list(Config) ->
ok = ?t:appup_test(dialyzer).
-
-beam_tests(Config) when is_list(Config) ->
- Prog = <<"
- -module(no_auto_import).
-
- %% Copied from erl_lint_SUITE.erl, clash6
-
- -export([size/1]).
-
- size([]) ->
- 0;
- size({N,_}) ->
- N;
- size([_|T]) ->
- 1+size(T).
- ">>,
- Opts = [no_auto_import],
- {ok, BeamFile} = compile(Config, Prog, no_auto_import, Opts),
- [] = run_dialyzer([BeamFile]),
- ok.
-
-compile(Config, Prog, Module, CompileOpts) ->
- Source = lists:concat([Module, ".erl"]),
- PrivDir = ?config(priv_dir,Config),
- Filename = filename:join([PrivDir, Source]),
- ok = file:write_file(Filename, Prog),
- Opts = [{outdir, PrivDir}, debug_info | CompileOpts],
- {ok, Module} = compile:file(Filename, Opts),
- {ok, filename:join([PrivDir, lists:concat([Module, ".beam"])])}.
-
-run_dialyzer(Files) ->
- dialyzer:run([{analysis_type, plt_build},
- {files, Files},
- {from, byte_code},
- {check_plt, false}]).
diff --git a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
index 44a65f6e90..3ff26b87db 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
@@ -1 +1 @@
-{dialyzer_options, [{warnings, [no_unused, no_return, no_unknown]}]}.
+{dialyzer_options, [{warnings, [no_unused, no_return]}]}.
diff --git a/lib/dialyzer/test/options1_SUITE_data/dialyzer_options b/lib/dialyzer/test/options1_SUITE_data/dialyzer_options
index 65d233ac0d..c612e77d3e 100644
--- a/lib/dialyzer/test/options1_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/options1_SUITE_data/dialyzer_options
@@ -1,2 +1,2 @@
-{dialyzer_options, [{include_dirs, ["my_include"]}, {defines, [{'COMPILER_VSN', 42}]}, {warnings, [no_improper_lists, no_unknown]}]}.
+{dialyzer_options, [{include_dirs, ["my_include"]}, {defines, [{'COMPILER_VSN', 42}]}, {warnings, [no_improper_lists]}]}.
{time_limit, 30}.
diff --git a/lib/dialyzer/test/plt_SUITE.erl b/lib/dialyzer/test/plt_SUITE.erl
index aee9f449a6..ecbac14e5d 100644
--- a/lib/dialyzer/test/plt_SUITE.erl
+++ b/lib/dialyzer/test/plt_SUITE.erl
@@ -1,17 +1,18 @@
%% This suite is the only hand made and simply
-%% checks if we can build a plt.
+%% checks if we can build and update a plt.
-module(plt_SUITE).
-include_lib("common_test/include/ct.hrl").
-include("dialyzer_test_constants.hrl").
--export([suite/0, all/0, build_plt/1]).
+-export([suite/0, all/0, build_plt/1, beam_tests/1, update_plt/1,
+ run_plt_check/1, run_succ_typings/1]).
suite() ->
[{timetrap, ?plt_timeout}].
-all() -> [build_plt].
+all() -> [build_plt, beam_tests, update_plt, run_plt_check, run_succ_typings].
build_plt(Config) ->
OutDir = ?config(priv_dir, Config),
@@ -19,3 +20,155 @@ build_plt(Config) ->
ok -> ok;
fail -> ct:fail(plt_build_fail)
end.
+
+beam_tests(Config) when is_list(Config) ->
+ Prog = <<"
+ -module(no_auto_import).
+
+ %% Copied from erl_lint_SUITE.erl, clash6
+
+ -export([size/1]).
+
+ size([]) ->
+ 0;
+ size({N,_}) ->
+ N;
+ size([_|T]) ->
+ 1+size(T).
+ ">>,
+ Opts = [no_auto_import],
+ {ok, BeamFile} = compile(Config, Prog, no_auto_import, Opts),
+ [] = run_dialyzer(plt_build, [BeamFile], []),
+ ok.
+
+run_plt_check(Config) when is_list(Config) ->
+ Mod1 = <<"
+ -module(run_plt_check1).
+ ">>,
+
+ Mod2A = <<"
+ -module(run_plt_check2).
+ ">>,
+
+ {ok, BeamFile1} = compile(Config, Mod1, run_plt_check1, []),
+ {ok, BeamFile2} = compile(Config, Mod2A, run_plt_check2, []),
+ [] = run_dialyzer(plt_build, [BeamFile1, BeamFile2], []),
+
+ Mod2B = <<"
+ -module(run_plt_check2).
+
+ -export([call/1]).
+
+ call(X) -> run_plt_check1:call(X).
+ ">>,
+
+ {ok, BeamFile2} = compile(Config, Mod2B, run_plt_check2, []),
+
+ % A callgraph warning is expected, as run_plt_check2:call/1 calls the
+ % unexported function run_plt_check1:call/1.
+ [_] = run_dialyzer(plt_check, [], []),
+
+ ok.
+
+run_succ_typings(Config) when is_list(Config) ->
+ Mod1A = <<"
+ -module(run_succ_typings1).
+
+ -export([call/0]).
+
+ call() -> a.
+ ">>,
+
+ {ok, BeamFile1} = compile(Config, Mod1A, run_succ_typings1, []),
+ [] = run_dialyzer(plt_build, [BeamFile1], []),
+
+ Mod1B = <<"
+ -module(run_succ_typings1).
+
+ -export([call/0]).
+
+ call() -> b.
+ ">>,
+
+ Mod2 = <<"
+ -module(run_succ_typings2).
+
+ -export([call/0]).
+
+ -spec call() -> b.
+ call() -> run_succ_typings1:call().
+ ">>,
+
+ {ok, BeamFile1} = compile(Config, Mod1B, run_succ_typings1, []),
+ {ok, BeamFile2} = compile(Config, Mod2, run_succ_typings2, []),
+ % A contract types warning is expected, as run_succ_typings2:call/0 calls
+ % run_succ_typings1:call/0, which returns a (not b) according to the PLT.
+ [_] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, false}]),
+ % No warning is returned, as run_succ_typings1 is updated in the PLT.
+ [] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, true}]),
+
+ ok.
+
+%%% [James Fish:]
+%%% If a function is removed from a module and the module has previously
+%%% been added to a PLT, the function will not be removed from the PLT
+%%% when the PLT is checked. This results in Dialyzer failing to produce
+%%% a callgraph warning when doing success typings analysis if the
+%%% removed function is still called in another module.
+%%% As the function is not removed from the PLT, a prior warning, such as
+%%% a contract types warning, might be emitted when the removed function
+%%% no longer exists.
+update_plt(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Prog1 = <<"-module(plt_gc).
+ -export([one/0]).
+ one() ->
+ one.">>,
+ {ok, Beam} = compile(Config, Prog1, plt_gc, []),
+
+ ErlangBeam = case code:where_is_file("erlang.beam") of
+ non_existing ->
+ filename:join([code:root_dir(),
+ "erts", "preloaded", "ebin",
+ "erlang.beam"]);
+ EBeam ->
+ EBeam
+ end,
+ Plt = filename:join(PrivDir, "plt_gc.plt"),
+ Opts = [{check_plt, true}, {from, byte_code}],
+ [] = dialyzer:run([{analysis_type, plt_build},
+ {files, [Beam, ErlangBeam]},
+ {output_plt, Plt}] ++ Opts),
+
+ Prog2 = <<"-module(plt_gc).
+ -export([two/0]).
+ two() ->
+ two.">>,
+ {ok, Beam} = compile(Config, Prog2, plt_gc, []),
+
+ Test = <<"-module(test).
+ -export([test/0]).
+ -spec test() -> test.
+ test() ->
+ plt_gc:one().">>,
+ {ok, TestBeam} = compile(Config, Test, test, []),
+ [{warn_callgraph, _, {call_to_missing, [plt_gc, one, 0]}}] =
+ dialyzer:run([{analysis_type, succ_typings},
+ {files, [TestBeam]},
+ {init_plt, Plt}] ++ Opts),
+ ok.
+
+compile(Config, Prog, Module, CompileOpts) ->
+ Source = lists:concat([Module, ".erl"]),
+ PrivDir = ?config(priv_dir,Config),
+ Filename = filename:join([PrivDir, Source]),
+ ok = file:write_file(Filename, Prog),
+ Opts = [{outdir, PrivDir}, debug_info | CompileOpts],
+ {ok, Module} = compile:file(Filename, Opts),
+ {ok, filename:join([PrivDir, lists:concat([Module, ".beam"])])}.
+
+run_dialyzer(Analysis, Files, Opts) ->
+ dialyzer:run([{analysis_type, Analysis},
+ {files, Files},
+ {from, byte_code} |
+ Opts]).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/dialyzer_options b/lib/dialyzer/test/r9c_SUITE_data/dialyzer_options
index ba0e6b1ad7..e00e23bb66 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/r9c_SUITE_data/dialyzer_options
@@ -1,2 +1,2 @@
-{dialyzer_options, [{defines, [{vsn, 42}]}, {warnings, [no_unknown]}]}.
+{dialyzer_options, [{defines, [{vsn, 42}]}]}.
{time_limit, 20}.
diff --git a/lib/dialyzer/test/race_SUITE_data/dialyzer_options b/lib/dialyzer/test/race_SUITE_data/dialyzer_options
index 6992fc6c40..44e1720715 100644
--- a/lib/dialyzer/test/race_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/race_SUITE_data/dialyzer_options
@@ -1 +1 @@
-{dialyzer_options, [{warnings, [race_conditions, no_unknown]}]}.
+{dialyzer_options, [{warnings, [race_conditions]}]}.
diff --git a/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl b/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl
new file mode 100644
index 0000000000..5134cc6f0b
--- /dev/null
+++ b/lib/dialyzer/test/race_SUITE_data/src/ets_insert_args1_suppressed.erl
@@ -0,0 +1,19 @@
+%% This tests the presence of possible races due to an ets:lookup/ets:insert
+%% combination. It takes into account the argument types of the calls.
+
+-module(ets_insert_args1_suppressed).
+-export([start/0]).
+
+-dialyzer({nowarn_function,start/0}).
+
+start() ->
+ F = fun(T) -> [{_, N}] = ets:lookup(T, counter),
+ ets:insert(T, [{counter, N+1}])
+ end,
+ io:format("Created ~w\n", [ets:new(foo, [named_table, public])]),
+ ets:insert(foo, {counter, 0}),
+ io:format("Inserted ~w\n", [{counter, 0}]),
+ F(foo),
+ io:format("Update complete\n", []),
+ ObjectList = ets:lookup(foo, counter),
+ io:format("Counter: ~w\n", [ObjectList]).
diff --git a/lib/dialyzer/test/small_SUITE_data/dialyzer_options b/lib/dialyzer/test/small_SUITE_data/dialyzer_options
index 0d91699e4d..50991c9bc5 100644
--- a/lib/dialyzer/test/small_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/small_SUITE_data/dialyzer_options
@@ -1 +1 @@
-{dialyzer_options, [{warnings, [no_unknown]}]}.
+{dialyzer_options, []}.
diff --git a/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed b/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed
new file mode 100644
index 0000000000..40733434f6
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/blame_contract_range_suppressed
@@ -0,0 +1,2 @@
+
+blame_contract_range_suppressed.erl:8: Function foo/0 has no local return
diff --git a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes
index fbdd182358..a9fbfb6068 100644
--- a/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes
+++ b/lib/dialyzer/test/small_SUITE_data/results/contracts_with_subtypes
@@ -6,23 +6,27 @@ contracts_with_subtypes.erl:135: The call contracts_with_subtypes:rec2({'a','b'}
contracts_with_subtypes.erl:136: The call contracts_with_subtypes:rec2({'b','a'}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
contracts_with_subtypes.erl:137: The call contracts_with_subtypes:rec2({'a',{'b','a'}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
contracts_with_subtypes.erl:138: The call contracts_with_subtypes:rec2({'b',{'a','b'}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
-contracts_with_subtypes.erl:171: The pattern 1 can never match the type string()
-contracts_with_subtypes.erl:174: The pattern 'alpha' can never match the type {'ok',_} | {'ok',_,string()}
-contracts_with_subtypes.erl:176: The pattern 42 can never match the type {'ok',_} | {'ok',_,string()}
-contracts_with_subtypes.erl:192: The pattern 'alpha' can never match the type {'ok',_}
-contracts_with_subtypes.erl:194: The pattern 42 can never match the type {'ok',_}
-contracts_with_subtypes.erl:212: The pattern 'alpha' can never match the type {'ok',_}
-contracts_with_subtypes.erl:214: The pattern 42 can never match the type {'ok',_}
-contracts_with_subtypes.erl:231: The pattern 1 can never match the type string()
-contracts_with_subtypes.erl:234: The pattern {'ok', _} can never match the type {'ok',_,string()}
-contracts_with_subtypes.erl:235: The pattern 'alpha' can never match the type {'ok',_,string()}
-contracts_with_subtypes.erl:236: The pattern {'ok', 42} can never match the type {'ok',_,string()}
-contracts_with_subtypes.erl:237: The pattern 42 can never match the type {'ok',_,string()}
+contracts_with_subtypes.erl:139: The call contracts_with_subtypes:rec2({'a',{'b',{'a','b'}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
+contracts_with_subtypes.erl:140: The call contracts_with_subtypes:rec2({'b',{'a',{'b','a'}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
+contracts_with_subtypes.erl:141: The call contracts_with_subtypes:rec2({'a',{'b',{'a',{'b','a'}}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
+contracts_with_subtypes.erl:142: The call contracts_with_subtypes:rec2({'b',{'a',{'b',{'a','b'}}}}) breaks the contract (Arg) -> 'ok' when is_subtype(Arg,ab())
+contracts_with_subtypes.erl:175: The pattern 1 can never match the type string()
+contracts_with_subtypes.erl:178: The pattern 'alpha' can never match the type {'ok',_} | {'ok',_,string()}
+contracts_with_subtypes.erl:180: The pattern 42 can never match the type {'ok',_} | {'ok',_,string()}
+contracts_with_subtypes.erl:196: The pattern 'alpha' can never match the type {'ok',_}
+contracts_with_subtypes.erl:198: The pattern 42 can never match the type {'ok',_}
+contracts_with_subtypes.erl:216: The pattern 'alpha' can never match the type {'ok',_}
+contracts_with_subtypes.erl:218: The pattern 42 can never match the type {'ok',_}
+contracts_with_subtypes.erl:235: The pattern 1 can never match the type string()
+contracts_with_subtypes.erl:238: The pattern {'ok', _} can never match the type {'ok',_,string()}
+contracts_with_subtypes.erl:239: The pattern 'alpha' can never match the type {'ok',_,string()}
contracts_with_subtypes.erl:23: Invalid type specification for function contracts_with_subtypes:extract2/0. The success typing is () -> 'something'
-contracts_with_subtypes.erl:263: Function flat_ets_new_t/0 has no local return
-contracts_with_subtypes.erl:264: The call contracts_with_subtypes:flat_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,'set' | 'ordered_set' | 'bag' | 'duplicate_bag' | 'public' | 'protected' | 'private' | 'named_table' | {'keypos',integer()} | {'heir',pid(),term()} | {'heir','none'} | {'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed')
-contracts_with_subtypes.erl:290: Function factored_ets_new_t/0 has no local return
-contracts_with_subtypes.erl:291: The call contracts_with_subtypes:factored_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,Type | Access | 'named_table' | {'keypos',Pos} | {'heir',Pid::pid(),HeirData} | {'heir','none'} | Tweaks), is_subtype(Type,type()), is_subtype(Access,access()), is_subtype(Tweaks,{'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed'), is_subtype(Pos,pos_integer()), is_subtype(HeirData,term())
+contracts_with_subtypes.erl:240: The pattern {'ok', 42} can never match the type {'ok',_,string()}
+contracts_with_subtypes.erl:241: The pattern 42 can never match the type {'ok',_,string()}
+contracts_with_subtypes.erl:267: Function flat_ets_new_t/0 has no local return
+contracts_with_subtypes.erl:268: The call contracts_with_subtypes:flat_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,'set' | 'ordered_set' | 'bag' | 'duplicate_bag' | 'public' | 'protected' | 'private' | 'named_table' | {'keypos',integer()} | {'heir',pid(),term()} | {'heir','none'} | {'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed')
+contracts_with_subtypes.erl:294: Function factored_ets_new_t/0 has no local return
+contracts_with_subtypes.erl:295: The call contracts_with_subtypes:factored_ets_new(12,[]) breaks the contract (Name,Options) -> atom() when is_subtype(Name,atom()), is_subtype(Options,[Option]), is_subtype(Option,Type | Access | 'named_table' | {'keypos',Pos} | {'heir',Pid::pid(),HeirData} | {'heir','none'} | Tweaks), is_subtype(Type,type()), is_subtype(Access,access()), is_subtype(Tweaks,{'write_concurrency',boolean()} | {'read_concurrency',boolean()} | 'compressed'), is_subtype(Pos,pos_integer()), is_subtype(HeirData,term())
contracts_with_subtypes.erl:77: The call contracts_with_subtypes:foo1(5) breaks the contract (Arg1) -> Res when is_subtype(Arg1,atom()), is_subtype(Res,atom())
contracts_with_subtypes.erl:78: The call contracts_with_subtypes:foo2(5) breaks the contract (Arg1) -> Res when is_subtype(Arg1,Arg2), is_subtype(Arg2,atom()), is_subtype(Res,atom())
contracts_with_subtypes.erl:79: The call contracts_with_subtypes:foo3(5) breaks the contract (Arg1) -> Res when is_subtype(Arg2,atom()), is_subtype(Arg1,Arg2), is_subtype(Res,atom())
diff --git a/lib/dialyzer/test/small_SUITE_data/results/maps_sum b/lib/dialyzer/test/small_SUITE_data/results/maps_sum
new file mode 100644
index 0000000000..a19c0bba96
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/maps_sum
@@ -0,0 +1,4 @@
+
+maps_sum.erl:15: Invalid type specification for function maps_sum:wrong1/1. The success typing is (#{}) -> any()
+maps_sum.erl:26: Function wrong2/1 has no local return
+maps_sum.erl:27: The call lists:foldl(fun((_,_,_) -> any()),0,Data::any()) will never return since it differs in the 1st argument from the success typing arguments: (fun((_,_) -> any()),any(),[any()])
diff --git a/lib/dialyzer/test/small_SUITE_data/results/request1 b/lib/dialyzer/test/small_SUITE_data/results/request1
new file mode 100644
index 0000000000..0cf4017403
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/request1
@@ -0,0 +1,2 @@
+
+request1.erl:8: Expression produces a value of type {'a','b'}, but this value is unmatched
diff --git a/lib/dialyzer/test/small_SUITE_data/results/suppress_request b/lib/dialyzer/test/small_SUITE_data/results/suppress_request
new file mode 100644
index 0000000000..18e82b7972
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/suppress_request
@@ -0,0 +1,6 @@
+
+suppress_request.erl:21: Expression produces a value of type {'a','b'}, but this value is unmatched
+suppress_request.erl:25: Expression produces a value of type {'a','b'}, but this value is unmatched
+suppress_request.erl:35: Function test3_b/0 has no local return
+suppress_request.erl:39: Guard test 2 =:= A::fun((none()) -> no_return()) can never succeed
+suppress_request.erl:7: Type specification suppress_request:test1('a' | 'b') -> 'ok' is a subtype of the success typing: suppress_request:test1('a' | 'b' | 'c') -> 'ok'
diff --git a/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl b/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl
new file mode 100644
index 0000000000..91a157b17f
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/big_external_type.erl
@@ -0,0 +1,528 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%% A stripped version of erl_parse.yrl.
+%%%
+%%% A type for the abstract format with *external* types has been added.
+%%% The type of the abstract format is not up-to-date, but it does not
+%%% matter since the purpose of the type is to stress the conversion
+%%% of type forms to erl_type().
+
+-module(big_external_type).
+
+-export([parse_form/1,parse_exprs/1,parse_term/1]).
+-export([normalise/1,tokens/1,tokens/2]).
+-export([inop_prec/1,preop_prec/1,func_prec/0,max_prec/0]).
+
+-export_type([abstract_clause/0, abstract_expr/0, abstract_form/0,
+ error_info/0]).
+
+%% Start of Abstract Format
+
+-type line() :: erl_scan:line().
+
+-export_type([af_record_index/0, af_record_field/1, af_record_name/0,
+ af_field_name/0, af_function_decl/0]).
+
+-export_type([af_module/0, af_export/0, af_import/0, af_fa_list/0,
+ af_compile/0, af_file/0, af_record_decl/0,
+ af_field_decl/0, af_wild_attribute/0,
+ af_record_update/1, af_catch/0, af_local_call/0,
+ af_remote_call/0, af_args/0, af_local_function/0,
+ af_remote_function/0, af_list_comprehension/0,
+ af_binary_comprehension/0, af_template/0,
+ af_qualifier_seq/0, af_qualifier/0, af_generator/0,
+ af_filter/0, af_block/0, af_if/0, af_case/0, af_try/0,
+ af_clause_seq/0, af_catch_clause_seq/0, af_receive/0,
+ af_local_fun/0, af_remote_fun/0, af_fun/0, af_query/0,
+ af_query_access/0, af_clause/0,
+ af_catch_clause/0, af_catch_pattern/0, af_catch_class/0,
+ af_body/0, af_guard_seq/0, af_guard/0, af_guard_test/0,
+ af_record_access/1, af_guard_call/0,
+ af_remote_guard_call/0, af_pattern/0, af_literal/0,
+ af_atom/0, af_lit_atom/1, af_integer/0, af_float/0,
+ af_string/0, af_match/1, af_variable/0,
+ af_anon_variable/0, af_tuple/1, af_nil/0, af_cons/1,
+ af_bin/1, af_binelement/1, af_binelement_size/0,
+ af_binary_op/1, af_binop/0, af_unary_op/1, af_unop/0]).
+
+-type abstract_form() :: ?MODULE:af_module()
+ | ?MODULE:af_export()
+ | ?MODULE:af_import()
+ | ?MODULE:af_compile()
+ | ?MODULE:af_file()
+ | ?MODULE:af_record_decl()
+ | ?MODULE:af_wild_attribute()
+ | ?MODULE:af_function_decl().
+
+-type af_module() :: {attribute, line(), module, module()}.
+
+-type af_export() :: {attribute, line(), export, ?MODULE:af_fa_list()}.
+
+-type af_import() :: {attribute, line(), import, ?MODULE:af_fa_list()}.
+
+-type af_fa_list() :: [{function(), arity()}].
+
+-type af_compile() :: {attribute, line(), compile, any()}.
+
+-type af_file() :: {attribute, line(), file, {string(), line()}}.
+
+-type af_record_decl() ::
+ {attribute, line(), record, ?MODULE:af_record_name(), [?MODULE:af_field_decl()]}.
+
+-type af_field_decl() :: {record_field, line(), ?MODULE:af_atom()}
+ | {record_field, line(), ?MODULE:af_atom(), ?MODULE:abstract_expr()}.
+
+%% Types and specs, among other things...
+-type af_wild_attribute() :: {attribute, line(), ?MODULE:af_atom(), any()}.
+
+-type af_function_decl() ::
+ {function, line(), function(), arity(), ?MODULE:af_clause_seq()}.
+
+-type abstract_expr() :: ?MODULE:af_literal()
+ | ?MODULE:af_match(?MODULE:abstract_expr())
+ | ?MODULE:af_variable()
+ | ?MODULE:af_tuple(?MODULE:abstract_expr())
+ | ?MODULE:af_nil()
+ | ?MODULE:af_cons(?MODULE:abstract_expr())
+ | ?MODULE:af_bin(?MODULE:abstract_expr())
+ | ?MODULE:af_binary_op(?MODULE:abstract_expr())
+ | ?MODULE:af_unary_op(?MODULE:abstract_expr())
+ | ?MODULE:af_record_access(?MODULE:abstract_expr())
+ | ?MODULE:af_record_update(?MODULE:abstract_expr())
+ | ?MODULE:af_record_index()
+ | ?MODULE:af_record_field(?MODULE:abstract_expr())
+ | ?MODULE:af_catch()
+ | ?MODULE:af_local_call()
+ | ?MODULE:af_remote_call()
+ | ?MODULE:af_list_comprehension()
+ | ?MODULE:af_binary_comprehension()
+ | ?MODULE:af_block()
+ | ?MODULE:af_if()
+ | ?MODULE:af_case()
+ | ?MODULE:af_try()
+ | ?MODULE:af_receive()
+ | ?MODULE:af_local_fun()
+ | ?MODULE:af_remote_fun()
+ | ?MODULE:af_fun()
+ | ?MODULE:af_query()
+ | ?MODULE:af_query_access().
+
+-type af_record_update(T) :: {record,
+ line(),
+ ?MODULE:abstract_expr(),
+ ?MODULE:af_record_name(),
+ [?MODULE:af_record_field(T)]}.
+
+-type af_catch() :: {'catch', line(), ?MODULE:abstract_expr()}.
+
+-type af_local_call() :: {call, line(), ?MODULE:af_local_function(), ?MODULE:af_args()}.
+
+-type af_remote_call() :: {call, line(), ?MODULE:af_remote_function(), ?MODULE:af_args()}.
+
+-type af_args() :: [?MODULE:abstract_expr()].
+
+-type af_local_function() :: ?MODULE:abstract_expr().
+
+-type af_remote_function() ::
+ {remote, line(), ?MODULE:abstract_expr(), ?MODULE:abstract_expr()}.
+
+-type af_list_comprehension() ::
+ {lc, line(), ?MODULE:af_template(), ?MODULE:af_qualifier_seq()}.
+
+-type af_binary_comprehension() ::
+ {bc, line(), ?MODULE:af_template(), ?MODULE:af_qualifier_seq()}.
+
+-type af_template() :: ?MODULE:abstract_expr().
+
+-type af_qualifier_seq() :: [?MODULE:af_qualifier()].
+
+-type af_qualifier() :: ?MODULE:af_generator() | ?MODULE:af_filter().
+
+-type af_generator() :: {generate, line(), ?MODULE:af_pattern(), ?MODULE:abstract_expr()}
+ | {b_generate, line(), ?MODULE:af_pattern(), ?MODULE:abstract_expr()}.
+
+-type af_filter() :: ?MODULE:abstract_expr().
+
+-type af_block() :: {block, line(), ?MODULE:af_body()}.
+
+-type af_if() :: {'if', line(), ?MODULE:af_clause_seq()}.
+
+-type af_case() :: {'case', line(), ?MODULE:abstract_expr(), ?MODULE:af_clause_seq()}.
+
+-type af_try() :: {'try',
+ line(),
+ ?MODULE:af_body(),
+ ?MODULE:af_clause_seq(),
+ ?MODULE:af_catch_clause_seq(),
+ ?MODULE:af_body()}.
+
+-type af_clause_seq() :: [?MODULE:af_clause(), ...].
+
+-type af_catch_clause_seq() :: [?MODULE:af_clause(), ...].
+
+-type af_receive() ::
+ {'receive', line(), ?MODULE:af_clause_seq()}
+ | {'receive', line(), ?MODULE:af_clause_seq(), ?MODULE:abstract_expr(), ?MODULE:af_body()}.
+
+-type af_local_fun() :: {'fun', line(), {function, function(), arity()}}.
+
+-type af_remote_fun() ::
+ {'fun', line(), {function, module(), function(), arity()}}
+ | {'fun', line(), {function, ?MODULE:af_atom(), ?MODULE:af_atom(), ?MODULE:af_integer()}}.
+
+-type af_fun() :: {'fun', line(), {clauses, ?MODULE:af_clause_seq()}}.
+
+-type af_query() :: {'query', line(), ?MODULE:af_list_comprehension()}.
+
+-type af_query_access() ::
+ {record_field, line(), ?MODULE:abstract_expr(), ?MODULE:af_field_name()}.
+
+-type abstract_clause() :: ?MODULE:af_clause() | ?MODULE:af_catch_clause().
+
+-type af_clause() ::
+ {clause, line(), [?MODULE:af_pattern()], ?MODULE:af_guard_seq(), ?MODULE:af_body()}.
+
+-type af_catch_clause() ::
+ {clause, line(), [?MODULE:af_catch_pattern()], ?MODULE:af_guard_seq(), ?MODULE:af_body()}.
+
+-type af_catch_pattern() ::
+ {?MODULE:af_catch_class(), ?MODULE:af_pattern(), ?MODULE:af_anon_variable()}.
+
+-type af_catch_class() ::
+ ?MODULE:af_variable()
+ | ?MODULE:af_lit_atom(throw) | ?MODULE:af_lit_atom(error) | ?MODULE:af_lit_atom(exit).
+
+-type af_body() :: [?MODULE:abstract_expr(), ...].
+
+-type af_guard_seq() :: [?MODULE:af_guard()].
+
+-type af_guard() :: [?MODULE:af_guard_test(), ...].
+
+-type af_guard_test() :: ?MODULE:af_literal()
+ | ?MODULE:af_variable()
+ | ?MODULE:af_tuple(?MODULE:af_guard_test())
+ | ?MODULE:af_nil()
+ | ?MODULE:af_cons(?MODULE:af_guard_test())
+ | ?MODULE:af_bin(?MODULE:af_guard_test())
+ | ?MODULE:af_binary_op(?MODULE:af_guard_test())
+ | ?MODULE:af_unary_op(?MODULE:af_guard_test())
+ | ?MODULE:af_record_access(?MODULE:af_guard_test())
+ | ?MODULE:af_record_index()
+ | ?MODULE:af_record_field(?MODULE:af_guard_test())
+ | ?MODULE:af_guard_call()
+ | ?MODULE:af_remote_guard_call().
+
+-type af_record_access(T) ::
+ {record, line(), ?MODULE:af_record_name(), [?MODULE:af_record_field(T)]}.
+
+-type af_guard_call() :: {call, line(), function(), [?MODULE:af_guard_test()]}.
+
+-type af_remote_guard_call() ::
+ {call, line(), atom(), ?MODULE:af_lit_atom(erlang), [?MODULE:af_guard_test()]}.
+
+-type af_pattern() :: ?MODULE:af_literal()
+ | ?MODULE:af_match(?MODULE:af_pattern())
+ | ?MODULE:af_variable()
+ | ?MODULE:af_anon_variable()
+ | ?MODULE:af_tuple(?MODULE:af_pattern())
+ | ?MODULE:af_nil()
+ | ?MODULE:af_cons(?MODULE:af_pattern())
+ | ?MODULE:af_bin(?MODULE:af_pattern())
+ | ?MODULE:af_binary_op(?MODULE:af_pattern())
+ | ?MODULE:af_unary_op(?MODULE:af_pattern())
+ | ?MODULE:af_record_index()
+ | ?MODULE:af_record_field(?MODULE:af_pattern()).
+
+-type af_literal() :: ?MODULE:af_atom() | ?MODULE:af_integer() | ?MODULE:af_float() | ?MODULE:af_string().
+
+-type af_atom() :: ?MODULE:af_lit_atom(atom()).
+
+-type af_lit_atom(A) :: {atom, line(), A}.
+
+-type af_integer() :: {integer, line(), non_neg_integer()}.
+
+-type af_float() :: {float, line(), float()}.
+
+-type af_string() :: {string, line(), [byte()]}.
+
+-type af_match(T) :: {match, line(), T, T}.
+
+-type af_variable() :: {var, line(), atom()}.
+
+-type af_anon_variable() :: {var, line(), '_'}.
+
+-type af_tuple(T) :: {tuple, line(), [T]}.
+
+-type af_nil() :: {nil, line()}.
+
+-type af_cons(T) :: {cons, line, T, T}.
+
+-type af_bin(T) :: {bin, line(), [?MODULE:af_binelement(T)]}.
+
+-type af_binelement(T) :: {bin_element,
+ line(),
+ T,
+ ?MODULE:af_binelement_size(),
+ type_specifier_list()}.
+
+-type af_binelement_size() :: default | ?MODULE:abstract_expr().
+
+-type af_binary_op(T) :: {op, line(), T, ?MODULE:af_binop(), T}.
+
+-type af_binop() :: '/' | '*' | 'div' | 'rem' | 'band' | 'and' | '+' | '-'
+ | 'bor' | 'bxor' | 'bsl' | 'bsr' | 'or' | 'xor' | '++'
+ | '--' | '==' | '/=' | '=<' | '<' | '>=' | '>' | '=:='
+ | '=/='.
+
+-type af_unary_op(T) :: {op, line(), ?MODULE:af_unop(), T}.
+
+-type af_unop() :: '+' | '*' | 'bnot' | 'not'.
+
+%% See also lib/stdlib/{src/erl_bits.erl,include/erl_bits.hrl}.
+-type type_specifier_list() :: default | [type_specifier(), ...].
+
+-type type_specifier() :: af_type()
+ | af_signedness()
+ | af_endianness()
+ | af_unit().
+
+-type af_type() :: integer
+ | float
+ | binary
+ | bytes
+ | bitstring
+ | bits
+ | utf8
+ | utf16
+ | utf32.
+
+-type af_signedness() :: signed | unsigned.
+
+-type af_endianness() :: big | little | native.
+
+-type af_unit() :: {unit, 1..256}.
+
+-type af_record_index() ::
+ {record_index, line(), af_record_name(), af_field_name()}.
+
+-type af_record_field(T) :: {record_field, line(), af_field_name(), T}.
+
+-type af_record_name() :: atom().
+
+-type af_field_name() :: atom().
+
+%% End of Abstract Format
+
+-type error_description() :: term().
+-type error_info() :: {erl_scan:line(), module(), error_description()}.
+-type token() :: {Tag :: atom(), Line :: erl_scan:line()}.
+
+%% mkop(Op, Arg) -> {op,Line,Op,Arg}.
+%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}.
+
+-define(mkop2(L, OpPos, R),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,L,R}
+ end).
+
+-define(mkop1(OpPos, A),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,A}
+ end).
+
+%% keep track of line info in tokens
+-define(line(Tup), element(2, Tup)).
+
+%% Entry points compatible with the old erl_parse.
+%% These really suck and are only here until Calle gets multiple
+%% entry points working.
+
+-spec parse_form(Tokens) -> {ok, AbsForm} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ AbsForm :: abstract_form(),
+ ErrorInfo :: error_info().
+parse_form([{'-',L1},{atom,L2,spec}|Tokens]) ->
+ parse([{'-',L1},{'spec',L2}|Tokens]);
+parse_form([{'-',L1},{atom,L2,callback}|Tokens]) ->
+ parse([{'-',L1},{'callback',L2}|Tokens]);
+parse_form(Tokens) ->
+ parse(Tokens).
+
+-spec parse_exprs(Tokens) -> {ok, ExprList} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ ExprList :: [abstract_expr()],
+ ErrorInfo :: error_info().
+parse_exprs(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} ->
+ {ok,Exprs};
+ {error,_} = Err -> Err
+ end.
+
+-spec parse_term(Tokens) -> {ok, Term} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ Term :: term(),
+ ErrorInfo :: error_info().
+parse_term(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[Expr]}]}} ->
+ try normalise(Expr) of
+ Term -> {ok,Term}
+ catch
+ _:_R -> {error,{?line(Expr),?MODULE,"bad term"}}
+ end;
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[_E1,E2|_Es]}]}} ->
+ {error,{?line(E2),?MODULE,"bad term"}};
+ {error,_} = Err -> Err
+ end.
+
+%% Convert between the abstract form of a term and a term.
+
+-spec normalise(AbsTerm) -> Data when
+ AbsTerm :: abstract_expr(),
+ Data :: term().
+normalise({char,_,C}) -> C;
+normalise({integer,_,I}) -> I;
+normalise({float,_,F}) -> F;
+normalise({atom,_,A}) -> A;
+normalise({string,_,S}) -> S;
+normalise({nil,_}) -> [];
+normalise({bin,_,Fs}) ->
+ {value, B, _} =
+ eval_bits:expr_grp(Fs, [],
+ fun(E, _) ->
+ {value, normalise(E), []}
+ end, [], true),
+ B;
+normalise({cons,_,Head,Tail}) ->
+ [normalise(Head)|normalise(Tail)];
+normalise({tuple,_,Args}) ->
+ list_to_tuple(normalise_list(Args));
+%% Atom dot-notation, as in 'foo.bar.baz'
+%% Special case for unary +/-.
+normalise({op,_,'+',{char,_,I}}) -> I;
+normalise({op,_,'+',{integer,_,I}}) -> I;
+normalise({op,_,'+',{float,_,F}}) -> F;
+normalise({op,_,'-',{char,_,I}}) -> -I; %Weird, but compatible!
+normalise({op,_,'-',{integer,_,I}}) -> -I;
+normalise({op,_,'-',{float,_,F}}) -> -F;
+normalise(X) -> erlang:error({badarg, X}).
+
+normalise_list([H|T]) ->
+ [normalise(H)|normalise_list(T)];
+normalise_list([]) ->
+ [].
+
+%% Generate a list of tokens representing the abstract term.
+
+-spec tokens(AbsTerm) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ Tokens :: [token()].
+tokens(Abs) ->
+ tokens(Abs, []).
+
+-spec tokens(AbsTerm, MoreTokens) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ MoreTokens :: [token()],
+ Tokens :: [token()].
+tokens({char,L,C}, More) -> [{char,L,C}|More];
+tokens({integer,L,N}, More) -> [{integer,L,N}|More];
+tokens({float,L,F}, More) -> [{float,L,F}|More];
+tokens({atom,L,A}, More) -> [{atom,L,A}|More];
+tokens({var,L,V}, More) -> [{var,L,V}|More];
+tokens({string,L,S}, More) -> [{string,L,S}|More];
+tokens({nil,L}, More) -> [{'[',L},{']',L}|More];
+tokens({cons,L,Head,Tail}, More) ->
+ [{'[',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens({tuple,L,[]}, More) ->
+ [{'{',L},{'}',L}|More];
+tokens({tuple,L,[E|Es]}, More) ->
+ [{'{',L}|tokens(E, tokens_tuple(Es, ?line(E), More))].
+
+tokens_tail({cons,L,Head,Tail}, More) ->
+ [{',',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens_tail({nil,L}, More) ->
+ [{']',L}|More];
+tokens_tail(Other, More) ->
+ L = ?line(Other),
+ [{'|',L}|tokens(Other, [{']',L}|More])].
+
+tokens_tuple([E|Es], Line, More) ->
+ [{',',Line}|tokens(E, tokens_tuple(Es, ?line(E), More))];
+tokens_tuple([], Line, More) ->
+ [{'}',Line}|More].
+
+%% Give the relative precedences of operators.
+
+inop_prec('=') -> {150,100,100};
+inop_prec('!') -> {150,100,100};
+inop_prec('orelse') -> {160,150,150};
+inop_prec('andalso') -> {200,160,160};
+inop_prec('==') -> {300,200,300};
+inop_prec('/=') -> {300,200,300};
+inop_prec('=<') -> {300,200,300};
+inop_prec('<') -> {300,200,300};
+inop_prec('>=') -> {300,200,300};
+inop_prec('>') -> {300,200,300};
+inop_prec('=:=') -> {300,200,300};
+inop_prec('=/=') -> {300,200,300};
+inop_prec('++') -> {400,300,300};
+inop_prec('--') -> {400,300,300};
+inop_prec('+') -> {400,400,500};
+inop_prec('-') -> {400,400,500};
+inop_prec('bor') -> {400,400,500};
+inop_prec('bxor') -> {400,400,500};
+inop_prec('bsl') -> {400,400,500};
+inop_prec('bsr') -> {400,400,500};
+inop_prec('or') -> {400,400,500};
+inop_prec('xor') -> {400,400,500};
+inop_prec('*') -> {500,500,600};
+inop_prec('/') -> {500,500,600};
+inop_prec('div') -> {500,500,600};
+inop_prec('rem') -> {500,500,600};
+inop_prec('band') -> {500,500,600};
+inop_prec('and') -> {500,500,600};
+inop_prec('#') -> {800,700,800};
+inop_prec(':') -> {900,800,900};
+inop_prec('.') -> {900,900,1000}.
+
+-type pre_op() :: 'catch' | '+' | '-' | 'bnot' | 'not' | '#'.
+
+-spec preop_prec(pre_op()) -> {0 | 600 | 700, 100 | 700 | 800}.
+
+preop_prec('catch') -> {0,100};
+preop_prec('+') -> {600,700};
+preop_prec('-') -> {600,700};
+preop_prec('bnot') -> {600,700};
+preop_prec('not') -> {600,700};
+preop_prec('#') -> {700,800}.
+
+-spec func_prec() -> {800,700}.
+
+func_prec() -> {800,700}.
+
+-spec max_prec() -> 1000.
+
+max_prec() -> 1000.
+
+parse(T) ->
+ bar:foo(T).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl b/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl
new file mode 100644
index 0000000000..6de263eda1
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/big_local_type.erl
@@ -0,0 +1,525 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%% A stripped version of erl_parse.yrl.
+%%%
+%%% A type for the abstract format with *local* types has been added.
+%%% The type of the abstract format is not up-to-date, but it does not
+%%% matter since the purpose of the type is to stress the conversion
+%%% of type forms to erl_type().
+
+-module(big_local_type).
+
+-export([parse_form/1,parse_exprs/1,parse_term/1]).
+-export([normalise/1,tokens/1,tokens/2]).
+-export([inop_prec/1,preop_prec/1,func_prec/0,max_prec/0]).
+
+-export_type([abstract_clause/0, abstract_expr/0, abstract_form/0,
+ error_info/0]).
+
+%% Start of Abstract Format
+
+-type line() :: erl_scan:line().
+
+-export_type([af_module/0, af_export/0, af_import/0, af_fa_list/0,
+ af_compile/0, af_file/0, af_record_decl/0,
+ af_field_decl/0, af_wild_attribute/0,
+ af_record_update/1, af_catch/0, af_local_call/0,
+ af_remote_call/0, af_args/0, af_local_function/0,
+ af_remote_function/0, af_list_comprehension/0,
+ af_binary_comprehension/0, af_template/0,
+ af_qualifier_seq/0, af_qualifier/0, af_generator/0,
+ af_filter/0, af_block/0, af_if/0, af_case/0, af_try/0,
+ af_clause_seq/0, af_catch_clause_seq/0, af_receive/0,
+ af_local_fun/0, af_remote_fun/0, af_fun/0, af_query/0,
+ af_query_access/0, af_clause/0,
+ af_catch_clause/0, af_catch_pattern/0, af_catch_class/0,
+ af_body/0, af_guard_seq/0, af_guard/0, af_guard_test/0,
+ af_record_access/1, af_guard_call/0,
+ af_remote_guard_call/0, af_pattern/0, af_literal/0,
+ af_atom/0, af_lit_atom/1, af_integer/0, af_float/0,
+ af_string/0, af_match/1, af_variable/0,
+ af_anon_variable/0, af_tuple/1, af_nil/0, af_cons/1,
+ af_bin/1, af_binelement/1, af_binelement_size/0,
+ af_binary_op/1, af_binop/0, af_unary_op/1, af_unop/0]).
+
+-type abstract_form() :: af_module()
+ | af_export()
+ | af_import()
+ | af_compile()
+ | af_file()
+ | af_record_decl()
+ | af_wild_attribute()
+ | af_function_decl().
+
+-type af_module() :: {attribute, line(), module, module()}.
+
+-type af_export() :: {attribute, line(), export, af_fa_list()}.
+
+-type af_import() :: {attribute, line(), import, af_fa_list()}.
+
+-type af_fa_list() :: [{function(), arity()}].
+
+-type af_compile() :: {attribute, line(), compile, any()}.
+
+-type af_file() :: {attribute, line(), file, {string(), line()}}.
+
+-type af_record_decl() ::
+ {attribute, line(), record, af_record_name(), [af_field_decl()]}.
+
+-type af_field_decl() :: {record_field, line(), af_atom()}
+ | {record_field, line(), af_atom(), abstract_expr()}.
+
+%% Types and specs, among other things...
+-type af_wild_attribute() :: {attribute, line(), af_atom(), any()}.
+
+-type af_function_decl() ::
+ {function, line(), function(), arity(), af_clause_seq()}.
+
+-type abstract_expr() :: af_literal()
+ | af_match(abstract_expr())
+ | af_variable()
+ | af_tuple(abstract_expr())
+ | af_nil()
+ | af_cons(abstract_expr())
+ | af_bin(abstract_expr())
+ | af_binary_op(abstract_expr())
+ | af_unary_op(abstract_expr())
+ | af_record_access(abstract_expr())
+ | af_record_update(abstract_expr())
+ | af_record_index()
+ | af_record_field(abstract_expr())
+ | af_catch()
+ | af_local_call()
+ | af_remote_call()
+ | af_list_comprehension()
+ | af_binary_comprehension()
+ | af_block()
+ | af_if()
+ | af_case()
+ | af_try()
+ | af_receive()
+ | af_local_fun()
+ | af_remote_fun()
+ | af_fun()
+ | af_query()
+ | af_query_access().
+
+-type af_record_update(T) :: {record,
+ line(),
+ abstract_expr(),
+ af_record_name(),
+ [af_record_field(T)]}.
+
+-type af_catch() :: {'catch', line(), abstract_expr()}.
+
+-type af_local_call() :: {call, line(), af_local_function(), af_args()}.
+
+-type af_remote_call() :: {call, line(), af_remote_function(), af_args()}.
+
+-type af_args() :: [abstract_expr()].
+
+-type af_local_function() :: abstract_expr().
+
+-type af_remote_function() ::
+ {remote, line(), abstract_expr(), abstract_expr()}.
+
+-type af_list_comprehension() ::
+ {lc, line(), af_template(), af_qualifier_seq()}.
+
+-type af_binary_comprehension() ::
+ {bc, line(), af_template(), af_qualifier_seq()}.
+
+-type af_template() :: abstract_expr().
+
+-type af_qualifier_seq() :: [af_qualifier()].
+
+-type af_qualifier() :: af_generator() | af_filter().
+
+-type af_generator() :: {generate, line(), af_pattern(), abstract_expr()}
+ | {b_generate, line(), af_pattern(), abstract_expr()}.
+
+-type af_filter() :: abstract_expr().
+
+-type af_block() :: {block, line(), af_body()}.
+
+-type af_if() :: {'if', line(), af_clause_seq()}.
+
+-type af_case() :: {'case', line(), abstract_expr(), af_clause_seq()}.
+
+-type af_try() :: {'try',
+ line(),
+ af_body(),
+ af_clause_seq(),
+ af_catch_clause_seq(),
+ af_body()}.
+
+-type af_clause_seq() :: [af_clause(), ...].
+
+-type af_catch_clause_seq() :: [af_clause(), ...].
+
+-type af_receive() ::
+ {'receive', line(), af_clause_seq()}
+ | {'receive', line(), af_clause_seq(), abstract_expr(), af_body()}.
+
+-type af_local_fun() :: {'fun', line(), {function, function(), arity()}}.
+
+-type af_remote_fun() ::
+ {'fun', line(), {function, module(), function(), arity()}}
+ | {'fun', line(), {function, af_atom(), af_atom(), af_integer()}}.
+
+-type af_fun() :: {'fun', line(), {clauses, af_clause_seq()}}.
+
+-type af_query() :: {'query', line(), af_list_comprehension()}.
+
+-type af_query_access() ::
+ {record_field, line(), abstract_expr(), af_field_name()}.
+
+-type abstract_clause() :: af_clause() | af_catch_clause().
+
+-type af_clause() ::
+ {clause, line(), [af_pattern()], af_guard_seq(), af_body()}.
+
+-type af_catch_clause() ::
+ {clause, line(), [af_catch_pattern()], af_guard_seq(), af_body()}.
+
+-type af_catch_pattern() ::
+ {af_catch_class(), af_pattern(), af_anon_variable()}.
+
+-type af_catch_class() ::
+ af_variable()
+ | af_lit_atom(throw) | af_lit_atom(error) | af_lit_atom(exit).
+
+-type af_body() :: [abstract_expr(), ...].
+
+-type af_guard_seq() :: [af_guard()].
+
+-type af_guard() :: [af_guard_test(), ...].
+
+-type af_guard_test() :: af_literal()
+ | af_variable()
+ | af_tuple(af_guard_test())
+ | af_nil()
+ | af_cons(af_guard_test())
+ | af_bin(af_guard_test())
+ | af_binary_op(af_guard_test())
+ | af_unary_op(af_guard_test())
+ | af_record_access(af_guard_test())
+ | af_record_index()
+ | af_record_field(af_guard_test())
+ | af_guard_call()
+ | af_remote_guard_call().
+
+-type af_record_access(T) ::
+ {record, line(), af_record_name(), [af_record_field(T)]}.
+
+-type af_guard_call() :: {call, line(), function(), [af_guard_test()]}.
+
+-type af_remote_guard_call() ::
+ {call, line(), atom(), af_lit_atom(erlang), [af_guard_test()]}.
+
+-type af_pattern() :: af_literal()
+ | af_match(af_pattern())
+ | af_variable()
+ | af_anon_variable()
+ | af_tuple(af_pattern())
+ | af_nil()
+ | af_cons(af_pattern())
+ | af_bin(af_pattern())
+ | af_binary_op(af_pattern())
+ | af_unary_op(af_pattern())
+ | af_record_index()
+ | af_record_field(af_pattern()).
+
+-type af_literal() :: af_atom() | af_integer() | af_float() | af_string().
+
+-type af_atom() :: af_lit_atom(atom()).
+
+-type af_lit_atom(A) :: {atom, line(), A}.
+
+-type af_integer() :: {integer, line(), non_neg_integer()}.
+
+-type af_float() :: {float, line(), float()}.
+
+-type af_string() :: {string, line(), [byte()]}.
+
+-type af_match(T) :: {match, line(), T, T}.
+
+-type af_variable() :: {var, line(), atom()}.
+
+-type af_anon_variable() :: {var, line(), '_'}.
+
+-type af_tuple(T) :: {tuple, line(), [T]}.
+
+-type af_nil() :: {nil, line()}.
+
+-type af_cons(T) :: {cons, line, T, T}.
+
+-type af_bin(T) :: {bin, line(), [af_binelement(T)]}.
+
+-type af_binelement(T) :: {bin_element,
+ line(),
+ T,
+ af_binelement_size(),
+ type_specifier_list()}.
+
+-type af_binelement_size() :: default | abstract_expr().
+
+-type af_binary_op(T) :: {op, line(), T, af_binop(), T}.
+
+-type af_binop() :: '/' | '*' | 'div' | 'rem' | 'band' | 'and' | '+' | '-'
+ | 'bor' | 'bxor' | 'bsl' | 'bsr' | 'or' | 'xor' | '++'
+ | '--' | '==' | '/=' | '=<' | '<' | '>=' | '>' | '=:='
+ | '=/='.
+
+-type af_unary_op(T) :: {op, line(), af_unop(), T}.
+
+-type af_unop() :: '+' | '*' | 'bnot' | 'not'.
+
+%% See also lib/stdlib/{src/erl_bits.erl,include/erl_bits.hrl}.
+-type type_specifier_list() :: default | [type_specifier(), ...].
+
+-type type_specifier() :: af_type()
+ | af_signedness()
+ | af_endianness()
+ | af_unit().
+
+-type af_type() :: integer
+ | float
+ | binary
+ | bytes
+ | bitstring
+ | bits
+ | utf8
+ | utf16
+ | utf32.
+
+-type af_signedness() :: signed | unsigned.
+
+-type af_endianness() :: big | little | native.
+
+-type af_unit() :: {unit, 1..256}.
+
+-type af_record_index() ::
+ {record_index, line(), af_record_name(), af_field_name()}.
+
+-type af_record_field(T) :: {record_field, line(), af_field_name(), T}.
+
+-type af_record_name() :: atom().
+
+-type af_field_name() :: atom().
+
+%% End of Abstract Format
+
+-type error_description() :: term().
+-type error_info() :: {erl_scan:line(), module(), error_description()}.
+-type token() :: {Tag :: atom(), Line :: erl_scan:line()}.
+
+%% mkop(Op, Arg) -> {op,Line,Op,Arg}.
+%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}.
+
+-define(mkop2(L, OpPos, R),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,L,R}
+ end).
+
+-define(mkop1(OpPos, A),
+ begin
+ {Op,Pos} = OpPos,
+ {op,Pos,Op,A}
+ end).
+
+%% keep track of line info in tokens
+-define(line(Tup), element(2, Tup)).
+
+%% Entry points compatible with the old erl_parse.
+%% These really suck and are only here until Calle gets multiple
+%% entry points working.
+
+-spec parse_form(Tokens) -> {ok, AbsForm} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ AbsForm :: abstract_form(),
+ ErrorInfo :: error_info().
+parse_form([{'-',L1},{atom,L2,spec}|Tokens]) ->
+ parse([{'-',L1},{'spec',L2}|Tokens]);
+parse_form([{'-',L1},{atom,L2,callback}|Tokens]) ->
+ parse([{'-',L1},{'callback',L2}|Tokens]);
+parse_form(Tokens) ->
+ parse(Tokens).
+
+-spec parse_exprs(Tokens) -> {ok, ExprList} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ ExprList :: [abstract_expr()],
+ ErrorInfo :: error_info().
+parse_exprs(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} ->
+ {ok,Exprs};
+ {error,_} = Err -> Err
+ end.
+
+-spec parse_term(Tokens) -> {ok, Term} | {error, ErrorInfo} when
+ Tokens :: [token()],
+ Term :: term(),
+ ErrorInfo :: error_info().
+parse_term(Tokens) ->
+ case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[Expr]}]}} ->
+ try normalise(Expr) of
+ Term -> {ok,Term}
+ catch
+ _:_R -> {error,{?line(Expr),?MODULE,"bad term"}}
+ end;
+ {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[_E1,E2|_Es]}]}} ->
+ {error,{?line(E2),?MODULE,"bad term"}};
+ {error,_} = Err -> Err
+ end.
+
+%% Convert between the abstract form of a term and a term.
+
+-spec normalise(AbsTerm) -> Data when
+ AbsTerm :: abstract_expr(),
+ Data :: term().
+normalise({char,_,C}) -> C;
+normalise({integer,_,I}) -> I;
+normalise({float,_,F}) -> F;
+normalise({atom,_,A}) -> A;
+normalise({string,_,S}) -> S;
+normalise({nil,_}) -> [];
+normalise({bin,_,Fs}) ->
+ {value, B, _} =
+ eval_bits:expr_grp(Fs, [],
+ fun(E, _) ->
+ {value, normalise(E), []}
+ end, [], true),
+ B;
+normalise({cons,_,Head,Tail}) ->
+ [normalise(Head)|normalise(Tail)];
+normalise({tuple,_,Args}) ->
+ list_to_tuple(normalise_list(Args));
+%% Atom dot-notation, as in 'foo.bar.baz'
+%% Special case for unary +/-.
+normalise({op,_,'+',{char,_,I}}) -> I;
+normalise({op,_,'+',{integer,_,I}}) -> I;
+normalise({op,_,'+',{float,_,F}}) -> F;
+normalise({op,_,'-',{char,_,I}}) -> -I; %Weird, but compatible!
+normalise({op,_,'-',{integer,_,I}}) -> -I;
+normalise({op,_,'-',{float,_,F}}) -> -F;
+normalise(X) -> erlang:error({badarg, X}).
+
+normalise_list([H|T]) ->
+ [normalise(H)|normalise_list(T)];
+normalise_list([]) ->
+ [].
+
+%% Generate a list of tokens representing the abstract term.
+
+-spec tokens(AbsTerm) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ Tokens :: [token()].
+tokens(Abs) ->
+ tokens(Abs, []).
+
+-spec tokens(AbsTerm, MoreTokens) -> Tokens when
+ AbsTerm :: abstract_expr(),
+ MoreTokens :: [token()],
+ Tokens :: [token()].
+tokens({char,L,C}, More) -> [{char,L,C}|More];
+tokens({integer,L,N}, More) -> [{integer,L,N}|More];
+tokens({float,L,F}, More) -> [{float,L,F}|More];
+tokens({atom,L,A}, More) -> [{atom,L,A}|More];
+tokens({var,L,V}, More) -> [{var,L,V}|More];
+tokens({string,L,S}, More) -> [{string,L,S}|More];
+tokens({nil,L}, More) -> [{'[',L},{']',L}|More];
+tokens({cons,L,Head,Tail}, More) ->
+ [{'[',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens({tuple,L,[]}, More) ->
+ [{'{',L},{'}',L}|More];
+tokens({tuple,L,[E|Es]}, More) ->
+ [{'{',L}|tokens(E, tokens_tuple(Es, ?line(E), More))].
+
+tokens_tail({cons,L,Head,Tail}, More) ->
+ [{',',L}|tokens(Head, tokens_tail(Tail, More))];
+tokens_tail({nil,L}, More) ->
+ [{']',L}|More];
+tokens_tail(Other, More) ->
+ L = ?line(Other),
+ [{'|',L}|tokens(Other, [{']',L}|More])].
+
+tokens_tuple([E|Es], Line, More) ->
+ [{',',Line}|tokens(E, tokens_tuple(Es, ?line(E), More))];
+tokens_tuple([], Line, More) ->
+ [{'}',Line}|More].
+
+%% Give the relative precedences of operators.
+
+inop_prec('=') -> {150,100,100};
+inop_prec('!') -> {150,100,100};
+inop_prec('orelse') -> {160,150,150};
+inop_prec('andalso') -> {200,160,160};
+inop_prec('==') -> {300,200,300};
+inop_prec('/=') -> {300,200,300};
+inop_prec('=<') -> {300,200,300};
+inop_prec('<') -> {300,200,300};
+inop_prec('>=') -> {300,200,300};
+inop_prec('>') -> {300,200,300};
+inop_prec('=:=') -> {300,200,300};
+inop_prec('=/=') -> {300,200,300};
+inop_prec('++') -> {400,300,300};
+inop_prec('--') -> {400,300,300};
+inop_prec('+') -> {400,400,500};
+inop_prec('-') -> {400,400,500};
+inop_prec('bor') -> {400,400,500};
+inop_prec('bxor') -> {400,400,500};
+inop_prec('bsl') -> {400,400,500};
+inop_prec('bsr') -> {400,400,500};
+inop_prec('or') -> {400,400,500};
+inop_prec('xor') -> {400,400,500};
+inop_prec('*') -> {500,500,600};
+inop_prec('/') -> {500,500,600};
+inop_prec('div') -> {500,500,600};
+inop_prec('rem') -> {500,500,600};
+inop_prec('band') -> {500,500,600};
+inop_prec('and') -> {500,500,600};
+inop_prec('#') -> {800,700,800};
+inop_prec(':') -> {900,800,900};
+inop_prec('.') -> {900,900,1000}.
+
+-type pre_op() :: 'catch' | '+' | '-' | 'bnot' | 'not' | '#'.
+
+-spec preop_prec(pre_op()) -> {0 | 600 | 700, 100 | 700 | 800}.
+
+preop_prec('catch') -> {0,100};
+preop_prec('+') -> {600,700};
+preop_prec('-') -> {600,700};
+preop_prec('bnot') -> {600,700};
+preop_prec('not') -> {600,700};
+preop_prec('#') -> {700,800}.
+
+-spec func_prec() -> {800,700}.
+
+func_prec() -> {800,700}.
+
+-spec max_prec() -> 1000.
+
+max_prec() -> 1000.
+
+parse(T) ->
+ bar:foo(T).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl b/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl
new file mode 100644
index 0000000000..8b66d35083
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/blame_contract_range_suppressed.erl
@@ -0,0 +1,15 @@
+%%-----------------------------------------------------------------------
+%% Like ./blame_contract_range.erl, but warning is suppressed.
+%%-----------------------------------------------------------------------
+-module(blame_contract_range_suppressed).
+
+-export([foo/0]).
+
+foo() ->
+ bar(b).
+
+-dialyzer({nowarn_function, bar/1}).
+
+-spec bar(atom()) -> a.
+bar(a) -> a;
+bar(b) -> b.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl b/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl
index d7dfd9752e..dbabd904c2 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/contracts_with_subtypes.erl
@@ -136,10 +136,14 @@ q(ab) -> rec2({a, b}); % breaks the contract
q(ba) -> rec2({b, a}); % breaks the contract
q(aba) -> rec2({a, {b, a}}); % breaks the contract
q(bab) -> rec2({b, {a, b}}); % breaks the contract
-q(abab) -> rec2({a, {b, {a, b}}});
-q(baba) -> rec2({b, {a, {b, a}}});
-q(ababa) -> rec2({a, {b, {a, {b, a}}}});
-q(babab) -> rec2({b, {a, {b, {a, b}}}}).
+q(abab) -> rec2({a, {b, {a, b}}}); % breaks the contract
+q(baba) -> rec2({b, {a, {b, a}}}); % breaks the contract
+q(ababa) -> rec2({a, {b, {a, {b, a}}}}); % breaks the contract
+q(babab) -> rec2({b, {a, {b, {a, b}}}}); % breaks the contract
+q(ababab) -> rec2({a, {b, {a, {b, {a, b}}}}});
+q(bababa) -> rec2({b, {a, {b, {a, {b, a}}}}});
+q(abababa) -> rec2({a, {b, {a, {b, {a, {b, a}}}}}});
+q(bababab) -> rec2({b, {a, {b, {a, {b, {a, b}}}}}}).
%===============================================================================
diff --git a/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl b/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl
new file mode 100644
index 0000000000..2d75f25bd5
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/ditrap.erl
@@ -0,0 +1,47 @@
+%% A bug reported by Tail-f Systems. The problem is that record types
+%% are included without properly limiting their depth.
+
+-module(ditrap).
+
+-define(tref(T), ?MODULE:T).
+-define(fref(T), ?MODULE:T).
+
+-export_type([ module_rec/0
+ , typedef_rec/0
+ , type_spec_fun/0
+ ]).
+
+-record(type, {
+ base :: 'builtin' | external:random_type() | ?tref(typedef_rec()),
+ type_spec_fun :: ?fref(type_spec_fun())
+ }).
+
+-record(typedef, {type :: #type{}}).
+
+-record(typedefs, {
+ map :: ?tref(typedef_rec()),
+ parent :: 'undefined' | #typedefs{}
+ }).
+
+-record(sn, {
+ module :: ?tref(module_rec()),
+ typedefs :: #typedefs{},
+ type :: 'undefined' | #type{},
+ keys :: 'undefined' | [#sn{}],
+ children = [] :: [#sn{}]
+ }).
+
+-record(augment, {children = [] :: [#sn{}]}).
+
+-record(module, {
+ submodules = [] :: [{#module{}, external:pos()}],
+ typedefs = #typedefs{} :: #typedefs{},
+ children = [] :: [#sn{}],
+ remote_augments = [] :: [{ModuleName :: atom(), [#augment{}]}],
+ local_augments = [] :: [#augment{}]
+ }).
+
+-type typedef_rec() :: #typedef{}.
+-type module_rec() :: #module{}.
+
+-type type_spec_fun() :: undefined | fun((#type{}, #module{}) -> any()).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/inv_mult.erl b/lib/dialyzer/test/small_SUITE_data/src/inv_mult.erl
new file mode 100644
index 0000000000..3413556813
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/inv_mult.erl
@@ -0,0 +1,15 @@
+%% Dialyzer was too constraining when checking the relation between the
+%% arguments and result of a multiplication. We should not constrain an argument
+%% if the other operand *may* be zero.
+%%
+%% Bug found by Kostis Sagonas, fixed by Stavros Aronis
+
+-module(inv_mult).
+-compile(export_all).
+
+main(L) ->
+ N = -1 * length(L),
+ fact(N).
+
+fact(0) -> 1;
+fact(N) -> N * fact(N-1).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl b/lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl
new file mode 100644
index 0000000000..945b2a9144
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/maps_redef2.erl
@@ -0,0 +1,23 @@
+%% In OTP 17, the linter says that map(A) redefines 'type map', which is
+%% allowed until the next release. However, Dialyzer used to replace
+%% map(A) with #{}, which resulted in warnings.
+
+-module(maps_redef2).
+
+-export([t/0]).
+
+-type map(_A) :: integer().
+
+t() ->
+ M = new(),
+ t1(M).
+
+-spec t1(map(_)) -> map(_).
+
+t1(A) ->
+ A + A.
+
+-spec new() -> map(_).
+
+new() ->
+ 3.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/maps_sum.erl b/lib/dialyzer/test/small_SUITE_data/src/maps_sum.erl
new file mode 100644
index 0000000000..a73ac555c9
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/maps_sum.erl
@@ -0,0 +1,31 @@
+-module(maps_sum).
+-export([correct1/1,
+ wrong1/1,
+ wrong2/1]).
+
+-spec correct1(#{atom() => term()}) -> integer().
+
+correct1(Data) ->
+ maps:fold(fun (_Key, Value, Acc) when is_integer(Value) ->
+ Acc + Value;
+ (_Key, _Value, Acc) ->
+ Acc
+ end, 0, Data).
+
+-spec wrong1([{atom(),term()}]) -> integer().
+
+wrong1(Data) ->
+ maps:fold(fun (_Key, Value, Acc) when is_integer(Value) ->
+ Acc + Value;
+ (_Key, _Value, Acc) ->
+ Acc
+ end, 0, Data).
+
+-spec wrong2(#{atom() => term()}) -> integer().
+
+wrong2(Data) ->
+ lists:foldl(fun (_Key, Value, Acc) when is_integer(Value) ->
+ Acc + Value;
+ (_Key, _Value, Acc) ->
+ Acc
+ end, 0, Data).
diff --git a/lib/dialyzer/test/small_SUITE_data/src/request1.erl b/lib/dialyzer/test/small_SUITE_data/src/request1.erl
new file mode 100644
index 0000000000..a6c4ab8dbd
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/request1.erl
@@ -0,0 +1,12 @@
+-module(request1).
+
+-export([a/0]).
+
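+%% Request warnings about unmatched return values for this module.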
+-dialyzer(unmatched_returns).
+
+a() ->
+ b(),
+ 1.
+
+b() ->
+ {a, b}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl b/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl
new file mode 100644
index 0000000000..c4275fa110
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/suppress_request.erl
@@ -0,0 +1,50 @@
+-module(suppress_request).
+
+-export([test1/1, test1_b/1, test2/0, test2_b/0,
+ test3/0, test3_b/0, test4/0, test4_b/0]).
+
+-dialyzer({[specdiffs], test1/1}).
+-spec test1(a | b) -> ok. % spec is subtype
+test1(A) ->
+ ok = test1_1(A).
+
+-spec test1_b(a | b) -> ok. % spec is subtype (suppressed by default)
+test1_b(A) ->
+ ok = test1_1(A).
+
+-spec test1_1(a | b | c) -> ok.
+test1_1(_) ->
+ ok.
+
+-dialyzer(unmatched_returns).
+test2() ->
+ tuple(), % unmatched
+ ok.
+
+test2_b() ->
+ tuple(), % unmatched
+ ok.
+
+-dialyzer({[no_return, no_match], [test3/0]}).
+test3() -> % no local return (suppressed)
+ A = fun(_) ->
+ 1
+ end,
+ A = 2. % can never succeed (suppressed)
+
+test3_b() -> % no local return (requested by default)
+ A = fun(_) ->
+ 1
+ end,
+ A = 2. % can never succeed (requested by default)
+
+-dialyzer(no_improper_lists).
+test4() ->
+ [1 | 2]. % improper list (suppressed)
+
+-dialyzer({no_improper_lists, test4_b/0}).
+test4_b() ->
+ [1 | 2]. % improper list (suppressed)
+
+tuple() ->
+ {a, b}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl b/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl
new file mode 100644
index 0000000000..00534704c3
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/suppression1.erl
@@ -0,0 +1,33 @@
+-module(suppression1).
+
+-export([a/1, b/1, c/0]).
+
+-dialyzer({nowarn_function, a/1}).
+
+-spec a(_) -> integer().
+
+a(_) ->
+ A = fun(_) ->
+ B = fun(_) ->
+ x = 7
+ end,
+ B = 1
+ end,
+ A.
+
+-spec b(_) -> integer().
+
+-dialyzer({nowarn_function, b/1}).
+
+b(_) ->
+ A = fun(_) ->
+ 1
+ end,
+ A = 2.
+
+-record(r, {a = a :: integer()}).
+
+-dialyzer({nowarn_function, c/0}).
+
+c() ->
+ #r{}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl b/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl
new file mode 100644
index 0000000000..4cba53fdce
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/suppression2.erl
@@ -0,0 +1,32 @@
+-module(suppression2).
+
+-export([a/1, b/1, c/0]).
+
+-dialyzer({nowarn_function, [a/1, b/1, c/0]}).
+-dialyzer([no_undefined_callbacks]).
+
+-behaviour(not_a_behaviour).
+
+-spec a(_) -> integer().
+
+a(_) ->
+ A = fun(_) ->
+ B = fun(_) ->
+ x = 7
+ end,
+ B = 1
+ end,
+ A.
+
+-spec b(_) -> integer().
+
+b(_) ->
+ A = fun(_) ->
+ 1
+ end,
+ A = 2.
+
+-record(r, {a = a :: integer()}).
+
+c() ->
+ #r{}.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/suppression3.erl b/lib/dialyzer/test/small_SUITE_data/src/suppression3.erl
new file mode 100644
index 0000000000..4a745cffc2
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/suppression3.erl
@@ -0,0 +1,17 @@
+-module(suppression3).
+
+-export([a/1, b/1]).
+
+-dialyzer({nowarn_function, a/1}).
+
+-spec a(_) -> integer().
+
+a(A) ->
+ ?MODULE:missing(A).
+
+-dialyzer({no_missing_calls, b/1}).
+
+-spec b(_) -> integer().
+
+b(A) ->
+ ?MODULE:missing(A).
diff --git a/lib/dialyzer/test/underspecs_SUITE_data/dialyzer_options b/lib/dialyzer/test/underspecs_SUITE_data/dialyzer_options
index 6843119b9d..f7197ac30f 100644
--- a/lib/dialyzer/test/underspecs_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/underspecs_SUITE_data/dialyzer_options
@@ -1 +1 @@
-{dialyzer_options, [{warnings, [underspecs, no_unknown]}]}.
+{dialyzer_options, [{warnings, [underspecs]}]}.
diff --git a/lib/dialyzer/test/user_SUITE_data/dialyzer_options b/lib/dialyzer/test/user_SUITE_data/dialyzer_options
index d20ecd389f..513ed7752b 100644
--- a/lib/dialyzer/test/user_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/user_SUITE_data/dialyzer_options
@@ -1,2 +1,2 @@
-{dialyzer_options, [{warnings, [no_unknown]}]}.
+{dialyzer_options, []}.
{time_limit, 3}.
\ No newline at end of file
diff --git a/lib/dialyzer/vsn.mk b/lib/dialyzer/vsn.mk
index e7c13f04ad..48e0830109 100644
--- a/lib/dialyzer/vsn.mk
+++ b/lib/dialyzer/vsn.mk
@@ -1 +1 @@
-DIALYZER_VSN = 2.7.3
+DIALYZER_VSN = 2.8
diff --git a/lib/diameter/doc/src/diameter.xml b/lib/diameter/doc/src/diameter.xml
index 00b54ffbc4..ea175a58b8 100644
--- a/lib/diameter/doc/src/diameter.xml
+++ b/lib/diameter/doc/src/diameter.xml
@@ -21,7 +21,7 @@
<copyright>
<year>2011</year>
-<year>2014</year>
+<year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -111,7 +111,7 @@ Defined in &dict_data_types;.</p>
<tag><c>application_alias() = term()</c></tag>
<item>
<p>
-A name identifying a Diameter application in
+Name identifying a Diameter application in
service configuration.
Passed to &call; when sending requests
defined by the application.</p>
@@ -129,7 +129,7 @@ ExtraArgs = list()
</pre>
<p>
-A module implementing the callback interface defined in &man_app;,
+Module implementing the callback interface defined in &man_app;,
along with any
extra arguments to be appended to those documented.
Note that extra arguments specific to an outgoing request can be
@@ -156,7 +156,7 @@ Has one the following types.</p>
<tag><c>{alias, &application_alias;}</c></tag>
<item>
<p>
-A unique identifier for the application in the scope of the
+Unique identifier for the application in the scope of the
service.
Defaults to the value of the <c>dictionary</c> option if
unspecified.</p>
@@ -165,7 +165,7 @@ unspecified.</p>
<tag><c>{dictionary, atom()}</c></tag>
<item>
<p>
-The name of an encode/decode module for the Diameter
+Name of an encode/decode module for the Diameter
messages defined by the application.
These modules are generated from files whose format is documented in
&man_dict;.</p>
@@ -174,7 +174,7 @@ These modules are generated from files whose format is documented in
<tag><c>{module, &application_module;}</c></tag>
<item>
<p>
-The callback module with which messages of the Diameter application are
+Callback module in which messages of the Diameter application are
handled.
See &man_app; for the required interface and semantics.</p>
</item>
@@ -182,7 +182,7 @@ See &man_app; for the required interface and semantics.</p>
<tag><c>{state, term()}</c></tag>
<item>
<p>
-The initial callback state.
+Initial callback state.
The prevailing state is passed to some
&man_app;
callbacks, which can then return a new state.
@@ -192,7 +192,7 @@ Defaults to the value of the <c>alias</c> option if unspecified.</p>
<tag><c>{call_mutates_state, true|false}</c></tag>
<item>
<p>
-Specifies whether or not the &app_pick_peer;
+Whether or not the &app_pick_peer;
application callback can modify the application state.
Defaults to <c>false</c> if unspecified.</p>
@@ -209,7 +209,7 @@ probably avoid it.</p>
<tag><c>{answer_errors, callback|report|discard}</c></tag>
<item>
<p>
-Determines the manner in which incoming answer messages containing
+Manner in which incoming answer messages containing
decode errors are handled.</p>
<p>
@@ -233,7 +233,7 @@ Defaults to <c>discard</c> if unspecified.</p>
<tag><c>{request_errors, answer_3xxx|answer|callback}</c></tag>
<item>
<p>
-Determines the manner in which incoming requests are handled when an
+Manner in which incoming requests are handled when an
error other than 3007 (DIAMETER_APPLICATION_UNSUPPORTED, which cannot
be associated with an application callback module), is detected.</p>
@@ -293,7 +293,7 @@ Multiple options append to the argument list.</p>
<tag><c>{filter, &peer_filter;}</c></tag>
<item>
<p>
-A filter to apply to the list of available peers before passing it to
+Filter to apply to the list of available peers before passing it to
the &app_pick_peer; callback for the application in question.
Multiple options are equivalent to a single <c>all</c> filter on the
corresponding list of filters.
@@ -303,7 +303,7 @@ Defaults to <c>none</c>.</p>
<tag><c>{timeout, &dict_Unsigned32;}</c></tag>
<item>
<p>
-The number of milliseconds after which the request should
+Number of milliseconds after which the request should
timeout.
Defaults to 5000.</p>
</item>
@@ -311,7 +311,7 @@ Defaults to 5000.</p>
<tag><c>detach</c></tag>
<item>
<p>
-Causes &call; to return <c>ok</c> as
+Cause &call; to return <c>ok</c> as
soon as the request in
question has been encoded, instead of waiting for and returning
the result from a subsequent &app_handle_answer; or
@@ -427,7 +427,7 @@ configuration passed to &start_service; or &add_transport;.</p>
<tag><c>peer_filter() = term()</c></tag>
<item>
<p>
-A filter passed to &call; in order to select candidate peers for a
+Filter passed to &call; in order to select candidate peers for a
&app_pick_peer; callback.
Has one of the following types.</p>
@@ -674,7 +674,7 @@ connection establishment.</p>
<tag><c>{'CEA', Result, Caps, Pkt}</c></tag>
<item>
<pre>
-Result = integer() | atom() | {capabilities_cb, CB, ResultCode|discard}
+Result = ResultCode | atom() | {capabilities_cb, CB, ResultCode|discard}
Caps = #diameter_caps{}
Pkt = #diameter_packet{}
ResultCode = integer()
@@ -742,7 +742,7 @@ info fields of forms other than the above.</p>
<tag><c>service_name() = term()</c></tag>
<item>
<p>
-The name of a service as passed to &start_service; and with which the
+Name of a service as passed to &start_service; and with which the
service is identified.
There can be at most one service with a given name on a given node.
Note that &make_ref;
@@ -754,7 +754,7 @@ can be used to generate a service name that is somewhat unique.</p>
<tag><c>service_opt()</c></tag>
<item>
<p>
-An option passed to &start_service;.
+Option passed to &start_service;.
Can be any <c>&capability;</c> as well as the following.</p>
<taglist>
@@ -762,7 +762,7 @@ Can be any <c>&capability;</c> as well as the following.</p>
<tag><c>{application, [&application_opt;]}</c></tag>
<item>
<p>
-Defines a Diameter application supported by the service.</p>
+A Diameter application supported by the service.</p>
<p>
A service must configure one tuple for each Diameter
@@ -783,6 +783,27 @@ be matched by corresponding &capability; configuration, of
</item>
+<marker id="incoming_maxlen"/>
+<tag><c>{incoming_maxlen, 0..16777215}</c></tag>
+<item>
+<p>
+Bound on the expected size of incoming Diameter messages.
+Messages larger than the specified number of bytes are discarded.</p>
+
+<p>
+Defaults to <c>16777215</c>, the maximum value of the 24-bit Message
+Length field in a Diameter Header.</p>
+
+<warning>
+<p>
+This option should be set to as low a value as is sufficient for the
+Diameter applications and peers in question, since decoding incoming
+messages from a malicious peer can otherwise generate significant
+load.</p>
+</warning>
+
+</item>
+
<tag><c>{restrict_connections, false
| node
| nodes
@@ -790,7 +811,7 @@ be matched by corresponding &capability; configuration, of
| evaluable()}</c></tag>
<item>
<p>
-Specifies the degree to which the service allows multiple transport
+The degree to which the service allows multiple transport
connections to the same peer, as identified by its Origin-Host
at capabilities exchange.</p>
@@ -816,7 +837,7 @@ Defaults to <c>nodes</c>.</p>
<tag><c>{sequence, {H,N} | &evaluable;}</c></tag>
<item>
<p>
-Specifies a constant value <c>H</c> for the topmost <c>32-N</c> bits of
+A constant value <c>H</c> for the topmost <c>32-N</c> bits
of 32-bit End-to-End and Hop-by-Hop Identifiers generated
by the service, either explicitly or as a return value of a function
to be evaluated at &start_service;.
@@ -851,7 +872,7 @@ outgoing requests.</p>
<tag><c>{share_peers, boolean() | [node()] | evaluable()}</c></tag>
<item>
<p>
-Specifies nodes to which peer connections established on the local
+Nodes to which peer connections established on the local
Erlang node are communicated.
Shared peers become available in the remote candidates list passed to
&app_pick_peer; callbacks on remote nodes whose services are
@@ -890,7 +911,7 @@ of a single Diameter node across multiple Erlang nodes.</p>
<tag><c>{spawn_opt, [term()]}</c></tag>
<item>
<p>
-An options list passed to &spawn_opt; when spawning a process for an
+Options list passed to &spawn_opt; when spawning a process for an
incoming Diameter request, unless the transport in question
specifies another value.
Options <c>monitor</c> and <c>link</c> are ignored.</p>
@@ -899,10 +920,34 @@ Options <c>monitor</c> and <c>link</c> are ignored.</p>
Defaults to the empty list.</p>
</item>
+<marker id="string_decode"/>
+<tag><c>{string_decode, boolean()}</c></tag>
+<item>
+<p>
+Whether or not to decode AVPs of type &dict_OctetString; and its
+derived types &dict_DiameterIdentity;, &dict_DiameterURI;,
+&dict_IPFilterRule;, &dict_QoSFilterRule;, and &dict_UTF8String;.
+If <c>true</c> then AVPs of these types are decoded to string().
+If <c>false</c> then values are retained as binary().</p>
+
+<p>
+Defaults to <c>true</c>.</p>
+
+<warning>
+<p>
+This option should be set to <c>false</c>
+since a sufficiently malicious peer can otherwise cause large amounts
+of memory to be consumed when decoded Diameter messages are passed
+between processes.
+The default value is for backwards compatibility.</p>
+</warning>
+
+</item>
+
<tag><c>{use_shared_peers, boolean() | [node()] | evaluable()}</c></tag>
<item>
<p>
-Specifies nodes from which communicated peers are made available in
+Nodes from which communicated peers are made available in
the remote candidates list of &app_pick_peer; callbacks.</p>
<p>
@@ -942,7 +987,7 @@ each node from which requests are sent.</p>
<tag><c>transport_opt()</c></tag>
<item>
<p>
-An option passed to &add_transport;.
+Option passed to &add_transport;.
Has one of the following types.</p>
<taglist>
@@ -950,8 +995,7 @@ Has one of the following types.</p>
<tag><c>{applications, [&application_alias;]}</c></tag>
<item>
<p>
-The list of Diameter applications to which the transport should be
-restricted.
+Diameter applications to which the transport should be restricted.
Defaults to all applications configured on the service in question.
Applications not configured on the service in question are ignored.</p>
@@ -984,7 +1028,7 @@ TLS is desired over TCP as implemented by &man_tcp;.</p>
<tag><c>{capabilities_cb, &evaluable;}</c></tag>
<item>
<p>
-A callback invoked upon reception of CER/CEA during capabilities
+Callback invoked upon reception of CER/CEA during capabilities
exchange in order to ask whether or not the connection should
be accepted.
Applied to the <c>&transport_ref;</c> and
@@ -1032,7 +1076,7 @@ case the corresponding callbacks are applied until either all return
<tag><c>{capx_timeout, &dict_Unsigned32;}</c></tag>
<item>
<p>
-The number of milliseconds after which a transport process having an
+Number of milliseconds after which a transport process having an
established transport connection will be terminated if the expected
capabilities exchange message (CER or CEA) is not received from the peer.
For a connecting transport, the timing of connection attempts is
@@ -1079,7 +1123,7 @@ transport.</p>
<item>
<p>
-A callback invoked prior to terminating the transport process of a
+Callback invoked prior to terminating the transport process of a
transport connection having watchdog state <c>OKAY</c>.
Applied to <c>application|service|transport</c> and the
<c>&transport_ref;</c> and <c>&app_peer;</c> in question:
@@ -1095,7 +1139,7 @@ The return value can have one of the following types.</p>
<tag><c>{dpr, [option()]}</c></tag>
<item>
<p>
-Causes Disconnect-Peer-Request to be sent to the peer, the transport
+Send Disconnect-Peer-Request to the peer, the transport
process being terminated following reception of
Disconnect-Peer-Answer or timeout.
An <c>option()</c> can be one of the following.</p>
@@ -1104,7 +1148,7 @@ An <c>option()</c> can be one of the following.</p>
<tag><c>{cause, 0|rebooting|1|busy|2|goaway}</c></tag>
<item>
<p>
-The Disconnect-Cause to send, <c>REBOOTING</c>, <c>BUSY</c> and
+Disconnect-Cause to send, <c>REBOOTING</c>, <c>BUSY</c> and
<c>DO_NOT_WANT_TO_TALK_TO_YOU</c> respectively.
Defaults to <c>rebooting</c> for <c>Reason=service|application</c> and
<c>goaway</c> for <c>Reason=transport</c>.</p>
@@ -1113,9 +1157,9 @@ Defaults to <c>rebooting</c> for <c>Reason=service|application</c> and
<tag><c>{timeout, &dict_Unsigned32;}</c></tag>
<item>
<p>
-The number of milliseconds after which the transport process is
+Number of milliseconds after which the transport process is
terminated if DPA has not been received.
-Defaults to 1000.</p>
+Defaults to the value of &dpa_timeout;.</p>
</item>
</taglist>
</item>
@@ -1129,7 +1173,7 @@ Equivalent to <c>{dpr, []}</c>.</p>
<tag><c>close</c></tag>
<item>
<p>
-Causes the transport process to be terminated without
+Terminate the transport process without
Disconnect-Peer-Request being sent to the peer.</p>
</item>
@@ -1152,11 +1196,34 @@ configured them.</p>
Defaults to a single callback returning <c>dpr</c>.</p>
</item>
+<marker id="dpa_timeout"/>
+<tag><c>{dpa_timeout, &dict_Unsigned32;}</c></tag>
+<item>
+<p>
+Number of milliseconds after which a transport connection is
+terminated following an outgoing DPR if DPA is not received.</p>
+
+<p>
+Defaults to 1000.</p>
+</item>
+
+<marker id="dpr_timeout"/>
+<tag><c>{dpr_timeout, &dict_Unsigned32;}</c></tag>
+<item>
+<p>
+Number of milliseconds after which a transport connection is
+terminated following an incoming DPR if the peer does not close the
+connection.</p>
+
+<p>
+Defaults to 5000.</p>
+</item>
+
<marker id="length_errors"/>
<tag><c>{length_errors, exit|handle|discard}</c></tag>
<item>
<p>
-Specifies how to deal with errors in the Message Length field of the
+How to deal with errors in the Message Length field of the
Diameter Header in an incoming message.
An error in this context is that the length is not at least 20 bytes
(the length of a Header), is not a multiple of 4 (a valid length) or
@@ -1188,11 +1255,26 @@ See &man_tcp; for the behaviour of that module.</p>
</note>
</item>
+<tag><c>{pool_size, pos_integer()}</c></tag>
+<item>
+<p>
+Number of transport processes to start.
+For a listening transport, determines the size of the pool of
+accepting transport processes, a larger number being desirable for
+processing multiple concurrent peer connection attempts.
+For a connecting transport, determines the number of connections to
+the peer in question that will be attempted to be established:
+the &service_opt; <c>restrict_connections</c> should also be
+configured on the service in question to allow multiple connections to
+the same peer.</p>
+
+</item>
+
<marker id="spawn_opt"/>
<tag><c>{spawn_opt, [term()]}</c></tag>
<item>
<p>
-An options list passed to &spawn_opt; when spawning a process for an
+Options passed to &spawn_opt; when spawning a process for an
incoming Diameter request.
Options <c>monitor</c> and <c>link</c> are ignored.</p>
@@ -1205,7 +1287,7 @@ Defaults to the list configured on the service if not specified.</p>
<tag><c>{transport_config, term(), &dict_Unsigned32; | infinity}</c></tag>
<item>
<p>
-A term passed as the third argument to the &transport_start; function of
+Term passed as the third argument to the &transport_start; function of
the relevant &transport_module; in order to
start a transport process.
Defaults to the empty list if unspecified.</p>
@@ -1233,7 +1315,7 @@ To listen on both SCTP and TCP, define one transport for each.</p>
<tag><c>{transport_module, atom()}</c></tag>
<item>
<p>
-A module implementing a transport process as defined in &man_transport;.
+Module implementing a transport process as defined in &man_transport;.
Defaults to <c>diameter_tcp</c> if unspecified.</p>
<p>
@@ -1253,7 +1335,7 @@ corresponding timeout (see below) or all fail.</p>
<tag><c>{watchdog_config, [{okay|suspect, non_neg_integer()}]}</c></tag>
<item>
<p>
-Specifies configuration that alters the behaviour of the watchdog
+Configuration that alters the behaviour of the watchdog
state machine.
On key <c>okay</c>, the non-negative number of answered DWR
messages before transitioning from REOPEN to OKAY.
@@ -1308,7 +1390,7 @@ in predicate functions passed to &remove_transport;.</p>
<tag><c>transport_ref() = reference()</c></tag>
<item>
<p>
-An reference returned by &add_transport; that
+Reference returned by &add_transport; that
identifies the configuration.</p>
</item>
@@ -1737,6 +1819,15 @@ connection might look as follows.</p>
The information presented here is as in the <c>connect</c> case except
that the client connections are grouped under an <c>accept</c> tuple.</p>
+<p>
+Whether or not the &transport_opt; <c>pool_size</c> has been
+configured affects the format
+of the listing in the case of a connecting transport, since a value
+greater than 1 implies multiple transport processes for the same
+<c>&transport_ref;</c>, as in the listening case.
+The format in this case is similar to the listening case, with a
+<c>pool</c> tuple in place of an <c>accept</c> tuple.</p>
+
</item>
<tag><c>connections</c></tag>
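The options added above can be combined in ordinary configuration. A minimal sketch, not part of the diff, with a placeholder service name, capabilities and callback module (example_cb is hypothetical), assuming the diameter application is already started:

%% string_decode and incoming_maxlen as service_opt(); pool_size,
%% dpa_timeout and dpr_timeout as transport_opt(). All values are
%% illustrative only.
ok = diameter:start_service(example_svc,
       [{'Origin-Host', "client.example.com"},
        {'Origin-Realm', "example.com"},
        {'Vendor-Id', 0},
        {'Product-Name', "Example"},
        {'Auth-Application-Id', [0]},
        {string_decode, false},         %% retain OctetString values as binary()
        {incoming_maxlen, 65535},       %% discard larger incoming messages
        {restrict_connections, false},  %% allow pool_size > 1 when connecting
        {application, [{alias, common},
                       {dictionary, diameter_gen_base_rfc6733},
                       {module, example_cb}]}]),

{ok, _Ref} = diameter:add_transport(example_svc,
       {connect, [{transport_module, diameter_tcp},
                  {transport_config, [{raddr, {127,0,0,1}}, {rport, 3868}]},
                  {pool_size, 4},        %% attempt 4 connections to the peer
                  {dpa_timeout, 1000},   %% ms to wait for DPA after an outgoing DPR
                  {dpr_timeout, 5000}]}). %% ms to wait for peer close after answering DPR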
diff --git a/lib/diameter/doc/src/diameter_dict.xml b/lib/diameter/doc/src/diameter_dict.xml
index 810a146b88..5cf1b174a0 100644
--- a/lib/diameter/doc/src/diameter_dict.xml
+++ b/lib/diameter/doc/src/diameter_dict.xml
@@ -529,6 +529,11 @@ answer record and passed to a &app_handle_request;
callback upon reception of an incoming request.</p>
<p>
+In cases in which there is a choice between string() and binary() types
+for OctetString() and derived types, the representation is determined
+by the value of &mod_string_decode;.</p>
+
+<p>
<em>Basic AVP Data Formats</em></p>
<marker id="OctetString"/>
@@ -541,7 +546,7 @@ callback upon reception of an incoming request.</p>
<marker id="Grouped"/>
<pre>
-OctetString() = [0..255]
+OctetString() = string() | binary()
Integer32() = -2147483647..2147483647
Integer64() = -9223372036854775807..9223372036854775807
Unsigned32() = 0..4294967295
@@ -603,7 +608,7 @@ and <c>{{2104,2,26},{9,42,23}}</c> (both inclusive) can be encoded.</p>
<marker id="UTF8String"/>
<pre>
-UTF8String() = [integer()]
+UTF8String() = [integer()] | binary()
</pre>
<p>
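Since OctetString() and its derived types can now be either string() or binary() depending on the string_decode service option, code handling decoded values may need to accept both representations. A minimal sketch of a hypothetical helper, not part of the diff, shown for a DiameterIdentity() such as Origin-Host:

%% Hypothetical helper: accept either representation of a decoded
%% DiameterIdentity(), which is a string() when string_decode is true
%% and a binary() when it is false.
host_to_list(Host) when is_binary(Host) ->
    binary_to_list(Host);
host_to_list(Host) when is_list(Host) ->
    Host.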
diff --git a/lib/diameter/doc/src/notes.xml b/lib/diameter/doc/src/notes.xml
index e6ac332c10..c5df63a7f0 100644
--- a/lib/diameter/doc/src/notes.xml
+++ b/lib/diameter/doc/src/notes.xml
@@ -42,6 +42,260 @@ first.</p>
<!-- ===================================================================== -->
+<section><title>diameter 1.9.2</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fix broken relay counters.</p>
+ <p>
+ OTP-12654 in OTP 17.5.3 broke counters in the case of
+ answer messages received in the relay application.
+ Counters were accumulated as unknown messages or
+ no_result_code instead of as relayed messages on the
+ intended Result-Code and 'Experimental-Result' tuples.</p>
+ <p>
+ Own Id: OTP-12741</p>
+ </item>
+ <item>
+ <p>
+ Fix diameter_sctp listener race.</p>
+ <p>
+ An oversight in OTP-12428 made it possible to start a
+ transport process that could not establish associations.</p>
+ <p>
+ Own Id: OTP-12744</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>diameter 1.9.1</title>
+
+ <section><title>Known Bugs and Problems</title>
+ <list>
+ <item>
+ <p>
+ Don't leave extra bit in decoded AVP data.</p>
+ <p>
+ OTP-12074 in OTP 17.3 missed one case: a length error on
+ a trailing AVP unknown to the dictionary in question.</p>
+ <p>
+ Own Id: OTP-12642</p>
+ </item>
+ <item>
+ <p>
+ Don't confuse Result-Code and Experimental-Result</p>
+ <p>
+ The errors field of a decoded diameter_packet record was
+ populated with a Result-Code AVP when an
+ Experimental-Result containing a 3xxx Result-Code was
+ received in an answer not setting the E-bit. The correct
+ AVP is now extracted from the incoming message.</p>
+ <p>
+ Own Id: OTP-12654</p>
+ </item>
+ <item>
+ <p>
+ Don't count on unknown Application Id.</p>
+ <p>
+ OTP-11721 in OTP 17.1 missed the case of an Application
+ Id not agreeing with that of the dictionary in question,
+ causing counters to be accumulated on keys containing the
+ unknown id.</p>
+ <p>
+ Own Id: OTP-12701</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>diameter 1.9</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Don't discard outgoing answers unnecessarily.</p>
+ <p>
+ Answers missing a Result-Code AVP or setting an E-bit
+ inappropriately were discarded even if encode was
+ successful.</p>
+ <p>
+ Own Id: OTP-11492</p>
+ </item>
+ <item>
+ <p>
+ Increase supervision timeouts.</p>
+ <p>
+ At diameter application shutdown, DPR could be omitted on
+ open peer connections because of short supervision
+ timeouts.</p>
+ <p>
+ Own Id: OTP-12412</p>
+ </item>
+ <item>
+ <p>
+ Fix retransmission of messages sent as header/avps list.</p>
+ <p>
+ Extracting End-to-End and Hop-by-Hop Identifiers resulted
+ in a function clause error, resulting in a handle_error
+ callback.</p>
+ <p>
+ Own Id: OTP-12415</p>
+ </item>
+ <item>
+ <p>
+ Fix diameter_avp decode of Grouped AVPs having decode
+ errors.</p>
+ <p>
+ Components of such an AVP were not extracted, causing it
+ to be represented by a single diameter_avp record instead
+ of the intended list.</p>
+ <p>
+ Dictionary files must be recompiled for the fix to have
+ effect.</p>
+ <p>
+ Own Id: OTP-12475</p>
+ </item>
+ <item>
+ <p>
+ Fix ordering of AVPs in relayed messages.</p>
+ <p>
+ The order was reversed relative to the received order,
+ with a Route-Record AVP prepended.</p>
+ <p>
+ Thanks to Andrzej Trawiński.</p>
+ <p>
+ Own Id: OTP-12551</p>
+ </item>
+ <item>
+ <p>
+ Fix issues with DiameterURI encode/decode.</p>
+ <p>
+ RFC 6773 changed the default port and transport, but the
+ RFC 3588 defaults were used even if the RFC 6733 common
+ dictionary was in use. The RFC 3588 defaults are now only
+ used when the common dictionary is
+ diameter_gen_base_rfc3588.</p>
+ <p>
+ Both RFC 3588 and 6733 disallow
+ transport=udp;protocol=diameter. Encode of the
+ combination now fails.</p>
+ <p>
+ Decode of port numbers outside the range 0-65535 and
+ fully qualified domain names longer than 255 octets now
+ fails.</p>
+ <p>
+ Note that RFC 3588 is obsolete, and that there is a
+ diameter_gen_base_rfc6733. The change in defaults is a
+ potential interoperability problem when moving to RFC
+ 6733 with peers that do not send all URI components. The
+ fact that 6733 allows 5xxx result codes in answer
+ messages setting the E-bit, which RFC 3588 doesn't, is
+ another.</p>
+ <p>
+ Own Id: OTP-12589</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Add service_opt() string_decode.</p>
+ <p>
+ To disable the decode of potentially large binaries to
+ string. This prevents large strings from being copied
+ when incoming Diameter messages are passed between
+ processes, a vulnerability that can lead to memory being
+ exhausted given sufficiently malicious peers.</p>
+ <p>
+ The value is a boolean(), true being the default for
+ backwards compatibility. Setting false causes both
+ diameter_caps records and decoded messages to contain
+ binary() in relevant places that previously had string():
+ diameter_app(3) callbacks need to be prepared for the
+ change.</p>
+ <p>
+ The Diameter types affected are OctetString and the
+ derived types UTF8String, DiameterIdentity, DiameterURI,
+ IPFilterRule, and QoSFilterRule. Time and Address are
+ unaffected.</p>
+ <p>
+ Own Id: OTP-11952</p>
+ </item>
+ <item>
+ <p>
+ Add transport_opt() pool_size.</p>
+ <p>
+ To allow for pools of accepting transport processes,
+ which can better service multiple simultaneous peer
+ connections. The option can also be used with connecting
+ transports, to establish multiple connections to the same
+ peer without having to configure multiple transports.</p>
+ <p>
+ Own Id: OTP-12428</p>
+ </item>
+ <item>
+ <p>
+ Allow DPR to be sent with diameter:call/4.</p>
+ <p>
+ It has been possible to send, but the answer was regarded
+ as unsolicited and discarded. DPA now causes the
+ transport process in question to be terminated, as for
+ DPR that diameter itself sends.</p>
+ <p>
+ Own Id: OTP-12542</p>
+ </item>
+ <item>
+ <p>
+ Discard requests after DPR.</p>
+ <p>
+ RFC 6733 is imprecise, but the tone is that messages
+ received after DPR are an exception to be dealt with only
+ because of the possibility of unordered delivery over
+ SCTP. As a consequence, and because a request following
+ DPR is unlikely to be answered due to the impending loss
+ of the peer connection, discard outgoing requests
+ following an outgoing or incoming DPR. Incoming requests
+ are also discarded, with the exception of DPR itself.
+ Answers are sent and received as usual.</p>
+ <p>
+ Own Id: OTP-12543</p>
+ </item>
+ <item>
+ <p>
+ Add transport_opt() dpr_timeout.</p>
+ <p>
+ To cause a peer connection to be closed following an
+ outgoing DPA when the peer fails to do so. It is the
+ recipient of DPA that should close the connection
+ according to RFC 6733.</p>
+ <p>
+ Own Id: OTP-12609</p>
+ </item>
+ <item>
+ <p>
+ Add service_opt() incoming_maxlen.</p>
+ <p>
+ To bound the expected size of incoming Diameter messages.
+ Messages larger than the specified number of bytes are
+ discarded, to prevent a malicious peer from generating
+ excessive load.</p>
+ <p>
+ Own Id: OTP-12628</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>diameter 1.8</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/diameter/doc/src/seealso.ent b/lib/diameter/doc/src/seealso.ent
index 44541afb9b..4e205ffad7 100644
--- a/lib/diameter/doc/src/seealso.ent
+++ b/lib/diameter/doc/src/seealso.ent
@@ -4,7 +4,7 @@
%CopyrightBegin%
-Copyright Ericsson AB 2012-2014. All Rights Reserved.
+Copyright Ericsson AB 2012-2015. All Rights Reserved.
The contents of this file are subject to the Erlang Public License,
Version 1.1, (the "License"); you may not use this file except in
@@ -64,11 +64,14 @@ significant.
<!ENTITY capabilities_cb '<seealso marker="#capabilities_cb">capabilities_cb</seealso>'>
<!ENTITY capx_timeout '<seealso marker="#capx_timeout">capx_timeout</seealso>'>
<!ENTITY disconnect_cb '<seealso marker="#disconnect_cb">disconnect_cb</seealso>'>
+<!ENTITY dpa_timeout '<seealso marker="#dpa_timeout">dpa_timeout</seealso>'>
<!ENTITY transport_config '<seealso marker="#transport_config">transport_config</seealso>'>
<!ENTITY transport_module '<seealso marker="#transport_module">transport_module</seealso>'>
<!ENTITY connect_timer '<seealso marker="#connect_timer">connect_timer</seealso>'>
<!ENTITY watchdog_timer '<seealso marker="#watchdog_timer">watchdog_timer</seealso>'>
+<!ENTITY mod_string_decode '<seealso marker="diameter#service_opt">diameter:service_opt()</seealso> <seealso marker="diameter#string_decode">string_decode</seealso>'>
+
<!-- diameter_app -->
<!ENTITY app_handle_answer '<seealso marker="diameter_app#Mod:handle_answer-4">handle_answer/4</seealso>'>
@@ -102,6 +105,9 @@ significant.
<!ENTITY dict_Address '<seealso marker="diameter_dict#DATA_TYPES">Address()</seealso>'>
<!ENTITY dict_DiameterIdentity '<seealso marker="diameter_dict#DATA_TYPES">DiameterIdentity()</seealso>'>
+<!ENTITY dict_DiameterURI '<seealso marker="diameter_dict#DATA_TYPES">DiameterURI()</seealso>'>
+<!ENTITY dict_IPFilterRule '<seealso marker="diameter_dict#DATA_TYPES">IPFilterRule()</seealso>'>
+<!ENTITY dict_QoSFilterRule '<seealso marker="diameter_dict#DATA_TYPES">QoSFilterRule()</seealso>'>
<!ENTITY dict_Grouped '<seealso marker="diameter_dict#DATA_TYPES">Grouped()</seealso>'>
<!ENTITY dict_OctetString '<seealso marker="diameter_dict#DATA_TYPES">OctetString()</seealso>'>
<!ENTITY dict_Time '<seealso marker="diameter_dict#DATA_TYPES">Time()</seealso>'>
diff --git a/lib/diameter/examples/code/GNUmakefile b/lib/diameter/examples/code/GNUmakefile
index 98e36a99e3..81f1da5a39 100644
--- a/lib/diameter/examples/code/GNUmakefile
+++ b/lib/diameter/examples/code/GNUmakefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2010-2012. All Rights Reserved.
+# Copyright Ericsson AB 2010-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -20,7 +20,7 @@
EXAMPLES = client server relay # redirect proxy
CALLBACKS = $(EXAMPLES:%=%_cb)
-MODULES = peer $(EXAMPLES) $(EXAMPLES:%=%_cb)
+MODULES = node $(EXAMPLES) $(EXAMPLES:%=%_cb)
BEAM = $(MODULES:%=%.beam)
diff --git a/lib/diameter/examples/code/client.erl b/lib/diameter/examples/code/client.erl
index 46eb4a55db..844c9cdbdd 100644
--- a/lib/diameter/examples/code/client.erl
+++ b/lib/diameter/examples/code/client.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,9 +38,10 @@
-module(client).
-include_lib("diameter/include/diameter.hrl").
--include_lib("diameter/include/diameter_gen_base_rfc3588.hrl").
+-include_lib("diameter/include/diameter_gen_base_rfc6733.hrl").
-export([start/1, %% start a service
+ start/2, %%
connect/2, %% add a connecting transport
call/1, %% send using the record encoding
cast/1, %% send using the list encoding and detached
@@ -50,17 +51,14 @@
%% both the record and list encoding here, one detached and one not,
%% is just for demonstration purposes.
-%% Convenience functions using the default service name, ?SVC_NAME.
+%% Convenience functions using the default service name.
-export([start/0,
connect/1,
stop/0,
call/0,
cast/0]).
--define(SVC_NAME, ?MODULE).
--define(APP_ALIAS, ?MODULE).
--define(CALLBACK_MOD, client_cb).
-
+-define(DEF_SVC_NAME, ?MODULE).
-define(L, atom_to_list).
%% The service configuration. As in the server example, a client
@@ -70,27 +68,40 @@
{'Origin-Realm', "example.com"},
{'Vendor-Id', 0},
{'Product-Name', "Client"},
- {'Auth-Application-Id', [?DIAMETER_APP_ID_COMMON]},
- {application, [{alias, ?APP_ALIAS},
- {dictionary, ?DIAMETER_DICT_COMMON},
- {module, ?CALLBACK_MOD}]}]).
+ {'Auth-Application-Id', [0]},
+ {string_decode, false},
+ {application, [{alias, common},
+ {dictionary, diameter_gen_base_rfc6733},
+ {module, client_cb}]}]).
%% start/1
start(Name)
when is_atom(Name) ->
- peer:start(Name, ?SERVICE(Name)).
+ start(Name, []);
+
+start(Opts)
+ when is_list(Opts) ->
+ start(?DEF_SVC_NAME, Opts).
+
+%% start/0
start() ->
- start(?SVC_NAME).
+ start(?DEF_SVC_NAME).
+
+%% start/2
+
+start(Name, Opts) ->
+ node:start(Name, Opts ++ [T || {K,_} = T <- ?SERVICE(Name),
+ false == lists:keymember(K, 1, Opts)]).
%% connect/2
connect(Name, T) ->
- peer:connect(Name, T).
+ node:connect(Name, T).
connect(T) ->
- connect(?SVC_NAME, T).
+ connect(?DEF_SVC_NAME, T).
%% call/1
@@ -99,10 +110,10 @@ call(Name) ->
RAR = #diameter_base_RAR{'Session-Id' = SId,
'Auth-Application-Id' = 0,
'Re-Auth-Request-Type' = 0},
- diameter:call(Name, ?APP_ALIAS, RAR, []).
+ diameter:call(Name, common, RAR, []).
call() ->
- call(?SVC_NAME).
+ call(?DEF_SVC_NAME).
%% cast/1
@@ -111,15 +122,15 @@ cast(Name) ->
RAR = ['RAR', {'Session-Id', SId},
{'Auth-Application-Id', 0},
{'Re-Auth-Request-Type', 1}],
- diameter:call(Name, ?APP_ALIAS, RAR, [detach]).
+ diameter:call(Name, common, RAR, [detach]).
cast() ->
- cast(?SVC_NAME).
+ cast(?DEF_SVC_NAME).
%% stop/1
stop(Name) ->
- peer:stop(Name).
+ node:stop(Name).
stop() ->
- stop(?SVC_NAME).
+ stop(?DEF_SVC_NAME).
diff --git a/lib/diameter/examples/code/node.erl b/lib/diameter/examples/code/node.erl
new file mode 100644
index 0000000000..4fe9007059
--- /dev/null
+++ b/lib/diameter/examples/code/node.erl
@@ -0,0 +1,174 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+%% A library module used by the example Diameter nodes. Does little
+%% more than provide an alternate/simplified transport configuration.
+%%
+
+-module(node).
+
+-export([start/2,
+ listen/2,
+ connect/2,
+ stop/1]).
+
+-type protocol()
+ :: tcp | sctp.
+
+-type ip_address()
+ :: default
+ | inet:ip_address().
+
+-type server_transport()
+ :: protocol()
+ | {protocol(), ip_address(), non_neg_integer()}.
+
+-type server_opts()
+ :: server_transport()
+ | {server_transport(), [diameter:transport_opt()]}
+ | [diameter:transport_opt()].
+
+-type client_transport()
+ :: protocol() | any
+ | {protocol() | any, ip_address(), non_neg_integer()}
+ | {protocol() | any, ip_address(), ip_address(), non_neg_integer()}.
+
+-type client_opts()
+ :: client_transport()
+ | {client_transport(), [diameter:transport_opt()]}
+ | [diameter:transport_opt()].
+
+%% The server_transport() and client_transport() config is just
+%% convenience: arbitrary options can be specified as a
+%% [diameter:transport_opt()].
+
+-define(DEFAULT_PORT, 3868).
+
+%% ---------------------------------------------------------------------------
+%% Interface functions
+%% ---------------------------------------------------------------------------
+
+%% start/2
+
+-spec start(diameter:service_name(), [diameter:service_opt()])
+ -> ok
+ | {error, term()}.
+
+start(Name, Opts)
+ when is_atom(Name), is_list(Opts) ->
+ diameter:start_service(Name, Opts).
+
+%% connect/2
+
+-spec connect(diameter:service_name(), client_opts())
+ -> {ok, diameter:transport_ref()}
+ | {error, term()}.
+
+connect(Name, Opts)
+ when is_list(Opts) ->
+ diameter:add_transport(Name, {connect, Opts});
+
+connect(Name, {T, Opts}) ->
+ connect(Name, Opts ++ client_opts(T));
+
+connect(Name, T) ->
+ connect(Name, [{connect_timer, 5000} | client_opts(T)]).
+
+%% listen/2
+
+-spec listen(diameter:service_name(), server_opts())
+ -> {ok, diameter:transport_ref()}
+ | {error, term()}.
+
+listen(Name, Opts)
+ when is_list(Opts) ->
+ diameter:add_transport(Name, {listen, Opts});
+
+listen(Name, {T, Opts}) ->
+ listen(Name, Opts ++ server_opts(T));
+
+listen(Name, T) ->
+ listen(Name, server_opts(T)).
+
+%% stop/1
+
+-spec stop(diameter:service_name())
+ -> ok
+ | {error, term()}.
+
+stop(Name) ->
+ diameter:stop_service(Name).
+
+%% ---------------------------------------------------------------------------
+%% Internal functions
+%% ---------------------------------------------------------------------------
+
+%% server_opts/1
+%%
+%% Return transport options for a listening transport.
+
+server_opts({T, Addr, Port}) ->
+ [{transport_module, tmod(T)},
+ {transport_config, [{reuseaddr, true},
+ {ip, addr(Addr)},
+ {port, Port}]}];
+
+server_opts(T) ->
+ server_opts({T, loopback, ?DEFAULT_PORT}).
+
+%% client_opts/1
+%%
+%% Return transport options for a connecting transport.
+
+client_opts({T, LA, RA, RP})
+ when T == all; %% backwards compatibility
+ T == any ->
+ [[S, {C,Os}], T] = [client_opts({P, LA, RA, RP}) || P <- [sctp,tcp]],
+ [S, {C,Os,2000} | T];
+
+client_opts({T, LA, RA, RP}) ->
+ [{transport_module, tmod(T)},
+ {transport_config, [{raddr, addr(RA)},
+ {rport, RP},
+ {reuseaddr, true}
+ | ip(LA)]}];
+
+client_opts({T, RA, RP}) ->
+ client_opts({T, default, RA, RP});
+
+client_opts(T) ->
+ client_opts({T, loopback, loopback, ?DEFAULT_PORT}).
+
+%% ---------------------------------------------------------------------------
+
+tmod(tcp) -> diameter_tcp;
+tmod(sctp) -> diameter_sctp.
+
+ip(default) ->
+ [];
+ip(loopback) ->
+ [{ip, {127,0,0,1}}];
+ip(Addr) ->
+ [{ip, Addr}].
+
+addr(loopback) ->
+ {127,0,0,1};
+addr(A) ->
+ A.
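A usage sketch, not part of the diff: assuming the example modules in this directory (node, server, server_cb, client, client_cb) have been compiled and are in the code path, they can be exercised from an Erlang shell roughly as follows.

%% Start the diameter application, a listening server and a connecting
%% client, then send a RAR once the connection has come up.
ok = diameter:start(),
ok = server:start(),
{ok, _} = server:listen(tcp),
ok = client:start(),
{ok, _} = client:connect(tcp),
%% ... wait for the connection to be established ...
client:call().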
diff --git a/lib/diameter/examples/code/peer.erl b/lib/diameter/examples/code/peer.erl
deleted file mode 100644
index 7519abfb2c..0000000000
--- a/lib/diameter/examples/code/peer.erl
+++ /dev/null
@@ -1,150 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-
-%%
-%% A library module that factors out commonality in the example
-%% Diameter peers.
-%%
-
--module(peer).
-
--include_lib("diameter/include/diameter.hrl").
--include_lib("diameter/include/diameter_gen_base_rfc3588.hrl").
-
--export([start/2,
- listen/2,
- connect/2,
- stop/1]).
-
--type service_name()
- :: term().
-
--type protocol()
- :: tcp | sctp.
-
--type ip_address()
- :: default
- | inet:ip_address().
-
--type server_config()
- :: protocol()
- | {protocol(), ip_address(), non_neg_integer()}.
-
--type client_config()
- :: protocol()
- | {protocol(), ip_address(), non_neg_integer()}
- | {protocol(), ip_address(), ip_address(), non_neg_integer()}.
-
--define(DEFAULT_PORT, 3868).
-
-%% ---------------------------------------------------------------------------
-%% Interface functions
-%% ---------------------------------------------------------------------------
-
-%% start/2
-
--spec start(service_name(), list())
- -> ok
- | {error, term()}.
-
-start(Name, Opts)
- when is_atom(Name), is_list(Opts) ->
- diameter:start_service(Name, Opts).
-
-%% connect/2
-
--spec connect(service_name(), client_config())
- -> {ok, reference()}
- | {error, term()}.
-
-connect(Name, T) ->
- diameter:add_transport(Name, {connect, [{connect_timer, 5000}
- | client(T)]}).
-
-%% listen/2
-
--spec listen(service_name(), server_config())
- -> {ok, reference()}
- | {error, term()}.
-
-listen(Name, T) ->
- diameter:add_transport(Name, {listen, server(T)}).
-
-%% stop/1
-
--spec stop(service_name())
- -> ok
- | {error, term()}.
-
-stop(Name) ->
- diameter:stop_service(Name).
-
-%% ---------------------------------------------------------------------------
-%% Internal functions
-%% ---------------------------------------------------------------------------
-
-%% server/1
-%%
-%% Return config for a listening transport.
-
-server({T, Addr, Port}) ->
- [{transport_module, tmod(T)},
- {transport_config, [{reuseaddr, true},
- {ip, addr(Addr)},
- {port, Port}]}];
-
-server(T) ->
- server({T, loopback, ?DEFAULT_PORT}).
-
-%% client/1
-%%
-%% Return config for a connecting transport.
-
-client({all, LA, RA, RP}) ->
- [[M,{K,C}], T]
- = [client({P, LA, RA, RP}) || P <- [sctp,tcp]],
- [M, {K,C,2000} | T];
-
-client({T, LA, RA, RP}) ->
- [{transport_module, tmod(T)},
- {transport_config, [{raddr, addr(RA)},
- {rport, RP},
- {reuseaddr, true}
- | ip(LA)]}];
-
-client({T, RA, RP}) ->
- client({T, default, RA, RP});
-
-client(T) ->
- client({T, loopback, loopback, ?DEFAULT_PORT}).
-
-tmod(tcp) -> diameter_tcp;
-tmod(sctp) -> diameter_sctp.
-
-ip(default) ->
- [];
-ip(loopback) ->
- [{ip, {127,0,0,1}}];
-ip(Addr) ->
- [{ip, Addr}].
-
-addr(loopback) ->
- {127,0,0,1};
-addr(A) ->
- A.
diff --git a/lib/diameter/examples/code/relay.erl b/lib/diameter/examples/code/relay.erl
index d3438f83f3..7bc46dc68d 100644
--- a/lib/diameter/examples/code/relay.erl
+++ b/lib/diameter/examples/code/relay.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -31,10 +31,8 @@
-module(relay).
--include_lib("diameter/include/diameter.hrl").
--include_lib("diameter/include/diameter_gen_base_rfc3588.hrl").
-
-export([start/1,
+ start/2,
listen/2,
connect/2,
stop/1]).
@@ -44,49 +42,56 @@
connect/1,
stop/0]).
--define(APP_ALIAS, ?MODULE).
--define(SVC_NAME, ?MODULE).
--define(CALLBACK_MOD, relay_cb).
+-define(DEF_SVC_NAME, ?MODULE).
%% The service configuration.
-define(SERVICE(Name), [{'Origin-Host', atom_to_list(Name) ++ ".example.com"},
{'Origin-Realm', "example.com"},
{'Vendor-Id', 193},
{'Product-Name', "RelayAgent"},
- {'Auth-Application-Id', [?DIAMETER_APP_ID_RELAY]},
- {application, [{alias, ?MODULE},
- {dictionary, ?DIAMETER_DICT_RELAY},
- {module, ?CALLBACK_MOD}]}]).
+ {'Auth-Application-Id', [16#FFFFFFFF]},
+ {string_decode, false},
+ {application, [{alias, relay},
+ {dictionary, diameter_relay},
+ {module, relay_cb}]}]).
%% start/1
start(Name)
when is_atom(Name) ->
- peer:start(Name, ?SERVICE(Name)).
+ start(Name, []).
+
+%% start/0
start() ->
- start(?SVC_NAME).
+ start(?DEF_SVC_NAME).
+
+%% start/2
+
+start(Name, Opts) ->
+ node:start(Name, Opts ++ [T || {K,_} = T <- ?SERVICE(Name),
+ false == lists:keymember(K, 1, Opts)]).
%% listen/2
listen(Name, T) ->
- peer:listen(Name, T).
+ node:listen(Name, T).
listen(T) ->
- listen(?SVC_NAME, T).
+ listen(?DEF_SVC_NAME, T).
%% connect/2
connect(Name, T) ->
- peer:connect(Name, T).
+ node:connect(Name, T).
connect(T) ->
- connect(?SVC_NAME, T).
+ connect(?DEF_SVC_NAME, T).
%% stop/1
stop(Name) ->
- peer:stop(Name).
+ node:stop(Name).
stop() ->
- stop(?SVC_NAME).
+ stop(?DEF_SVC_NAME).
diff --git a/lib/diameter/examples/code/server.erl b/lib/diameter/examples/code/server.erl
index 3959461cec..f32cec594c 100644
--- a/lib/diameter/examples/code/server.erl
+++ b/lib/diameter/examples/code/server.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -34,21 +34,17 @@
-module(server).
--include_lib("diameter/include/diameter.hrl").
--include_lib("diameter/include/diameter_gen_base_rfc3588.hrl").
-
-export([start/1, %% start a service
+ start/2, %%
listen/2, %% add a listening transport
stop/1]). %% stop a service
-%% Convenience functions using the default service name, ?SVC_NAME.
+%% Convenience functions using the default service name.
-export([start/0,
listen/1,
stop/0]).
--define(SVC_NAME, ?MODULE).
--define(APP_ALIAS, ?MODULE).
--define(CALLBACK_MOD, server_cb).
+-define(DEF_SVC_NAME, ?MODULE).
%% The service configuration. In a server supporting multiple Diameter
%% applications each application may have its own, although they could all
@@ -57,32 +53,46 @@
{'Origin-Realm', "example.com"},
{'Vendor-Id', 193},
{'Product-Name', "Server"},
- {'Auth-Application-Id', [?DIAMETER_APP_ID_COMMON]},
- {application, [{alias, ?APP_ALIAS},
- {dictionary, ?DIAMETER_DICT_COMMON},
- {module, ?CALLBACK_MOD}]}]).
+ {'Auth-Application-Id', [0]},
+ {restrict_connections, false},
+ {string_decode, false},
+ {application, [{alias, common},
+ {dictionary, diameter_gen_base_rfc6733},
+ {module, server_cb}]}]).
%% start/1
start(Name)
when is_atom(Name) ->
- peer:start(Name, ?SERVICE(Name)).
+ start(Name, []);
+
+start(Opts)
+ when is_list(Opts) ->
+ start(?DEF_SVC_NAME, Opts).
+
+%% start/0
start() ->
- start(?SVC_NAME).
+ start(?DEF_SVC_NAME).
+
+%% start/2
+
+start(Name, Opts) ->
+ node:start(Name, Opts ++ [T || {K,_} = T <- ?SERVICE(Name),
+ false == lists:keymember(K, 1, Opts)]).
%% listen/2
listen(Name, T) ->
- peer:listen(Name, T).
+ node:listen(Name, T).
listen(T) ->
- listen(?SVC_NAME, T).
+ listen(?DEF_SVC_NAME, T).
%% stop/1
stop(Name) ->
- peer:stop(Name).
+ node:stop(Name).
stop() ->
- stop(?SVC_NAME).
+ stop(?DEF_SVC_NAME).
diff --git a/lib/diameter/examples/code/server_cb.erl b/lib/diameter/examples/code/server_cb.erl
index 9d8d395d06..071e152493 100644
--- a/lib/diameter/examples/code/server_cb.erl
+++ b/lib/diameter/examples/code/server_cb.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,7 +24,7 @@
-module(server_cb).
-include_lib("diameter/include/diameter.hrl").
--include_lib("diameter/include/diameter_gen_base_rfc3588.hrl").
+-include_lib("diameter/include/diameter_gen_base_rfc6733.hrl").
%% diameter callbacks
-export([peer_up/3,
diff --git a/lib/diameter/include/diameter_gen.hrl b/lib/diameter/include/diameter_gen.hrl
index bc25f7d472..e8ffe7f92c 100644
--- a/lib/diameter/include/diameter_gen.hrl
+++ b/lib/diameter/include/diameter_gen.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -25,6 +25,9 @@
-define(THROW(T), throw({?MODULE, T})).
+%% Tag common to generated dictionaries.
+-define(TAG, diameter_gen).
+
%% Key to a value in the process dictionary that determines whether or
%% not an unrecognized AVP setting the M-bit should be regarded as an
%% error or not. See is_strict/0.
@@ -48,13 +51,20 @@
%% dictionary.
putr(K,V) ->
- put({?MODULE, K}, V).
+ put({?TAG, K}, V).
getr(K) ->
- get({?MODULE, K}).
+ case get({?TAG, K}) of
+ undefined ->
+ V = erase({?MODULE, K}), %% written in old code
+ V == undefined orelse putr(K,V),
+ V;
+ V ->
+ V
+ end.
eraser(K) ->
- erase({?MODULE, K}).
+ erase({?TAG, K}).
%% ---------------------------------------------------------------------------
%% # encode_avps/2
@@ -313,12 +323,20 @@ d(Name, Avp, Acc) ->
%% decode is packed into 'AVP'.
Mod = dict(Failed), %% Dictionary to decode in.
+ %% On decode, a Grouped AVP is represented as a #diameter_avp{}
+ %% list with AVP as head and component AVPs as tail. On encode,
+ %% data can be a list of component AVPs.
+
try Mod:avp(decode, Data, AvpName) of
V ->
{Avps, T} = Acc,
{H, A} = ungroup(V, Avp),
{[H | Avps], pack_avp(Name, A, T)}
catch
+ throw: {?TAG, {grouped, RC, ComponentAvps}} ->
+ {Avps, {Rec, Errors}} = Acc,
+ A = trim(Avp),
+ {[[A | trim(ComponentAvps)] | Avps], {Rec, [{RC, A} | Errors]}};
error: Reason ->
d(undefined == Failed orelse is_failed(),
Reason,
@@ -338,6 +356,10 @@ d(Name, Avp, Acc) ->
trim(#diameter_avp{data = <<0:1, Bin/binary>>} = Avp) ->
Avp#diameter_avp{data = Bin};
+trim(Avps)
+ when is_list(Avps) ->
+ lists:map(fun trim/1, Avps);
+
trim(Avp) ->
Avp.
@@ -373,7 +395,7 @@ d(false, Reason, Name, Avp, {Avps, Acc}) ->
diameter_lib:log(decode_error,
?MODULE,
?LINE,
- {Reason, Name, Avp#diameter_avp.name, Stack}),
+ {Name, Avp#diameter_avp.name, Stack}),
{Rec, Failed} = Acc,
{[Avp|Avps], {Rec, [rc(Reason, Avp) | Failed]}}.
@@ -423,7 +445,7 @@ reset(_, _) ->
%% undecoded. Note that the type field is 'undefined' in this case.
decode_AVP(Name, Avp, {Avps, Acc}) ->
- {[Avp | Avps], pack_AVP(Name, Avp, Acc)}.
+ {[trim(Avp) | Avps], pack_AVP(Name, Avp, Acc)}.
%% rc/1
@@ -582,22 +604,37 @@ value(_, Avp) ->
%% # grouped_avp/3
%% ---------------------------------------------------------------------------
--spec grouped_avp(decode, avp_name(), binary())
+-spec grouped_avp(decode, avp_name(), bitstring())
-> {avp_record(), [avp()]};
(encode, avp_name(), avp_record() | avp_values())
-> binary()
| no_return().
+%% Length error induced by diameter_codec:collect_avps/1.
+grouped_avp(decode, _Name, <<0:1, _/binary>>) ->
+ throw({?TAG, {grouped, 5014, []}});
+
grouped_avp(decode, Name, Data) ->
- {Rec, Avps, []} = decode_avps(Name, diameter_codec:collect_avps(Data)),
- {Rec, Avps};
-%% A failed match here will result in 5004. Note that this is the only
-%% AVP type that doesn't just return the decoded record, also
-%% returning the list of component AVP's.
+ grouped_decode(Name, diameter_codec:collect_avps(Data));
grouped_avp(encode, Name, Data) ->
encode_avps(Name, Data).
+%% grouped_decode/2
+%%
+%% Note that Grouped is the only AVP type that doesn't just return a
+%% decoded value, also returning the list of component diameter_avp
+%% records.
+
+grouped_decode(_Name, {Error, Acc}) ->
+ {RC, Avp} = Error,
+ throw({?TAG, {grouped, RC, [Avp | Acc]}});
+
+grouped_decode(Name, ComponentAvps) ->
+ {Rec, Avps, Es} = decode_avps(Name, ComponentAvps),
+ [] == Es orelse throw({?TAG, {grouped, 5004, Avps}}), %% decode failure
+ {Rec, Avps}.
+
%% ---------------------------------------------------------------------------
%% # empty_group/1
%% ---------------------------------------------------------------------------
diff --git a/lib/diameter/src/base/diameter.erl b/lib/diameter/src/base/diameter.erl
index d74e091e11..010f977b97 100644
--- a/lib/diameter/src/base/diameter.erl
+++ b/lib/diameter/src/base/diameter.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -45,6 +45,7 @@
-export_type([evaluable/0,
restriction/0,
+ message_length/0,
remotes/0,
sequence/0,
app_alias/0,
@@ -298,6 +299,9 @@ call(SvcName, App, Message) ->
| [node()]
| evaluable().
+-type message_length()
+ :: 0..16#FFFFFF.
+
%% Options passed to start_service/2
-type service_opt()
@@ -306,6 +310,8 @@ call(SvcName, App, Message) ->
| {restrict_connections, restriction()}
| {sequence, sequence() | evaluable()}
| {share_peers, remotes()}
+ | {string_decode, boolean()}
+ | {incoming_maxlen, message_length()}
| {use_shared_peers, remotes()}
| {spawn_opt, list()}.
@@ -337,11 +343,14 @@ call(SvcName, App, Message) ->
:: {transport_module, atom()}
| {transport_config, any()}
| {transport_config, any(), 'Unsigned32'() | infinity}
+ | {pool_size, pos_integer()}
| {applications, [app_alias()]}
| {capabilities, [capability()]}
| {capabilities_cb, evaluable()}
| {capx_timeout, 'Unsigned32'()}
| {disconnect_cb, evaluable()}
+ | {dpr_timeout, 'Unsigned32'()}
+ | {dpa_timeout, 'Unsigned32'()}
| {length_errors, exit | handle | discard}
| {connect_timer, 'Unsigned32'()}
| {watchdog_timer, 'Unsigned32'() | {module(), atom(), list()}}
diff --git a/lib/diameter/src/base/diameter_capx.erl b/lib/diameter/src/base/diameter_capx.erl
index 93548ecafd..7dc61f229f 100644
--- a/lib/diameter/src/base/diameter_capx.erl
+++ b/lib/diameter/src/base/diameter_capx.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -50,7 +50,8 @@
-export([build_CER/2,
recv_CER/3,
recv_CEA/3,
- make_caps/2]).
+ make_caps/2,
+ binary_caps/1]).
-include_lib("diameter/include/diameter.hrl").
-include("diameter_internal.hrl").
@@ -115,7 +116,8 @@ mk_caps(Caps0, Opts) ->
-define(SC(K,F),
set_cap({K, Val}, {Caps, #diameter_caps{F = false} = C}) ->
- {Caps#diameter_caps{F = cap(K, Val)}, C#diameter_caps{F = true}}).
+ {Caps#diameter_caps{F = cap(K, copy(Val))},
+ C#diameter_caps{F = true}}).
?SC('Origin-Host', origin_host);
?SC('Origin-Realm', origin_realm);
@@ -375,10 +377,10 @@ capx_to_caps(CEX, Dict) ->
'Firmware-Revision',
'AVP'],
CEX),
- #diameter_caps{origin_host = OH,
- origin_realm = OR,
+ #diameter_caps{origin_host = copy(OH),
+ origin_realm = copy(OR),
vendor_id = VId,
- product_name = PN,
+ product_name = copy(PN),
origin_state_id = OSI,
host_ip_address = IP,
supported_vendor_id = SV,
@@ -389,6 +391,32 @@ capx_to_caps(CEX, Dict) ->
firmware_revision = FR,
avp = X}.
+%% Copy binaries to avoid retaining a reference to a large binary
+%% containing AVPs we aren't interested in.
+copy(B)
+ when is_binary(B) ->
+ binary:copy(B);
+
+copy(T) ->
+ T.
+
+%% binary_caps/1
+%%
+%% Encode stringish capabilities with {string_decode, false}.
+
+binary_caps(Caps) ->
+ lists:foldl(fun bcaps/2, Caps, [#diameter_caps.origin_host,
+ #diameter_caps.origin_realm,
+ #diameter_caps.product_name]).
+
+bcaps(N, Caps) ->
+ case element(N, Caps) of
+ undefined ->
+ Caps;
+ V ->
+ setelement(N, Caps, iolist_to_binary(V))
+ end.
+
%% ---------------------------------------------------------------------------
%% ---------------------------------------------------------------------------
diff --git a/lib/diameter/src/base/diameter_codec.erl b/lib/diameter/src/base/diameter_codec.erl
index a2b04bfd63..bf2fe8e7ca 100644
--- a/lib/diameter/src/base/diameter_codec.erl
+++ b/lib/diameter/src/base/diameter_codec.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -22,6 +22,8 @@
-export([encode/2,
decode/2,
decode/3,
+ setopts/1,
+ getopt/1,
collect_avps/1,
decode_header/1,
sequence_numbers/1,
@@ -59,6 +61,50 @@
%% +-+-+-+-+-+-+-+-+-+-+-+-+-
%%% ---------------------------------------------------------------------------
+%%% # setopts/1
+%%% # getopt/1
+%%% ---------------------------------------------------------------------------
+
+%% These functions are a compromise in the same vein as the use of the
+%% process dictionary in diameter_gen.hrl in generated codec modules.
+%% Instead of rewriting the entire dictionary generation to pass
+%% encode/decode options around, the calling process sets them by
+%% calling setopts/1. Currently, the only option is whether or not to
+%% decode binaries as strings, which is used by diameter_types.
+
+setopts(Opts)
+ when is_list(Opts) ->
+ lists:foreach(fun setopt/1, Opts).
+
+%% Decode stringish types to string()? The default true is for
+%% backwards compatibility.
+setopt({string_decode = K, false = B}) ->
+ setopt(K, B);
+
+%% Regard anything but the generated RFC 3588 dictionary as modern.
+%% This affects the interpretation of defaults during the decode
+%% of values of type DiameterURI, this having changed from RFC 3588.
+%% (So much for backwards compatibility.)
+setopt({common_dictionary, diameter_gen_base_rfc3588}) ->
+ setopt(rfc, 3588);
+
+setopt(_) ->
+ ok.
+
+setopt(Key, Value) ->
+ put({diameter, Key}, Value).
+
+getopt(Key) ->
+ case get({diameter, Key}) of
+ undefined when Key == string_decode ->
+ true;
+ undefined when Key == rfc ->
+ 6733;
+ V ->
+ V
+ end.
+
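+%% For example, a decoding process might do (defaults as above):
+%%
+%%   ok = diameter_codec:setopts([{string_decode, false}]),
+%%   false = diameter_codec:getopt(string_decode),
+%%   6733 = diameter_codec:getopt(rfc)
+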
+%%% ---------------------------------------------------------------------------
%%% # encode/2
%%% ---------------------------------------------------------------------------
@@ -90,7 +136,7 @@ encode(Mod, Msg) ->
msg = Msg}).
e(_, #diameter_packet{msg = [#diameter_header{} = Hdr | As]} = Pkt) ->
- try encode_avps(As) of
+ try encode_avps(reorder(As)) of
Avps ->
Length = size(Avps) + 20,
@@ -183,26 +229,50 @@ values(Avps) ->
%% Message as a list of #diameter_avp{} ...
encode_avps(_, _, [#diameter_avp{} | _] = Avps) ->
- encode_avps(reorder(Avps, [], Avps));
+ encode_avps(reorder(Avps));
%% ... or as a tuple list or record.
encode_avps(Mod, MsgName, Values) ->
Mod:encode_avps(MsgName, Values).
%% reorder/1
+%%
+%% Reorder AVPs for the relay case using the index field of
+%% diameter_avp records. Decode populates this field in collect_avps
+%% and presents AVPs in reverse order. A relay then sends the reversed
+%% list with a Route-Record AVP prepended. The goal here is just to do
+%% lists:reverse/1 in Grouped AVPs and the outer list, but only in the
+%% case there are indexed AVPs at all, so as not to reverse lists that
+%% have been explicilty sent (unindexed, in the desired order) as a
+%% diameter_avp list. The effect is the same as lists:keysort/2, but
+%% only on the cases we expect, not a general sort.
+
+reorder(Avps) ->
+ case reorder(Avps, []) of
+ false ->
+ Avps;
+ Sorted ->
+ Sorted
+ end.
+
+%% reorder/2
-reorder([#diameter_avp{index = 0} | _] = Avps, Acc, _) ->
+%% In case someone has reversed the list already. (Not likely.)
+reorder([#diameter_avp{index = 0} | _] = Avps, Acc) ->
Avps ++ Acc;
-reorder([#diameter_avp{index = N} = A | Avps], Acc, _)
+%% Assume indexed AVPs are in reverse order.
+reorder([#diameter_avp{index = N} = A | Avps], Acc)
when is_integer(N) ->
lists:reverse(Avps, [A | Acc]);
-reorder([H | T], Acc, Avps) ->
- reorder(T, [H | Acc], Avps);
+%% An unindexed AVP.
+reorder([H | T], Acc) ->
+ reorder(T, [H | Acc]);
-reorder([], Acc, _) ->
- Acc.
+%% No indexed members.
+reorder([], _) ->
+ false.
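+
+%% For example, a relayed [Route, AvpN, ..., Avp1, Avp0], in which only
+%% the prepended Route-Record lacks an index, is reordered here to
+%% [Avp0, Avp1, ..., AvpN, Route] before encode.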
%% encode_avps/1
@@ -390,6 +460,9 @@ sequence_numbers(#diameter_packet{bin = Bin})
sequence_numbers(#diameter_packet{header = #diameter_header{} = H}) ->
sequence_numbers(H);
+sequence_numbers(#diameter_packet{msg = [#diameter_header{} = H | _]}) ->
+ sequence_numbers(H);
+
sequence_numbers(#diameter_header{hop_by_hop_id = H,
end_to_end_id = E}) ->
{H,E};
@@ -561,14 +634,18 @@ split_data(Bin, Len) ->
<<Data:Len/binary, _:Pad/binary, Rest/binary>> ->
{Data, Rest};
_ ->
- %% Header length points past the end of the message. As
- %% stated in the 6733 text above, it's sufficient to
- %% return a zero-filled minimal payload if this is a
- %% request. Do this (in cases that we know the type) by
- %% inducing a decode failure and letting the dictionary's
- %% decode (in diameter_gen) deal with it. Here we don't
- %% know type. If the type isn't known, then the decode
- %% just strips the extra bit.
+ %% Header length points past the end of the message, or
+ %% doesn't span the header. As stated in the 6733 text
+ %% above, it's sufficient to return a zero-filled minimal
+ %% payload if this is a request. Do this (in cases that we
+ %% know the type) by inducing a decode failure and letting
+ %% the dictionary's decode (in diameter_gen) deal with it.
+ %%
+ %% Note that the extra bit can only occur in the trailing
+ %% AVP of a message or Grouped AVP, since a faulty AVP
+ %% Length is otherwise indistinguishable from a correct
+            %% one here, given that we don't know the types of the
+            %% AVPs being extracted.
{<<0:1, Bin/binary>>, <<>>}
end.
@@ -582,6 +659,8 @@ split_data(Bin, Len) ->
%% dictionary doesn't know about specific AVP's.
%% Grouped AVP whose components need packing ...
+pack_avp([#diameter_avp{} = A | Avps]) ->
+ pack_avp(A#diameter_avp{data = Avps});
pack_avp(#diameter_avp{data = [#diameter_avp{} | _] = Avps} = A) ->
pack_avp(A#diameter_avp{data = encode_avps(Avps)});
@@ -615,8 +694,8 @@ pack_avp(#diameter_avp{code = undefined, data = B})
Len = size(<<H:5/binary, _:24, T/binary>> = <<B/binary, 0:Pad>>),
<<H/binary, Len:24, T/binary>>;
-%% ... from a dictionary compiled against old code in diameter_gen ...
%% ... when ignoring errors in Failed-AVP ...
+%% ... during a relay encode ...
pack_avp(#diameter_avp{data = <<0:1, B/binary>>} = A) ->
pack_avp(A#diameter_avp{data = B});
diff --git a/lib/diameter/src/base/diameter_config.erl b/lib/diameter/src/base/diameter_config.erl
index dd1c9b73bb..8ac3b9d6ca 100644
--- a/lib/diameter/src/base/diameter_config.erl
+++ b/lib/diameter/src/base/diameter_config.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -35,10 +35,11 @@
%%
-module(diameter_config).
--compile({no_auto_import, [monitor/2]}).
-
-behaviour(gen_server).
+-compile({no_auto_import, [monitor/2, now/0]}).
+-import(diameter_lib, [now/0]).
+
-export([start_service/2,
stop_service/1,
add_transport/2,
@@ -158,7 +159,8 @@ stop_service(SvcName) ->
%% # add_transport/2
%% --------------------------------------------------------------------------
--spec add_transport(diameter:service_name(), {connect|listen, [diameter:transport_opt()]})
+-spec add_transport(diameter:service_name(),
+ {connect|listen, [diameter:transport_opt()]})
-> {ok, diameter:transport_ref()}
| {error, term()}.
@@ -531,7 +533,10 @@ opt({applications, As}) ->
opt({capabilities, Os}) ->
is_list(Os) andalso ok == encode_CER(Os);
-opt({capx_timeout, Tmo}) ->
+opt({K, Tmo})
+ when K == capx_timeout;
+ K == dpr_timeout;
+ K == dpa_timeout ->
?IS_UINT32(Tmo);
opt({length_errors, T}) ->
@@ -554,6 +559,9 @@ opt({watchdog_config, L}) ->
opt({spawn_opt, Opts}) ->
is_list(Opts);
+opt({pool_size, N}) ->
+ is_integer(N) andalso 0 < N;
+
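+%% For example (illustrative values), a connecting transport might be
+%% configured with a pool of 5 connections:
+%%
+%%   diameter:add_transport(SvcName, {connect, [{pool_size, 5} | Opts]})
+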
%% Options that we can't validate.
opt({K, _})
when K == transport_config;
@@ -638,13 +646,24 @@ make_config(SvcName, Opts) ->
{false, monitor},
{?NOMASK, sequence},
{nodes, restrict_connections},
+ {16#FFFFFF, incoming_maxlen},
+ {true, string_decode},
{[], spawn_opt}]),
+ D = proplists:get_value(string_decode, SvcOpts, true),
+
#service{name = SvcName,
rec = #diameter_service{applications = Apps,
- capabilities = Caps},
+ capabilities = binary_caps(Caps, D)},
options = SvcOpts}.
+binary_caps(Caps, true) ->
+ Caps;
+binary_caps(Caps, false) ->
+ diameter_capx:binary_caps(Caps).
+
+%% make_opts/2
+
make_opts(Opts, Defs) ->
Known = [{K, get_opt(K, Opts, D)} || {D,K} <- Defs],
Unknown = Opts -- Known,
@@ -653,17 +672,26 @@ make_opts(Opts, Defs) ->
[{K, opt(K,V)} || {K,V} <- Known].
+opt(incoming_maxlen, N)
+ when 0 =< N, N < 1 bsl 24 ->
+ N;
+
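+%% For example (illustrative values), a service might be configured
+%% with:
+%%
+%%   diameter:start_service(Name, [{string_decode, false},
+%%                                 {incoming_maxlen, 16#FFFF}
+%%                                 | Opts])
+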
opt(spawn_opt, L)
when is_list(L) ->
L;
opt(K, false = B)
- when K /= sequence ->
+ when K == share_peers;
+ K == use_shared_peers;
+ K == monitor;
+ K == restrict_connections;
+ K == string_decode ->
B;
opt(K, true = B)
when K == share_peers;
- K == use_shared_peers ->
+ K == use_shared_peers;
+ K == string_decode ->
B;
opt(restrict_connections, T)
diff --git a/lib/diameter/src/base/diameter_lib.erl b/lib/diameter/src/base/diameter_lib.erl
index 5b3a2063f8..51d203d722 100644
--- a/lib/diameter/src/base/diameter_lib.erl
+++ b/lib/diameter/src/base/diameter_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -18,12 +18,19 @@
%%
-module(diameter_lib).
+-compile({no_auto_import, [now/0]}).
+-compile({nowarn_deprecated_function, [{erlang, now, 0}]}).
-export([info_report/2,
error_report/2,
warning_report/2,
+ now/0,
+ timestamp/1,
now_diff/1,
+ micro_diff/1,
+ micro_diff/2,
time/1,
+ seed/0,
eval/1,
eval_name/1,
get_stacktrace/0,
@@ -31,6 +38,8 @@
spawn_opts/2,
wait/1,
fold_tuple/3,
+ fold_n/3,
+ for_n/2,
log/4]).
%% ---------------------------------------------------------------------------
@@ -90,13 +99,51 @@ fmt(T) ->
end.
%% ---------------------------------------------------------------------------
+%% # now/0
+%% ---------------------------------------------------------------------------
+
+-type timestamp() :: {non_neg_integer(), 0..999999, 0..999999}.
+-type now() :: integer() %% monotonic time
+ | timestamp().
+
+-spec now()
+ -> now().
+
+%% Use monotonic time if it exists, fall back to erlang:now()
+%% otherwise.
+
+now() ->
+ try
+ erlang:monotonic_time()
+ catch
+ error: undef -> erlang:now()
+ end.
+
+%% ---------------------------------------------------------------------------
+%% # timestamp/1
+%% ---------------------------------------------------------------------------
+
+-spec timestamp(NowT :: now())
+ -> timestamp().
+
+timestamp({_,_,_} = T) -> %% erlang:now()
+ T;
+
+timestamp(MonoT) -> %% monotonic time
+ MicroSecs = monotonic_to_microseconds(MonoT + erlang:time_offset()),
+ Secs = MicroSecs div 1000000,
+ {Secs div 1000000, Secs rem 1000000, MicroSecs rem 1000000}.
+
+monotonic_to_microseconds(MonoT) ->
+ erlang:convert_time_unit(MonoT, native, micro_seconds).
+
+%% ---------------------------------------------------------------------------
%% # now_diff/1
%% ---------------------------------------------------------------------------
--spec now_diff(NowT)
+-spec now_diff(NowT :: now())
-> {Hours, Mins, Secs, MicroSecs}
- when NowT :: {non_neg_integer(), 0..999999, 0..999999},
- Hours :: non_neg_integer(),
+ when Hours :: non_neg_integer(),
Mins :: 0..59,
Secs :: 0..59,
MicroSecs :: 0..999999.
@@ -104,8 +151,37 @@ fmt(T) ->
%% Return timer:now_diff(now(), NowT) as an {H, M, S, MicroS} tuple
%% instead of as integer microseconds.
-now_diff({_,_,_} = Time) ->
- time(timer:now_diff(now(), Time)).
+now_diff(Time) ->
+ time(micro_diff(Time)).
+
+%% ---------------------------------------------------------------------------
+%% # micro_diff/1
+%% ---------------------------------------------------------------------------
+
+-spec micro_diff(NowT :: now())
+ -> MicroSecs
+ when MicroSecs :: non_neg_integer().
+
+micro_diff({_,_,_} = T0) ->
+ timer:now_diff(erlang:now(), T0);
+
+micro_diff(T0) -> %% monotonic time
+ monotonic_to_microseconds(erlang:monotonic_time() - T0).
+
+%% ---------------------------------------------------------------------------
+%% # micro_diff/2
+%% ---------------------------------------------------------------------------
+
+-spec micro_diff(T1 :: now(), T0 :: now())
+ -> MicroSecs
+ when MicroSecs :: non_neg_integer().
+
+micro_diff(T1, T0)
+ when is_integer(T1), is_integer(T0) -> %% monotonic time
+ monotonic_to_microseconds(T1 - T0);
+
+micro_diff(T1, T0) -> %% at least one erlang:now()
+ timer:now_diff(timestamp(T1), timestamp(T0)).
%% ---------------------------------------------------------------------------
%% # time/1
@@ -115,7 +191,7 @@ now_diff({_,_,_} = Time) ->
-spec time(NowT | Diff)
-> {Hours, Mins, Secs, MicroSecs}
- when NowT :: {non_neg_integer(), 0..999999, 0..999999},
+ when NowT :: timestamp(),
Diff :: non_neg_integer(),
Hours :: non_neg_integer(),
Mins :: 0..59,
@@ -134,6 +210,27 @@ time(Micro) -> %% elapsed time
{H, M, S, Micro rem 1000000}.
%% ---------------------------------------------------------------------------
+%% # seed/0
+%% ---------------------------------------------------------------------------
+
+-spec seed()
+ -> {timestamp(), {integer(), integer(), integer()}}.
+
+%% Return an argument for random:seed/1.
+
+seed() ->
+ T = now(),
+ {timestamp(T), seed(T)}.
+
+%% seed/1
+
+seed({_,_,_} = T) ->
+ T;
+
+seed(T) -> %% monotonic time
+ {erlang:phash2(node()), T, erlang:unique_integer()}.
+
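+%% For example, as used in diameter_session:init/0:
+%%
+%%   {Now, Seed} = diameter_lib:seed(),
+%%   random:seed(Seed)
+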
+%% ---------------------------------------------------------------------------
%% # eval/1
%%
%% Evaluate a function in various forms.
@@ -247,17 +344,19 @@ opts(HeapSize, Opts) ->
%% # wait/1
%% ---------------------------------------------------------------------------
--spec wait([pid()])
+-spec wait([pid() | reference()])
-> ok.
wait(L) ->
- down([erlang:monitor(process, P) || P <- L]).
+ lists:foreach(fun down/1, L).
-down([]) ->
- ok;
-down([MRef|T]) ->
- receive {'DOWN', MRef, process, _, _} -> ok end,
- down(T).
+down(Pid)
+ when is_pid(Pid) ->
+ down(monitor(process, Pid));
+
+down(MRef)
+ when is_reference(MRef) ->
+ receive {'DOWN', MRef, process, _, _} = T -> T end.
%% ---------------------------------------------------------------------------
%% # fold_tuple/3
@@ -290,6 +389,35 @@ ft(Value, {Idx, T}) ->
setelement(Idx, T, Value).
%% ---------------------------------------------------------------------------
+%% # fold_n/3
+%% ---------------------------------------------------------------------------
+
+-spec fold_n(F, Acc0, N)
+ -> term()
+ when F :: fun((non_neg_integer(), term()) -> term()),
+ Acc0 :: term(),
+ N :: non_neg_integer().
+
+fold_n(F, Acc, N)
+ when is_integer(N), 0 < N ->
+ fold_n(F, F(N, Acc), N-1);
+
+fold_n(_, Acc, _) ->
+ Acc.
+
+%% ---------------------------------------------------------------------------
+%% # for_n/2
+%% ---------------------------------------------------------------------------
+
+-spec for_n(F, N)
+ -> non_neg_integer()
+ when F :: fun((non_neg_integer()) -> term()),
+ N :: non_neg_integer().
+
+for_n(F, N) ->
+ fold_n(fun(M,A) -> F(M), A+1 end, 0, N).
+
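+%% For example, fold_n(fun(_,A) -> [x|A] end, [], 3) builds [x,x,x];
+%% diameter_service uses fold_n to start pool_size watchdogs.
+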
+%% ---------------------------------------------------------------------------
%% # log/4
%%
%% Called to have something to trace on for happenings of interest.
diff --git a/lib/diameter/src/base/diameter_peer.erl b/lib/diameter/src/base/diameter_peer.erl
index e5d4b28766..89b63c8a92 100644
--- a/lib/diameter/src/base/diameter_peer.erl
+++ b/lib/diameter/src/base/diameter_peer.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -18,9 +18,11 @@
%%
-module(diameter_peer).
-
-behaviour(gen_server).
+-compile({no_auto_import, [now/0]}).
+-import(diameter_lib, [now/0]).
+
%% Interface towards transport modules ...
-export([recv/2,
up/1,
@@ -230,12 +232,22 @@ recv(Pid, Pkt) ->
%% # send/2
%% ---------------------------------------------------------------------------
-send(Pid, #diameter_packet{transport_data = undefined,
- bin = Bin}) ->
- send(Pid, Bin);
+send(Pid, Msg) ->
+ ifc_send(Pid, {send, strip(Msg)}).
+
+%% Send only binary when possible.
+strip(#diameter_packet{transport_data = undefined,
+ bin = Bin}) ->
+ Bin;
+
+%% Strip potentially large message terms.
+strip(#diameter_packet{transport_data = T,
+ bin = Bin}) ->
+ #diameter_packet{transport_data = T,
+ bin = Bin};
-send(Pid, Pkt) ->
- ifc_send(Pid, {send, Pkt}).
+strip(Msg) ->
+ Msg.
%% ---------------------------------------------------------------------------
%% # close/1
@@ -324,7 +336,6 @@ code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%% ---------------------------------------------------------
-%% INTERNAL FUNCTIONS
%% ---------------------------------------------------------
%% ifc_send/2
diff --git a/lib/diameter/src/base/diameter_peer_fsm.erl b/lib/diameter/src/base/diameter_peer_fsm.erl
index ee6e7dd89e..2255d0a76b 100644
--- a/lib/diameter/src/base/diameter_peer_fsm.erl
+++ b/lib/diameter/src/base/diameter_peer_fsm.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -63,6 +63,8 @@
%% Keys in process dictionary.
-define(CB_KEY, cb). %% capabilities callback
-define(DPR_KEY, dpr). %% disconnect callback
+-define(DPA_KEY, dpa). %% timeout for incoming DPA, or shutdown after
+ %% outgoing DPA
-define(REF_KEY, ref). %% transport_ref()
-define(Q_KEY, q). %% transport start queue
-define(START_KEY, start). %% start of connected transport
@@ -82,18 +84,26 @@
N == ?GOAWAY; N == goaway;
N == ?BUSY; N == busy).
-%% RFC 3588:
+%% RFC 6733:
%%
%% Timeout An application-defined timer has expired while waiting
%% for some event.
%%
--define(EVENT_TIMEOUT, 10000).
+
%% Default timeout for reception of CER/CEA.
+-define(CAPX_TIMEOUT, 10000).
-%% Default timeout for DPA in response to DPR. A bit short but the
-%% timeout used to be hardcoded. (So it could be worse.)
+%% Default timeout for DPA to be received in response to an outgoing
+%% DPR. A bit short but the timeout used to be hardcoded. (So it could
+%% be worse.)
-define(DPA_TIMEOUT, 1000).
+%% Default timeout for the connection to be closed by the peer
+%% following an outgoing DPA in response to an incoming DPR. It's the
+%% recipient of DPA that should close the connection according to the
+%% RFC.
+-define(DPR_TIMEOUT, 5000).
+
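+%% Both defaults can be overridden with transport options, for example
+%% (values for illustration only):
+%%
+%%   {connect, [{dpa_timeout, 10000}, {dpr_timeout, 10000} | Opts]}
+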
-type uint32() :: diameter:'Unsigned32'().
-record(state,
@@ -107,9 +117,16 @@
transport :: pid(), %% transport process
dictionary :: module(), %% common dictionary
service :: #diameter_service{},
- dpr = false :: false | {uint32(), uint32()},
- %% | hop by hop and end to end identifiers
- length_errors :: exit | handle | discard}).
+ dpr = false :: false
+ | true %% DPR received, DPA sent
+ | {uint32(), uint32()} %% set in old code
+ | {boolean(), uint32(), uint32()},
+ %% hop by hop and end to end identifiers in
+ %% outgoing DPR; boolean says whether or not
+ %% the request was sent explicitly with
+ %% diameter:call/4.
+ length_errors :: exit | handle | discard,
+ incoming_maxlen :: integer() | infinity}).
%% There are non-3588 states possible as a consequence of 5.6.1 of the
%% standard and the corresponding problem for incoming CEA's: we don't
@@ -138,7 +155,8 @@
%% # start/3
%% ---------------------------------------------------------------------------
--spec start(T, [Opt], {diameter:sequence(),
+-spec start(T, [Opt], {[diameter:service_opt()]
+ | diameter:sequence(), %% from old code
[node()],
module(),
#diameter_service{}})
@@ -177,18 +195,26 @@ init(T) ->
proc_lib:init_ack({ok, self()}),
gen_server:enter_loop(?MODULE, [], i(T)).
-i({Ack, WPid, {M, Ref} = T, Opts, {Mask, Nodes, Dict0, Svc}}) ->
+i({Ack, WPid, T, Opts, {{_,_} = Mask, Nodes, Dict0, Svc}}) -> %% from old code
+ i({Ack, WPid, T, Opts, {[{sequence, Mask}], Nodes, Dict0, Svc}});
+
+i({Ack, WPid, {M, Ref} = T, Opts, {SvcOpts, Nodes, Dict0, Svc}}) ->
erlang:monitor(process, WPid),
wait(Ack, WPid),
diameter_stats:reg(Ref),
+ diameter_codec:setopts([{common_dictionary, Dict0} | SvcOpts]),
+ {_,_} = Mask = proplists:get_value(sequence, SvcOpts),
+ Maxlen = proplists:get_value(incoming_maxlen, SvcOpts, 16#FFFFFF),
{[Cs,Ds], Rest} = proplists:split(Opts, [capabilities_cb, disconnect_cb]),
putr(?CB_KEY, {Ref, [F || {_,F} <- Cs]}),
putr(?DPR_KEY, [F || {_, F} <- Ds]),
putr(?REF_KEY, Ref),
putr(?SEQUENCE_KEY, Mask),
putr(?RESTRICT_KEY, Nodes),
+ putr(?DPA_KEY, {proplists:get_value(dpr_timeout, Opts, ?DPR_TIMEOUT),
+ proplists:get_value(dpa_timeout, Opts, ?DPA_TIMEOUT)}),
- Tmo = proplists:get_value(capx_timeout, Opts, ?EVENT_TIMEOUT),
+ Tmo = proplists:get_value(capx_timeout, Opts, ?CAPX_TIMEOUT),
OnLengthErr = proplists:get_value(length_errors, Opts, exit),
{TPid, Addrs} = start_transport(T, Rest, Svc),
@@ -199,7 +225,8 @@ i({Ack, WPid, {M, Ref} = T, Opts, {Mask, Nodes, Dict0, Svc}}) ->
dictionary = Dict0,
mode = M,
service = svc(Svc, Addrs),
- length_errors = OnLengthErr}.
+ length_errors = OnLengthErr,
+ incoming_maxlen = Maxlen}.
%% The transport returns its local ip addresses so that different
%% transports on the same service can use different local addresses.
%% The local addresses are put into Host-IP-Address avps here when
@@ -212,9 +239,12 @@ wait(Ref, Pid) ->
Ref ->
ok;
{'DOWN', _, process, Pid, _} = D ->
- exit({shutdown, D})
+ x(D)
end.
+x(T) ->
+ exit({shutdown, T}).
+
start_transport(T, Opts, #diameter_service{capabilities = LCaps} = Svc) ->
Addrs0 = LCaps#diameter_caps.host_ip_address,
start_transport(Addrs0, {T, Opts, Svc}).
@@ -226,7 +256,7 @@ start_transport(Addrs0, T) ->
q_next(TPid, Addrs0, Tmo, Data),
{TPid, Addrs};
{error, No} ->
- exit({shutdown, {no_connection, No}})
+ x({no_connection, No})
end.
svc(#diameter_service{capabilities = LCaps0} = Svc, Addrs) ->
@@ -299,11 +329,14 @@ handle_info(T, #state{} = State) ->
{?MODULE, Tag, Reason} ->
?LOG(stop, Tag),
{stop, {shutdown, Reason}, State}
- end.
+ end;
%% The form of the throw caught here is historical. It's
%% significant that it's not a 2-tuple, as in ?FAILURE(Reason),
%% since these are caught elsewhere.
+handle_info(T, S) -> %% started in old code
+ handle_info(T, #state{} = erlang:append_element(S, infinity)).
+
%% Note that there's no guarantee that the service and transport
%% capabilities are good enough to build a CER/CEA that can be
%% succesfully encoded. It's not checked at diameter:add_transport/2
@@ -333,6 +366,9 @@ eraser(Key) ->
%% transition/2
+transition(T, #state{dpr = {Hid, Eid}} = S) -> %% DPR sent from old code
+ transition(T, S#state{dpr = {false, Hid, Eid}});
+
%% Connection to peer.
transition({diameter, {TPid, connected, Remote}},
#state{transport = TPid,
@@ -397,9 +433,8 @@ transition({timeout, _}, _) ->
ok;
%% Outgoing message.
-transition({send, Msg}, #state{transport = TPid}) ->
- send(TPid, Msg),
- ok;
+transition({send, Msg}, S) ->
+ outgoing(Msg, S);
%% Request for graceful shutdown at remove_transport, stop_service of
%% application shutdown.
@@ -408,7 +443,8 @@ transition({shutdown, Pid, Reason}, #state{parent = Pid, dpr = false} = S) ->
transition({shutdown, Pid, _}, #state{parent = Pid}) ->
ok;
-%% DPA reception has timed out.
+%% DPA reception has timed out, or the peer has not closed the
+%% connection as a result of an outgoing DPA.
transition(dpa_timeout, _) ->
stop;
@@ -516,12 +552,9 @@ encode(Rec, Dict) ->
recv(#diameter_packet{header = #diameter_header{} = Hdr}
= Pkt,
- #state{parent = Pid,
- dictionary = Dict0}
+ #state{dictionary = Dict0}
= S) ->
- Name = diameter_codec:msg_name(Dict0, Hdr),
- Pid ! {recv, self(), Name, Pkt},
- rcv(Name, Pkt, S);
+ recv1(diameter_codec:msg_name(Dict0, Hdr), Pkt, S);
recv(#diameter_packet{header = undefined,
bin = Bin}
@@ -532,6 +565,47 @@ recv(#diameter_packet{header = undefined,
recv(Bin, S) ->
recv(#diameter_packet{bin = Bin}, S).
+%% recv1/3
+
+recv1(_,
+ #diameter_packet{header = H, bin = Bin},
+ #state{incoming_maxlen = M})
+ when M < size(Bin) ->
+ invalid(false, incoming_maxlen_exceeded, {size(Bin), H});
+
+%% Incoming request after outgoing DPR: discard. Don't discard DPR
+%% itself, so that both ends don't discard when sending simultaneously.
+recv1(Name,
+ #diameter_packet{header = #diameter_header{is_request = true} = H},
+ #state{dpr = {_,_,_}})
+ when Name /= 'DPR' ->
+ invalid(false, recv_after_outgoing_dpr, H);
+
+%% Incoming request after incoming DPR: discard.
+recv1(_,
+ #diameter_packet{header = #diameter_header{is_request = true} = H},
+ #state{dpr = true}) ->
+ invalid(false, recv_after_incoming_dpr, H);
+
+%% DPA with identifier mismatch, or in response to a DPR initiated by
+%% the service.
+recv1('DPA' = N,
+ #diameter_packet{header = #diameter_header{hop_by_hop_id = Hid,
+ end_to_end_id = Eid}}
+ = Pkt,
+ #state{dpr = {X,H,E}}
+ = S)
+ when H /= Hid;
+ E /= Eid;
+ not X ->
+ rcv(N, Pkt, S);
+
+%% Any other message with a header and no length errors: send to the
+%% parent.
+recv1(Name, Pkt, #state{parent = Pid} = S) ->
+ Pid ! {recv, self(), Name, Pkt},
+ rcv(Name, Pkt, S).
+
%% recv/3
recv(#diameter_header{length = Len}
@@ -588,7 +662,7 @@ rcv(Name, _, #state{state = PS})
rcv('DPR' = N, Pkt, S) ->
handle_request(N, Pkt, S);
-%% DPA in response to DPR and with the expected identifiers.
+%% DPA in response to DPR, with the expected identifiers.
rcv('DPA' = N,
#diameter_packet{header = #diameter_header{end_to_end_id = Eid,
hop_by_hop_id = Hid}
@@ -596,14 +670,21 @@ rcv('DPA' = N,
= Pkt,
#state{dictionary = Dict0,
transport = TPid,
- dpr = {Hid, Eid}}) ->
+ dpr = {X, Hid, Eid}}) ->
?LOG(recv, N),
- incr(recv, H, Dict0),
- incr_rc(recv, diameter_codec:decode(Dict0, Pkt), Dict0),
+ X orelse begin
+ %% Only count DPA in response to a DPR sent by the
+ %% service: explicit DPR is counted in the same way
+ %% as other explicitly sent requests.
+ incr(recv, H, Dict0),
+ incr_rc(recv, diameter_codec:decode(Dict0, Pkt), Dict0)
+ end,
diameter_peer:close(TPid),
{stop, N};
-%% Ignore anything else, an unsolicited DPA in particular.
+%% Ignore anything else, an unsolicited DPA in particular. Note that
+%% dpa_timeout deals with the case in which the peer sends the wrong
+%% identifiers in DPA.
rcv(N, #diameter_packet{header = H}, _)
when N == 'CER';
N == 'CEA';
@@ -637,9 +718,61 @@ incr_error(Dir, Pkt, Dict0) ->
%% Msg here could be a #diameter_packet or a binary depending on who's
%% sending. In particular, the watchdog will send DWR as a binary
%% while messages coming from clients will be in a #diameter_packet.
+
send(Pid, Msg) ->
diameter_peer:send(Pid, Msg).
+%% outgoing/2
+
+%% Explicit DPR.
+outgoing(#diameter_packet{header = #diameter_header{application_id = 0,
+ cmd_code = 282,
+ is_request = true}
+ = H}
+ = Pkt,
+ #state{dpr = T,
+ parent = Pid}
+ = S) ->
+ if T == false ->
+ inform_dpr(Pid),
+ send_dpr(true, Pkt, dpa_timeout(), S);
+ T == true ->
+ invalid(false, dpr_after_dpa, H); %% DPA sent: discard
+ true ->
+ invalid(false, dpr_after_dpr, H) %% DPR sent: discard
+ end;
+
+%% Explicit CER or DWR: discard. These are sent by us.
+outgoing(#diameter_packet{header = #diameter_header{application_id = 0,
+ cmd_code = C,
+ is_request = true}
+ = H},
+ _)
+ when 257 == C; %% CER
+ 280 == C -> %% DWR
+ invalid(false, invalid_request, H);
+
+%% DPR not sent: send.
+outgoing(Msg, #state{transport = TPid, dpr = false}) ->
+ send(TPid, Msg),
+ ok;
+
+%% Outgoing answer: send.
+outgoing(#diameter_packet{header = #diameter_header{is_request = false}}
+ = Pkt,
+ #state{transport = TPid}) ->
+ send(TPid, Pkt),
+ ok;
+
+%% Outgoing request: discard.
+outgoing(Msg, #state{dpr = {_,_,_}}) ->
+ invalid(false, send_after_dpr, header(Msg)).
+
+header(#diameter_packet{header = H}) ->
+ H;
+header(Bin) -> %% DWR
+ diameter_codec:decode_header(Bin).
+
%% handle_request/3
%%
%% Incoming CER or DPR.
@@ -699,6 +832,8 @@ build_answer('CER',
= Pkt,
#state{dictionary = Dict0}
= S) ->
+ diameter_codec:setopts([{string_decode, false}]),
+
{SupportedApps, RCaps, CEA} = recv_CER(CER, S),
[RC, IS] = Dict0:'#get-'(['Result-Code', 'Inband-Security-Id'], CEA),
@@ -731,7 +866,7 @@ build_answer(Type,
errors = Es}
= Pkt,
S) ->
- {RC, FailedAVP} = result_code(H, Es),
+ {RC, FailedAVP} = result_code(Type, H, Es),
{answer(Type, RC, FailedAVP, S), post(Type, RC, Pkt, S)}.
inband_security([]) ->
@@ -748,8 +883,16 @@ cea(CEA, RC, Dict0) ->
post('CER' = T, RC, Pkt, S) ->
{T, caps(S), {RC, Pkt}};
-post('DPR' = T, _, _, #state{parent = Pid}) ->
- [fun(S) -> Pid ! {T, self()}, S end].
+post('DPR', _, _, #state{parent = Pid}) ->
+ [fun(S) -> dpr_timer(), inform_dpr(Pid), dpr(S) end].
+
+dpr(#state{dpr = false} = S) -> %% not awaiting DPA
+ S#state{dpr = true}; %% DPR received
+dpr(S) -> %% DPR already sent or received
+ S.
+
+inform_dpr(Pid) ->
+ Pid ! {'DPR', self()}. %% tell watchdog to die with us
rejected({capabilities_cb, _F, Reason}, T, S) ->
rejected(Reason, T, S);
@@ -798,6 +941,19 @@ set(['answer-message' | _] = Ans, FailedAvp) ->
set([_|_] = Ans, FailedAvp) ->
Ans ++ FailedAvp.
+%% result_code/3
+
+%% Be lenient with errors in DPR since there's no reason to be
+%% otherwise. Rejecting may cause the peer to misinterpret the error
+%% as meaning that the connection should not be closed, which may well
+%% lead to more problems than any errors in the DPR.
+
+result_code('DPR', _, _) ->
+ {2001, []};
+
+result_code('CER', H, Es) ->
+ result_code(H, Es).
+
%% result_code/2
result_code(#diameter_header{is_error = true}, _) ->
@@ -886,6 +1042,8 @@ handle_CEA(#diameter_packet{header = H}
= DPkt
= diameter_codec:decode(Dict0, Pkt),
+ diameter_codec:setopts([{string_decode, false}]),
+
RC = result_code(incr_rc(recv, DPkt, Dict0)),
{SApps, IS, RCaps} = recv_CEA(DPkt, S),
@@ -1026,7 +1184,7 @@ close(Reason) ->
%% dpr/2
%%
-%% The RFC isn't clear on whether DPR should be send in a non-Open
+%% The RFC isn't clear on whether DPR should be sent in a non-Open
%% state. The Peer State Machine transitions it documents aren't
%% exhaustive (no Stop in Wait-I-CEA for example) so assume it's up to
%% the implementation and transition to Closed (ie. die) if we haven't
@@ -1042,7 +1200,7 @@ dpr(Reason, #state{state = 'Open',
Peer = {self(), Caps},
dpr(CBs, [Reason, Ref, Peer], S);
-%% Connection is open, DPR already sent.
+%% Connection is open, DPR already sent or received.
dpr(_, #state{state = 'Open'}) ->
ok;
@@ -1073,10 +1231,9 @@ dpr([CB|Rest], [Reason | _] = Args, S) ->
dpr([], [Reason | _], S) ->
send_dpr(Reason, [], S).
--record(opts, {cause, timeout = ?DPA_TIMEOUT}).
+-record(opts, {cause, timeout}).
-send_dpr(Reason, Opts, #state{transport = TPid,
- dictionary = Dict,
+send_dpr(Reason, Opts, #state{dictionary = Dict,
service = #diameter_service{capabilities = Caps}}
= S) ->
#opts{cause = Cause, timeout = Tmo}
@@ -1085,24 +1242,37 @@ send_dpr(Reason, Opts, #state{transport = TPid,
transport -> ?GOAWAY;
_ -> ?REBOOT
end,
- timeout = ?DPA_TIMEOUT},
+ timeout = dpa_timeout()},
Opts),
#diameter_caps{origin_host = {OH, _},
origin_realm = {OR, _}}
= Caps,
- #diameter_packet{header = #diameter_header{end_to_end_id = Eid,
- hop_by_hop_id = Hid}}
- = Pkt
- = encode(['DPR', {'Origin-Host', OH},
+ Pkt = encode(['DPR', {'Origin-Host', OH},
{'Origin-Realm', OR},
{'Disconnect-Cause', Cause}],
Dict),
- incr(send, Pkt, Dict),
+ send_dpr(false, Pkt, Tmo, S).
+
+%% send_dpr/4
+
+send_dpr(X,
+ #diameter_packet{header = #diameter_header{end_to_end_id = Eid,
+ hop_by_hop_id = Hid}}
+ = Pkt,
+ Tmo,
+ #state{transport = TPid,
+ dictionary = Dict}
+ = S) ->
+ %% Only count DPR sent by the service: explicit DPR is counted in
+ %% the same way as other explicitly sent requests.
+ X orelse incr(send, Pkt, Dict),
send(TPid, Pkt),
dpa_timer(Tmo),
?LOG(send, 'DPR'),
- S#state{dpr = {Hid, Eid}}.
+ S#state{dpr = {X, Hid, Eid}}.
+
+%% opt/2
opt({timeout, Tmo}, Rec)
when ?IS_TIMEOUT(Tmo) ->
@@ -1125,6 +1295,27 @@ cause(N) ->
dpa_timer(Tmo) ->
erlang:send_after(Tmo, self(), dpa_timeout).
+dpa_timeout() ->
+ dpa_timeout(getr(?DPA_KEY)).
+
+dpa_timeout({_, Tmo}) ->
+ Tmo;
+dpa_timeout(undefined) -> %% set in old code
+ ?DPA_TIMEOUT;
+dpa_timeout(Tmo) -> %% ditto
+ Tmo.
+
+dpr_timer() ->
+ dpa_timer(dpr_timeout()).
+
+dpr_timeout() ->
+ dpr_timeout(getr(?DPA_KEY)).
+
+dpr_timeout({Tmo, _}) ->
+ Tmo;
+dpr_timeout(_) -> %% set in old code
+ ?DPR_TIMEOUT.
+
%% register_everywhere/1
%%
%% Register a term and ensure it's not registered elsewhere. Note that
diff --git a/lib/diameter/src/base/diameter_reg.erl b/lib/diameter/src/base/diameter_reg.erl
index 3197c1aee1..f785777874 100644
--- a/lib/diameter/src/base/diameter_reg.erl
+++ b/lib/diameter/src/base/diameter_reg.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -22,10 +22,11 @@
%%
-module(diameter_reg).
--compile({no_auto_import, [monitor/2]}).
-
-behaviour(gen_server).
+-compile({no_auto_import, [monitor/2, now/0]}).
+-import(diameter_lib, [now/0]).
+
-export([add/1,
add_new/1,
del/1,
diff --git a/lib/diameter/src/base/diameter_service.erl b/lib/diameter/src/base/diameter_service.erl
index 76b05a2ad4..86e744dfbe 100644
--- a/lib/diameter/src/base/diameter_service.erl
+++ b/lib/diameter/src/base/diameter_service.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,6 +24,9 @@
-module(diameter_service).
-behaviour(gen_server).
+-compile({no_auto_import, [now/0]}).
+-import(diameter_lib, [now/0]).
+
%% towards diameter_service_sup
-export([start_link/1]).
@@ -127,7 +130,9 @@
:: [{sequence, diameter:sequence()} %% sequence mask
| {share_peers, diameter:remotes()} %% broadcast to
| {use_shared_peers, diameter:remotes()} %% use from
- | {restrict_connections, diameter:restriction()}]}).
+ | {restrict_connections, diameter:restriction()}
+ | {string_decode, boolean()}
+ | {incoming_maxlen, diameter:message_length()}]}).
%% shared_peers reflects the peers broadcast from remote nodes.
%% Record representing an RFC 3539 watchdog process implemented by
@@ -258,16 +263,22 @@ whois(SvcName) ->
%% ---------------------------------------------------------------------------
-spec pick_peer(SvcName, AppOrAlias, Opts)
- -> {{TPid, Caps, App}, Mask}
- | false
- | {error, term()}
+ -> {{TPid, Caps, App}, Mask, SvcOpts}
+ | false %% no selection
+ | {error, no_service}
when SvcName :: diameter:service_name(),
- AppOrAlias :: {alias, diameter:app_alias()} | #diameter_app{},
- Opts :: tuple(),
+ AppOrAlias :: #diameter_app{}
+ | {alias, diameter:app_alias()},
+ Opts :: {fun((Dict :: module()) -> [term()]),
+ diameter:peer_filter(),
+ Xtra :: list()},
TPid :: pid(),
Caps :: #diameter_caps{},
App :: #diameter_app{},
- Mask :: diameter:sequence().
+ Mask :: diameter:sequence(),
+ SvcOpts :: [diameter:service_opt()].
+%% Extract Mask in the returned tuple so that diameter_traffic doesn't
+%% need to know about the ordering of SvcOpts used here.
pick_peer(SvcName, App, Opts) ->
pick(lookup_state(SvcName), App, Opts).
@@ -284,10 +295,10 @@ pick(#state{service = #diameter_service{applications = Apps}}
Opts) -> %% initial call from diameter:call/4
pick(S, find_outgoing_app(Alias, Apps), Opts);
-pick(_, false, _) ->
- false;
+pick(_, false = No, _) ->
+ No;
-pick(#state{options = [{_, Mask} | _]}
+pick(#state{options = [{_, Mask} | SvcOpts]}
= S,
#diameter_app{module = ModX, dictionary = Dict}
= App0,
@@ -296,7 +307,7 @@ pick(#state{options = [{_, Mask} | _]}
[_,_] = RealmAndHost = diameter_lib:eval([DestF, Dict]),
case pick_peer(App, RealmAndHost, Filter, S) of
{TPid, Caps} ->
- {{TPid, Caps, App}, Mask};
+ {{TPid, Caps, App}, Mask, SvcOpts};
false = No ->
No
end.
@@ -610,8 +621,9 @@ st(#watchdog{ref = Ref, pid = Pid}, Refs) ->
%% st/3
st(#watchdog{pid = Pid}, Reason, Acc) ->
+ MRef = monitor(process, Pid),
Pid ! {shutdown, self(), Reason},
- [Pid | Acc].
+ [MRef | Acc].
%% ---------------------------------------------------------------------------
%% # call_service/2
@@ -686,7 +698,9 @@ service_options(Opts) ->
{restrict_connections, proplists:get_value(restrict_connections,
Opts,
?RESTRICT)},
- {spawn_opt, proplists:get_value(spawn_opt, Opts, [])}].
+ {spawn_opt, proplists:get_value(spawn_opt, Opts, [])},
+ {string_decode, proplists:get_value(string_decode, Opts, true)},
+ {incoming_maxlen, proplists:get_value(incoming_maxlen, Opts, 16#FFFFFF)}].
%% The order of options is significant since we match against the list.
mref(false = No) ->
@@ -765,8 +779,9 @@ reason(failure) ->
start(Ref, {T, Opts}, S)
when T == connect;
T == listen ->
+ N = proplists:get_value(pool_size, Opts, 1),
try
- {ok, start(Ref, type(T), Opts, S)}
+ {ok, start(Ref, type(T), Opts, N, S)}
catch
?FAILURE(Reason) ->
{error, Reason}
@@ -784,26 +799,44 @@ type(connect = T) -> T.
%% start/4
-start(Ref, Type, Opts, #state{watchdogT = WatchdogT,
- peerT = PeerT,
- options = SvcOpts,
- service_name = SvcName,
- service = Svc0})
+start(Ref, Type, Opts, State) ->
+ start(Ref, Type, Opts, 1, State).
+
+%% start/5
+
+start(Ref, Type, Opts, N, #state{watchdogT = WatchdogT,
+ peerT = PeerT,
+ options = SvcOpts,
+ service_name = SvcName,
+ service = Svc0})
when Type == connect;
Type == accept ->
#diameter_service{applications = Apps}
- = Svc
+ = Svc1
= merge_service(Opts, Svc0),
- {_,_} = Mask = proplists:get_value(sequence, SvcOpts),
- RecvData = diameter_traffic:make_recvdata([SvcName, PeerT, Apps, Mask]),
- Pid = s(Type, Ref, {{spawn_opts([Opts, SvcOpts]), RecvData},
- Opts,
- SvcOpts,
- Svc}),
- insert(WatchdogT, #watchdog{pid = Pid,
- type = Type,
- ref = Ref,
- options = Opts}),
+ Svc = binary_caps(Svc1, proplists:get_value(string_decode, SvcOpts, true)),
+ RecvData = diameter_traffic:make_recvdata([SvcName,
+ PeerT,
+ Apps,
+ SvcOpts]),
+ T = {{spawn_opts([Opts, SvcOpts]), RecvData}, Opts, SvcOpts, Svc},
+ Rec = #watchdog{type = Type,
+ ref = Ref,
+ options = Opts},
+ diameter_lib:fold_n(fun(_,A) ->
+ [wd(Type, Ref, T, WatchdogT, Rec) | A]
+ end,
+ [],
+ N).
+
+binary_caps(Svc, true) ->
+ Svc;
+binary_caps(#diameter_service{capabilities = Caps} = Svc, false) ->
+ Svc#diameter_service{capabilities = diameter_capx:binary_caps(Caps)}.
+
+wd(Type, Ref, T, WatchdogT, Rec) ->
+ Pid = start_watchdog(Type, Ref, T),
+ insert(WatchdogT, Rec#watchdog{pid = Pid}),
Pid.
%% Note that the service record passed into the watchdog is the merged
@@ -816,7 +849,7 @@ spawn_opts(Optss) ->
T /= link,
T /= monitor].
-s(Type, Ref, T) ->
+start_watchdog(Type, Ref, T) ->
{_MRef, Pid} = diameter_watchdog:start({Type, Ref}, T),
Pid.
@@ -837,7 +870,7 @@ ms({applications, As}, #diameter_service{applications = Apps} = S)
%% The fact that all capabilities can be configured on the transports
%% means that the service doesn't necessarily represent a single
-%% locally implemented Diameter peer as identified by Origin-Host: a
+%% locally implemented Diameter node as identified by Origin-Host: a
%% transport can configure its own Origin-Host. This means that the
%% service little more than a placeholder for default capabilities
%% plus a list of applications that individual transports can choose
@@ -1185,7 +1218,7 @@ connect_timer(Opts, Def0) ->
%% continuous restarted in case of faulty config or other problems.
tc(Time, Tc) ->
choose(Tc > ?RESTART_TC
- orelse timer:now_diff(now(), Time) > 1000*?RESTART_TC,
+ orelse diameter_lib:micro_diff(Time) > 1000*?RESTART_TC,
Tc,
?RESTART_TC).
@@ -1718,31 +1751,43 @@ info_transport(S) ->
[],
PeerD).
-%% Only a config entry for a listening transport: use it.
-transport([[{type, listen}, _] = L]) ->
- L ++ [{accept, []}];
-
-%% Only one config or peer entry for a connecting transport: use it.
-transport([[{type, connect} | _] = L]) ->
- L;
+%% Single config entry. Distinguish between pool_size config or not on
+%% a connecting transport for backwards compatibility: with the option,
+%% the form is similar to the listening case, with connections grouped
+%% in a pool tuple (for lack of a better name); without it, as before.
+transport([[{type, Type}, {options, Opts}] = L])
+ when Type == listen;
+ Type == connect ->
+ L ++ [{K, []} || [{_,K}] <- [keys(Type, Opts)]];
%% Peer entries: discard config. Note that the peer entries have
%% length at least 3.
transport([[_,_] | L]) ->
transport(L);
-%% Possibly many peer entries for a listening transport. Note that all
-%% have the same options by construction, which is not terribly space
-%% efficient.
-transport([[{type, accept}, {options, Opts} | _] | _] = Ls) ->
- [{type, listen},
+%% Multiple transports. Note that all have the same options by
+%% construction, which is not terribly space efficient.
+transport([[{type, Type}, {options, Opts} | _] | _] = Ls) ->
+ transport(keys(Type, Opts), Ls).
+
+%% Group transports in an accept or pool tuple ...
+transport([{Type, Key}], [[{type, _}, {options, Opts} | _] | _] = Ls) ->
+ [{type, Type},
{options, Opts},
- {accept, [lists:nthtail(2,L) || L <- Ls]}].
+ {Key, [tl(tl(L)) || L <- Ls]}];
+
+%% ... or not: there can only be one.
+transport([], [L]) ->
+ L.
+
+keys(connect = T, Opts) ->
+ [{T, pool} || lists:keymember(pool_size, 1, Opts)];
+keys(_, _) ->
+ [{listen, accept}].
peer_dict(#state{watchdogT = WatchdogT, peerT = PeerT}, Dict0) ->
try ets:tab2list(WatchdogT) of
- L ->
- lists:foldl(fun(T,A) -> peer_acc(PeerT, A, T) end, Dict0, L)
+ L -> lists:foldl(fun(T,A) -> peer_acc(PeerT, A, T) end, Dict0, L)
catch
error: badarg -> Dict0 %% service has gone down
end.
diff --git a/lib/diameter/src/base/diameter_service_sup.erl b/lib/diameter/src/base/diameter_service_sup.erl
index 153fff902f..e3177f0083 100644
--- a/lib/diameter/src/base/diameter_service_sup.erl
+++ b/lib/diameter/src/base/diameter_service_sup.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2011. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -58,7 +58,7 @@ init([]) ->
ChildSpec = {Mod,
{Mod, start_link, []},
temporary,
- 1000,
+ 5000,
worker,
[Mod]},
{ok, {Flags, [ChildSpec]}}.
diff --git a/lib/diameter/src/base/diameter_session.erl b/lib/diameter/src/base/diameter_session.erl
index 3b236f109a..c5ea0428b5 100644
--- a/lib/diameter/src/base/diameter_session.erl
+++ b/lib/diameter/src/base/diameter_session.erl
@@ -157,8 +157,8 @@ session_id(Host) ->
%% ---------------------------------------------------------------------------
init() ->
- Now = now(),
- random:seed(Now),
+ {Now, Seed} = diameter_lib:seed(),
+ random:seed(Seed),
Time = time32(Now),
Seq = (?INT32 band (Time bsl 20)) bor (random:uniform(1 bsl 20) - 1),
ets:insert(diameter_sequence, [{origin_state_id, Time},
diff --git a/lib/diameter/src/base/diameter_stats.erl b/lib/diameter/src/base/diameter_stats.erl
index 8353613d32..64ea082be0 100644
--- a/lib/diameter/src/base/diameter_stats.erl
+++ b/lib/diameter/src/base/diameter_stats.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -22,9 +22,11 @@
%%
-module(diameter_stats).
-
-behaviour(gen_server).
+-compile({no_auto_import, [now/0]}).
+-import(diameter_lib, [now/0]).
+
-export([reg/2, reg/1,
incr/3, incr/1,
read/1,
diff --git a/lib/diameter/src/base/diameter_sup.erl b/lib/diameter/src/base/diameter_sup.erl
index e5afd23dcd..4ede4086d8 100644
--- a/lib/diameter/src/base/diameter_sup.erl
+++ b/lib/diameter/src/base/diameter_sup.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2011. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -64,7 +64,7 @@ spec(Mod) ->
{Mod,
{Mod, start_link, []},
permanent,
- 1000,
+ infinity,
supervisor,
[Mod]}.
diff --git a/lib/diameter/src/base/diameter_sync.erl b/lib/diameter/src/base/diameter_sync.erl
index ce2db4b3a2..90eabece3d 100644
--- a/lib/diameter/src/base/diameter_sync.erl
+++ b/lib/diameter/src/base/diameter_sync.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2011. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -27,6 +27,9 @@
-module(diameter_sync).
-behaviour(gen_server).
+-compile({no_auto_import, [now/0]}).
+-import(diameter_lib, [now/0]).
+
-export([call/4, call/5,
cast/4, cast/5,
carp/1, carp/2]).
diff --git a/lib/diameter/src/base/diameter_traffic.erl b/lib/diameter/src/base/diameter_traffic.erl
index 3b62afca47..eb4bbae931 100644
--- a/lib/diameter/src/base/diameter_traffic.erl
+++ b/lib/diameter/src/base/diameter_traffic.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -77,7 +77,12 @@
{peerT :: ets:tid(),
service_name :: diameter:service_name(),
apps :: [#diameter_app{}],
- sequence :: diameter:sequence()}).
+ sequence :: diameter:sequence(),
+ codec :: [{string_decode, boolean()}
+ | {incoming_maxlen, diameter:message_length()}]}).
+%% Note that incoming_maxlen is currently handled in diameter_peer_fsm,
+%% so that any message exceeding the maximum is discarded. Retain the
+%% option in case we want to extend the values and semantics.
%% Record stored in diameter_request for each outgoing request.
-record(request,
@@ -92,11 +97,18 @@
%% # make_recvdata/1
%% ---------------------------------------------------------------------------
-make_recvdata([SvcName, PeerT, Apps, Mask | _]) ->
+make_recvdata([SvcName, PeerT, Apps, {_,_} = Mask | _]) -> %% from old code
+ make_recvdata([SvcName, PeerT, Apps, [{sequence, Mask}]]);
+
+make_recvdata([SvcName, PeerT, Apps, SvcOpts | _]) ->
+ {_,_} = Mask = proplists:get_value(sequence, SvcOpts),
#recvdata{service_name = SvcName,
peerT = PeerT,
apps = Apps,
- sequence = Mask}.
+ sequence = Mask,
+ codec = [T || {K,_} = T <- SvcOpts,
+ lists:member(K, [string_decode,
+ incoming_maxlen])]}.
%% ---------------------------------------------------------------------------
%% peer_up/1
@@ -119,11 +131,11 @@ peer_down(TPid) ->
%% incr/4
%% ---------------------------------------------------------------------------
-incr(Dir, #diameter_packet{header = H}, TPid, Dict) ->
- incr(Dir, H, TPid, Dict);
+incr(Dir, #diameter_packet{header = H}, TPid, AppDict) ->
+ incr(Dir, H, TPid, AppDict);
-incr(Dir, #diameter_header{} = H, TPid, Dict) ->
- incr(TPid, {msg_id(H, Dict), Dir}).
+incr(Dir, #diameter_header{} = H, TPid, AppDict) ->
+ incr(TPid, {msg_id(H, AppDict), Dir}).
%% ---------------------------------------------------------------------------
%% incr_error/4
@@ -131,26 +143,26 @@ incr(Dir, #diameter_header{} = H, TPid, Dict) ->
%% Identify messages using the application dictionary, not the encode
%% dictionary, which may differ in the case of answer-message.
-incr_error(Dir, T, Pid, {_Dict, AppDict}) ->
+incr_error(Dir, T, Pid, {_MsgDict, AppDict}) ->
incr_error(Dir, T, Pid, AppDict);
%% Decoded message without errors.
incr_error(recv, #diameter_packet{errors = []}, _, _) ->
ok;
-incr_error(recv = D, #diameter_packet{header = H}, TPid, Dict) ->
- incr_error(D, H, TPid, Dict);
+incr_error(recv = D, #diameter_packet{header = H}, TPid, AppDict) ->
+ incr_error(D, H, TPid, AppDict);
%% Encoded message with errors and an identifiable header ...
-incr_error(send = D, {_, _, #diameter_header{} = H}, TPid, Dict) ->
- incr_error(D, H, TPid, Dict);
+incr_error(send = D, {_, _, #diameter_header{} = H}, TPid, AppDict) ->
+ incr_error(D, H, TPid, AppDict);
%% ... or not.
incr_error(send = D, {_,_}, TPid, _) ->
incr_error(D, unknown, TPid);
-incr_error(Dir, #diameter_header{} = H, TPid, Dict) ->
- incr_error(Dir, msg_id(H, Dict), TPid);
+incr_error(Dir, #diameter_header{} = H, TPid, AppDict) ->
+ incr_error(Dir, msg_id(H, AppDict), TPid);
incr_error(Dir, Id, TPid, _) ->
incr_error(Dir, Id, TPid).
@@ -162,24 +174,30 @@ incr_error(Dir, Id, TPid) ->
%% incr_rc/4
%% ---------------------------------------------------------------------------
--spec incr_rc(send|recv, Pkt, TPid, Dict0)
+-spec incr_rc(send|recv, Pkt, TPid, DictT)
-> {Counter, non_neg_integer()}
| Reason
when Pkt :: #diameter_packet{},
TPid :: pid(),
- Dict0 :: module(),
+ DictT :: module() | {MsgDict :: module(),
+ AppDict :: module(),
+ CommonDict:: module()},
Counter :: {'Result-Code', integer()}
| {'Experimental-Result', integer(), integer()},
Reason :: atom().
-incr_rc(Dir, Pkt, TPid, Dict0) ->
+incr_rc(Dir, Pkt, TPid, {_, AppDict, _} = DictT) ->
try
- incr_result(Dir, Pkt, TPid, {Dict0, Dict0, Dict0})
+ incr_result(Dir, Pkt, TPid, DictT)
catch
exit: {E,_} when E == no_result_code;
E == invalid_error_bit ->
+ incr(TPid, {msg_id(Pkt#diameter_packet.header, AppDict), Dir, E}),
E
- end.
+ end;
+
+incr_rc(Dir, Pkt, TPid, Dict0) ->
+ incr_rc(Dir, Pkt, TPid, {Dict0, Dict0, Dict0}).
%% ---------------------------------------------------------------------------
%% pending/1
@@ -223,6 +241,8 @@ receive_message(TPid, Pkt, Dict0, RecvData)
Dict0,
RecvData).
+%% recv/6
+
%% Incoming request ...
recv(true, false, TPid, Pkt, Dict0, T) ->
spawn_request(TPid, Pkt, Dict0, T);
@@ -230,6 +250,7 @@ recv(true, false, TPid, Pkt, Dict0, T) ->
%% ... answer to known request ...
recv(false, #request{ref = Ref, handler = Pid} = Req, _, Pkt, Dict0, _) ->
Pid ! {answer, Ref, Req, Dict0, Pkt};
+
%% Note that failover could have happened prior to this message being
%% received and triggering failback. That is, both a failover message
%% and answer may be on their way to the handler process. In the worst
@@ -240,7 +261,8 @@ recv(false, #request{ref = Ref, handler = Pid} = Req, _, Pkt, Dict0, _) ->
%% any others are discarded.
%% ... or not.
-recv(false, false, _, _, _, _) ->
+recv(false, false, TPid, _, _, _) ->
+ incr(TPid, {{unknown, 0}, recv, discarded}),
ok.
%% spawn_request/4
@@ -266,8 +288,11 @@ recv_request(TPid,
#diameter_packet{header = #diameter_header{application_id = Id}}
= Pkt,
Dict0,
- #recvdata{peerT = PeerT, apps = Apps}
+ #recvdata{peerT = PeerT,
+ apps = Apps,
+ codec = Opts}
= RecvData) ->
+ diameter_codec:setopts([{common_dictionary, Dict0} | Opts]),
send_A(recv_R(diameter_service:find_incoming_app(PeerT, TPid, Id, Apps),
TPid,
Pkt,
@@ -275,18 +300,24 @@ recv_request(TPid,
RecvData),
TPid,
Dict0,
- RecvData).
+ RecvData);
+
+recv_request(TPid, Pkt, Dict0, RecvData) -> %% from old code
+ recv_request(TPid,
+ Pkt,
+ Dict0,
+ #recvdata{} = erlang:append_element(RecvData, [])).
%% recv_R/5
-recv_R({#diameter_app{id = Id, dictionary = Dict} = App, Caps},
+recv_R({#diameter_app{id = Id, dictionary = AppDict} = App, Caps},
TPid,
Pkt0,
Dict0,
RecvData) ->
- incr(recv, Pkt0, TPid, Dict),
- Pkt = errors(Id, diameter_codec:decode(Id, Dict, Pkt0)),
- incr_error(recv, Pkt, TPid, Dict),
+ incr(recv, Pkt0, TPid, AppDict),
+ Pkt = errors(Id, diameter_codec:decode(Id, AppDict, Pkt0)),
+ incr_error(recv, Pkt, TPid, AppDict),
{Caps, Pkt, App, recv_R(App, TPid, Dict0, Caps, RecvData, Pkt)};
%% Note that the decode is different depending on whether or not Id is
%% ?APP_ID_RELAY.
@@ -494,14 +525,17 @@ send_A(_, _, _, _) ->
%% send_A/6
-send_A(T, TPid, DictT, ReqPkt, EvalPktFs, EvalFs) ->
- reply(T, TPid, DictT, EvalPktFs, ReqPkt),
+send_A(T, TPid, {AppDict, Dict0} = DictT0, ReqPkt, EvalPktFs, EvalFs) ->
+ {MsgDict, Pkt} = reply(T, TPid, DictT0, EvalPktFs, ReqPkt),
+ incr(send, Pkt, TPid, AppDict),
+ incr_rc(send, Pkt, TPid, {MsgDict, AppDict, Dict0}), %% count outgoing
+ send(TPid, Pkt),
lists:foreach(fun diameter_lib:eval/1, EvalFs).
%% answer/6
answer({reply, Ans}, _Caps, _Pkt, App, Dict0, _RecvData) ->
- {dict(App#diameter_app.dictionary, Dict0, Ans), Ans};
+ {msg_dict(App#diameter_app.dictionary, Dict0, Ans), Ans};
answer({call, Opts}, Caps, Pkt, App, Dict0, RecvData) ->
#diameter_caps{origin_host = {OH,_}}
@@ -524,27 +558,37 @@ answer({answer_message, RC} = T, Caps, Pkt, App, Dict0, _RecvData) ->
orelse ?ERROR({invalid_return, T, handle_request, App}),
answer_message(RC, Caps, Dict0, Pkt).
-%% dict/3
+%% msg_dict/3
+%%
+%% Return the dictionary defining the message grammar in question: the
+%% application dictionary or the common dictionary.
-%% An incoming answer, not yet decoded.
-dict(Dict, Dict0, #diameter_packet{header
- = #diameter_header{is_request = false,
- is_error = E},
- msg = undefined}) ->
- if E -> Dict0; true -> Dict end;
+msg_dict(AppDict, Dict0, [Msg])
+ when is_list(Msg);
+ is_tuple(Msg) ->
+ msg_dict(AppDict, Dict0, Msg);
+
+msg_dict(AppDict, Dict0, Msg) ->
+ choose(is_answer_message(Msg, Dict0), Dict0, AppDict).
-dict(Dict, Dict0, [Msg]) ->
- dict(Dict, Dict0, Msg);
+%% Incoming, not yet decoded.
+is_answer_message(#diameter_packet{header = #diameter_header{} = H,
+ msg = undefined},
+ Dict0) ->
+ is_answer_message([H], Dict0);
-dict(Dict, Dict0, #diameter_packet{msg = Msg}) ->
- dict(Dict, Dict0, Msg);
+is_answer_message(#diameter_packet{msg = Msg}, Dict0) ->
+ is_answer_message(Msg, Dict0);
-dict(Dict, Dict0, Msg) ->
- choose(is_answer_message(Msg, Dict0), Dict0, Dict).
+%% Message sent as a header/avps list.
+is_answer_message([#diameter_header{is_request = R, is_error = E} | _], _) ->
+ E andalso not R;
+%% Message sent as a tagged avp/value list.
is_answer_message([Name | _], _) ->
Name == 'answer-message';
+%% Message sent as a record.
is_answer_message(Rec, Dict) ->
try
'answer-message' == Dict:rec2msg(element(1,Rec))
@@ -592,7 +636,7 @@ resend(false,
Route = #diameter_avp{data = {Dict0, 'Route-Record', OH}},
Seq = diameter_session:sequence(Mask),
Hdr = Hdr0#diameter_header{hop_by_hop_id = Seq},
- Msg = [Hdr, Route | Avps],
+ Msg = [Hdr, Route | Avps], %% reordered at encode
resend(send_request(SvcName, App, Msg, Opts), Caps, Dict0, Pkt).
%% The incoming request is relayed with the addition of a
%% Route-Record. Note the requirement on the return from call/4 below,
@@ -614,7 +658,7 @@ resend(false,
%%
%% Relay a reply to a relayed request.
-%% Answer from the peer: reset the hop by hop identifier and send.
+%% Answer from the peer: reset the hop by hop identifier.
resend(#diameter_packet{bin = B}
= Pkt,
_Caps,
@@ -653,13 +697,13 @@ is_loop(Code, Vid, OH, Dict0, Avps) ->
%% reply/5
%% Local answer ...
-reply({Dict, Ans}, TPid, {AppDict, Dict0}, Fs, ReqPkt) ->
- local(Ans, TPid, {Dict, AppDict, Dict0}, Fs, ReqPkt);
+reply({MsgDict, Ans}, TPid, {AppDict, Dict0}, Fs, ReqPkt) ->
+ local(Ans, TPid, {MsgDict, AppDict, Dict0}, Fs, ReqPkt);
%% ... or relayed.
-reply(#diameter_packet{} = Pkt, TPid, _Dict0, Fs, _ReqPkt) ->
+reply(#diameter_packet{} = Pkt, _TPid, {AppDict, Dict0}, Fs, _ReqPkt) ->
eval_packet(Pkt, Fs),
- send(TPid, Pkt).
+ {msg_dict(AppDict, Dict0, Pkt), Pkt}.
%% local/5
%%
@@ -672,14 +716,12 @@ local([Msg], TPid, DictT, Fs, ReqPkt)
is_tuple(Msg) ->
local(Msg, TPid, DictT, Fs, ReqPkt#diameter_packet{errors = []});
-local(Msg, TPid, {Dict, AppDict, Dict0} = DictT, Fs, ReqPkt) ->
- Pkt = encode({Dict, AppDict},
+local(Msg, TPid, {MsgDict, AppDict, Dict0}, Fs, ReqPkt) ->
+ Pkt = encode({MsgDict, AppDict},
TPid,
- reset(make_answer_packet(Msg, ReqPkt), Dict, Dict0),
+ reset(make_answer_packet(Msg, ReqPkt), MsgDict, Dict0),
Fs),
- incr(send, Pkt, TPid, AppDict),
- incr_result(send, Pkt, TPid, DictT), %% count outgoing
- send(TPid, Pkt).
+ {MsgDict, Pkt}.
%% reset/3
@@ -952,8 +994,8 @@ answer_message(OH, OR, RC, Dict0, #diameter_packet{avps = Avps,
session_id(Code, Vid, Dict0, Avps)
when is_list(Avps) ->
try
- {value, #diameter_avp{data = D}} = find_avp(Code, Vid, Avps),
- [{'Session-Id', [Dict0:avp(decode, D, 'Session-Id')]}]
+ #diameter_avp{data = Bin} = find_avp(Code, Vid, Avps),
+ [{'Session-Id', [Dict0:avp(decode, Bin, 'Session-Id')]}]
catch
error: _ ->
[]
@@ -970,26 +1012,17 @@ failed_avp(_, [] = No) ->
%% find_avp/3
-find_avp(Code, Vid, Avps)
- when is_integer(Code), (undefined == Vid orelse is_integer(Vid)) ->
- find(fun(A) -> is_avp(Code, Vid, A) end, Avps).
+%% Grouped ...
+find_avp(Code, VId, [[#diameter_avp{code = Code, vendor_id = VId} | _] = As
+ | _]) ->
+ As;
-%% The final argument here could be a list of AVP's, depending on the case,
-%% but we're only searching at the top level.
-is_avp(Code, Vid, #diameter_avp{code = Code, vendor_id = Vid}) ->
- true;
-is_avp(_, _, _) ->
- false.
+%% ... or not.
+find_avp(Code, VId, [#diameter_avp{code = Code, vendor_id = VId} = A | _]) ->
+ A;
-find(_, []) ->
- false;
-find(Pred, [H|T]) ->
- case Pred(H) of
- true ->
- {value, H};
- false ->
- find(Pred, T)
- end.
+find_avp(Code, VId, [_ | Avps]) ->
+ find_avp(Code, VId, Avps).
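%% For example, searching decoded AVPs for Session-Id (code 263, no
%% vendor id), much as session_id/4 above does:
%%
%%   find_avp(263, undefined, Avps)
%%
%% returns the matching #diameter_avp{} (or, for a Grouped AVP, its
%% decoded component list) and fails if nothing matches, which callers
%% catch.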
%% 7. Error Handling
%%
@@ -1048,58 +1081,74 @@ find(Pred, [H|T]) ->
%% Increment a stats counter for result codes in incoming and outgoing
%% answers.
+%% Message sent as a header/avps list.
+incr_result(send = Dir,
+ #diameter_packet{msg = [#diameter_header{} = H | _]}
+ = Pkt,
+ TPid,
+ DictT) ->
+ incr_res(Dir, Pkt#diameter_packet{header = H}, TPid, DictT);
+
%% Outgoing message as binary: don't count. (Sending binaries is only
%% partially supported.)
-incr_result(_, #diameter_packet{msg = undefined = No}, _, _) ->
+incr_result(send, #diameter_packet{header = undefined = No}, _, _) ->
No;
%% Incoming or outgoing. Outgoing with encode errors never gets here
%% since encode fails.
-incr_result(Dir, Pkt, TPid, {Dict, AppDict, Dict0}) ->
- #diameter_packet{header = #diameter_header{is_error = E}
- = Hdr,
- msg = Msg,
- errors = Es}
- = Pkt,
+incr_result(Dir, Pkt, TPid, DictT) ->
+ incr_res(Dir, Pkt, TPid, DictT).
+
+incr_res(Dir,
+ #diameter_packet{header = #diameter_header{is_error = E}
+ = Hdr,
+ errors = Es}
+ = Pkt,
+ TPid,
+ DictT) ->
+ {MsgDict, AppDict, Dict0} = DictT,
Id = msg_id(Hdr, AppDict),
+ %% Could be {relay, 0}, in which case the R-bit is redundant since
+ %% only answers are being counted. Leave it, however, so that the
+ %% same tuple is in both send/recv and result code counters.
%% Count incoming decode errors.
recv /= Dir orelse [] == Es orelse incr_error(Dir, Id, TPid, AppDict),
%% Exit on a missing result code.
- T = rc_counter(Dict, Msg),
- T == false andalso ?LOGX(no_result_code, {Dict, Dir, Hdr}),
- {Ctr, RC} = T,
+ T = rc_counter(MsgDict, Dir, Pkt),
+ T == false andalso ?LOGX(no_result_code, {MsgDict, Dir, Hdr}),
+ {Ctr, RC, Avp} = T,
%% Or on an inappropriate value.
is_result(RC, E, Dict0)
- orelse ?LOGX(invalid_error_bit, {Dict, Dir, Hdr, RC}),
+ orelse ?LOGX(invalid_error_bit, {MsgDict, Dir, Hdr, Avp}),
incr(TPid, {Id, Dir, Ctr}),
Ctr.
%% msg_id/2
-msg_id(#diameter_packet{header = H}, Dict) ->
- msg_id(H, Dict);
+msg_id(#diameter_packet{header = H}, AppDict) ->
+ msg_id(H, AppDict);
%% Only count on known keys so as not to be vulnerable to attack:
%% there are 2^32 (application ids) * 2^24 (command codes) = 2^56
%% pairs for an attacker to choose from.
-msg_id(Hdr, Dict) ->
- {_ApplId, Code, R} = Id = diameter_codec:msg_id(Hdr),
- case Dict:msg_name(Code, 0 == R) of
- '' ->
- unknown(Dict:id(), R);
- _ ->
- Id
+msg_id(Hdr, AppDict) ->
+ {Aid, Code, R} = Id = diameter_codec:msg_id(Hdr),
+ case AppDict:id() of
+ ?APP_ID_RELAY ->
+ {relay, R};
+ A ->
+ unknown(A /= Aid orelse '' == AppDict:msg_name(Code, 0 == R), Id)
end.
-unknown(?APP_ID_RELAY, R) ->
- {relay, R};
-unknown(_, _) ->
- unknown.
+unknown(true, {_, _, R}) ->
+ {unknown, R};
+unknown(false, Id) ->
+ Id.
%% No E-bit: can't be 3xxx.
is_result(RC, false, _Dict0) ->
@@ -1120,7 +1169,7 @@ is_result(RC, true, _) ->
incr(TPid, Counter) ->
diameter_stats:incr(Counter, TPid, 1).
-%% rc_counter/2
+%% rc_counter/3
%% RFC 3588, 7.6:
%%
@@ -1128,39 +1177,49 @@ incr(TPid, Counter) ->
%% applications MUST include either one Result-Code AVP or one
%% Experimental-Result AVP.
-rc_counter(Dict, Msg) ->
- rcc(Dict, Msg, int(get_avp_value(Dict, 'Result-Code', Msg))).
+rc_counter(Dict, Dir, #diameter_packet{header = H,
+ avps = As,
+ msg = Msg})
+ when Dir == recv; %% decoded incoming
+ Msg == undefined -> %% relayed outgoing
+ rc_counter(Dict, [H|As]);
+
+rc_counter(Dict, _, #diameter_packet{msg = Msg}) ->
+ rc_counter(Dict, Msg).
-rcc(Dict, Msg, undefined) ->
- rcc(get_avp_value(Dict, 'Experimental-Result', Msg));
+rc_counter(Dict, Msg) ->
+ rcc(get_result(Dict, Msg)).
-rcc(_, _, N)
+rcc(#diameter_avp{name = 'Result-Code' = Name, value = N} = A)
when is_integer(N) ->
- {{'Result-Code', N}, N}.
+ {{Name, N}, N, A};
-%% Outgoing answers may be in any of the forms messages can be sent
-%% in. Incoming messages will be records. We're assuming here that the
-%% arity of the result code AVP's is 0 or 1.
+rcc(#diameter_avp{name = 'Result-Code' = Name, value = [N|_]} = A)
+ when is_integer(N) ->
+ {{Name, N}, N, A};
-rcc([{_,_,N} = T | _])
+rcc(#diameter_avp{name = 'Experimental-Result', value = {_,_,N} = T} = A)
when is_integer(N) ->
- {T,N};
-rcc({_,_,N} = T)
+ {T, N, A};
+
+rcc(#diameter_avp{name = 'Experimental-Result', value = [{_,_,N} = T|_]} = A)
when is_integer(N) ->
- {T,N};
+ {T, N, A};
+
rcc(_) ->
false.
-%% Extract the first good looking integer. There's no guarantee
-%% that what we're looking for has arity 1.
-int([N|_])
- when is_integer(N) ->
- N;
-int(N)
- when is_integer(N) ->
- N;
-int(_) ->
- undefined.
+%% get_result/2
+
+get_result(Dict, Msg) ->
+ try
+ [throw(A) || N <- ['Result-Code', 'Experimental-Result'],
+ #diameter_avp{} = A <- [get_avp(Dict, N, Msg)]]
+ of
+ [] -> false
+ catch
+ #diameter_avp{} = A -> A
+ end.
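%% A minimal standalone sketch of the throw-to-return-early idiom used
%% in get_result/2: the comprehension throws the first value whose
%% lookup matches and falls through to [] when none does. Names and the
%% Lookup fun are hypothetical.

first_match(Lookup, Names) ->
    try [throw(V) || N <- Names, {ok, V} <- [Lookup(N)]] of
        [] -> false            %% nothing matched
    catch
        V -> V                 %% first match, thrown from the comprehension
    end.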
x(T) ->
exit(T).
@@ -1221,10 +1280,9 @@ answer_rc(_, _, Sent) ->
send_R(SvcName, AppOrAlias, Msg, Opts, Caller) ->
case pick_peer(SvcName, AppOrAlias, Msg, Opts) of
- {{_,_,_} = Transport, Mask} ->
+ {Transport, Mask, SvcOpts} ->
+ diameter_codec:setopts(SvcOpts),
send_request(Transport, Mask, Msg, Opts, Caller, SvcName);
- false ->
- {error, no_connection};
{error, _} = No ->
No
end.
@@ -1286,6 +1344,8 @@ send_request({TPid, Caps, App}
SvcName,
[]).
+%% send_R/7
+
send_R({send, Msg}, Pkt, Transport, Opts, Caller, SvcName, Fs) ->
send_R(make_request_packet(Msg, Pkt),
Transport,
@@ -1388,6 +1448,21 @@ make_request_packet(#diameter_packet{header = Hdr} = Pkt,
make_request_packet(Msg, Pkt) ->
Pkt#diameter_packet{msg = Msg}.
+%% make_retransmit_packet/2
+
+make_retransmit_packet(#diameter_packet{msg = [#diameter_header{} = Hdr
+ | Avps]}
+ = Pkt) ->
+ Pkt#diameter_packet{msg = [make_retransmit_header(Hdr) | Avps]};
+
+make_retransmit_packet(#diameter_packet{header = Hdr} = Pkt) ->
+ Pkt#diameter_packet{header = make_retransmit_header(Hdr)}.
+
+%% make_retransmit_header/1
+
+make_retransmit_header(Hdr) ->
+ Hdr#diameter_header{is_retransmitted = true}.
+
%% fold_record/2
fold_record(undefined, R) ->
@@ -1398,12 +1473,12 @@ fold_record(Rec, R) ->
%% send_R/6
send_R(Pkt0,
- {TPid, Caps, #diameter_app{dictionary = Dict} = App},
+ {TPid, Caps, #diameter_app{dictionary = AppDict} = App},
Opts,
{Pid, Ref},
SvcName,
Fs) ->
- Pkt = encode(Dict, TPid, Pkt0, Fs),
+ Pkt = encode(AppDict, TPid, Pkt0, Fs),
#options{timeout = Timeout}
= Opts,
@@ -1416,7 +1491,7 @@ send_R(Pkt0,
packet = Pkt0},
try
- incr(send, Pkt, TPid, Dict),
+ incr(send, Pkt, TPid, AppDict),
TRef = send_request(TPid, Pkt, Req, SvcName, Timeout),
Pid ! Ref, %% tell caller a send has been attempted
handle_answer(SvcName,
@@ -1456,10 +1531,10 @@ handle_answer(SvcName,
id = Id}
= App,
{answer, Req, Dict0, Pkt}) ->
- Dict = dict(AppDict, Dict0, Pkt),
- handle_A(errors(Id, diameter_codec:decode({Dict, AppDict}, Pkt)),
+ MsgDict = msg_dict(AppDict, Dict0, Pkt),
+ handle_A(errors(Id, diameter_codec:decode({MsgDict, AppDict}, Pkt)),
SvcName,
- Dict,
+ MsgDict,
Dict0,
App,
Req).
@@ -1484,10 +1559,10 @@ handle_A(Pkt, SvcName, Dict, Dict0, App, #request{transport = TPid} = Req) ->
%% a missing AVP. If both are optional in the dictionary
%% then this isn't a decode error: just continue on.
answer(Pkt, SvcName, App, Req);
- exit: {invalid_error_bit, {_, _, _, RC}} ->
+ exit: {invalid_error_bit, {_, _, _, Avp}} ->
#diameter_packet{errors = Es}
= Pkt,
- E = {5004, #diameter_avp{name = 'Result-Code', value = RC}},
+ E = {5004, Avp},
answer(Pkt#diameter_packet{errors = [E|Es]}, SvcName, App, Req)
end.
@@ -1531,7 +1606,9 @@ a(Hdr, SvcName, discard) ->
%% timer value is ignored. This means that an answer could be accepted
%% from a peer after timeout in the case of failover.
-retransmit({{_,_,App} = Transport, _Mask}, Req, Opts, SvcName, Timeout) ->
+%% retransmit/5
+
+retransmit({{_,_,App} = Transport, _, _}, Req, Opts, SvcName, Timeout) ->
try retransmit(Transport, Req, SvcName, Timeout) of
T -> recv_A(Timeout, SvcName, App, Opts, T)
catch
@@ -1552,17 +1629,26 @@ pick_peer(SvcName,
pick_peer(SvcName, App, Msg, Opts#options{extra = []});
pick_peer(_, _, undefined, _) ->
- false;
+ {error, no_connection};
pick_peer(SvcName,
AppOrAlias,
Msg,
#options{filter = Filter, extra = Xtra}) ->
- diameter_service:pick_peer(SvcName,
- AppOrAlias,
- {fun(D) -> get_destination(D, Msg) end,
- Filter,
- Xtra}).
+ pick(diameter_service:pick_peer(SvcName,
+ AppOrAlias,
+ {fun(D) -> get_destination(D, Msg) end,
+ Filter,
+ Xtra})).
+
+pick({{_,_,_} = Transport, Mask}) -> %% from old code; dialyzer complains
+ {Transport, Mask, []}; %% about this
+
+pick(false) ->
+ {error, no_connection};
+
+pick(T) ->
+ T.
%% handle_error/4
@@ -1647,6 +1733,8 @@ send({TPid, Pkt, #request{handler = Pid} = Req0, SvcName, Timeout, TRef}) ->
end.
%% recv/4
+%%
+%% Relay an answer from a remote node.
recv(TPid, Pid, TRef, Ref) ->
receive
@@ -1660,8 +1748,14 @@ recv(TPid, Pid, TRef, Ref) ->
%% send/2
-send(Pid, Pkt) ->
- Pid ! {send, Pkt}.
+send(Pid, Pkt) -> %% Strip potentially large message terms.
+ #diameter_packet{header = H,
+ bin = Bin,
+ transport_data = T}
+ = Pkt,
+ Pid ! {send, #diameter_packet{header = H,
+ bin = Bin,
+ transport_data = T}}.
%% retransmit/4
@@ -1674,9 +1768,7 @@ retransmit({TPid, Caps, App}
have_request(Pkt0, TPid) %% Don't failover to a peer we've
andalso ?THROW(timeout), %% already sent to.
- #diameter_packet{header = Hdr0} = Pkt0,
- Hdr = Hdr0#diameter_header{is_retransmitted = true},
- Pkt = Pkt0#diameter_packet{header = Hdr},
+ Pkt = make_retransmit_packet(Pkt0),
retransmit(cb(App, prepare_retransmit, [Pkt, SvcName, {TPid, Caps}]),
Transport,
@@ -1712,19 +1804,19 @@ retransmit(T, {_, _, App}, _, _, _, _) ->
?ERROR({invalid_return, T, prepare_retransmit, App}).
resend_request(Pkt0,
- {TPid, Caps, #diameter_app{dictionary = Dict}},
+ {TPid, Caps, #diameter_app{dictionary = AppDict}},
Req0,
SvcName,
Tmo,
Fs) ->
- Pkt = encode(Dict, TPid, Pkt0, Fs),
+ Pkt = encode(AppDict, TPid, Pkt0, Fs),
Req = Req0#request{transport = TPid,
packet = Pkt0,
caps = Caps},
?LOG(retransmission, Pkt#diameter_packet.header),
- incr(TPid, {msg_id(Pkt, Dict), send, retransmission}),
+ incr(TPid, {msg_id(Pkt, AppDict), send, retransmission}),
TRef = send_request(TPid, Pkt, Req, SvcName, Tmo),
{TRef, Req}.
@@ -1807,7 +1899,7 @@ str([]) ->
str(T) ->
T.
-%% get_avp_value/3
+%% get_avp/3
%%
%% Find an AVP in a message of one of three forms:
%%
@@ -1824,47 +1916,71 @@ str(T) ->
%% look for are in the common dictionary. This is required since the
%% relay dictionary doesn't inherit the common dictionary (which maybe
%% it should).
-get_avp_value(?RELAY, Name, Msg) ->
- get_avp_value(?BASE, Name, Msg);
+get_avp(?RELAY, Name, Msg) ->
+ get_avp(?BASE, Name, Msg);
-%% Message sent as a header/avps list, probably a relay case but not
-%% necessarily.
-get_avp_value(Dict, Name, [#diameter_header{} | Avps]) ->
+%% Message as a header/avps list.
+get_avp(Dict, Name, [#diameter_header{} | Avps]) ->
try
{Code, _, VId} = Dict:avp_header(Name),
- [A|_] = lists:dropwhile(fun(#diameter_avp{code = C, vendor_id = V}) ->
- C /= Code orelse V /= VId
- end,
- Avps),
- avp_decode(Dict, Name, A)
+ find_avp(Code, VId, Avps)
+ of
+ A ->
+ (avp_decode(Dict, Name, ungroup(A)))#diameter_avp{name = Name}
catch
error: _ ->
undefined
end;
%% Outgoing message as a name/values list.
-get_avp_value(_, Name, [_MsgName | Avps]) ->
+get_avp(_, Name, [_MsgName | Avps]) ->
case lists:keyfind(Name, 1, Avps) of
{_, V} ->
- V;
+ #diameter_avp{name = Name, value = V};
_ ->
undefined
end;
%% Message is typically a record but not necessarily.
-get_avp_value(Dict, Name, Rec) ->
+get_avp(Dict, Name, Rec) ->
try
- Dict:'#get-'(Name, Rec)
+ #diameter_avp{name = Name, value = Dict:'#get-'(Name, Rec)}
catch
error:_ ->
undefined
end.
+%% get_avp_value/3
+
+get_avp_value(Dict, Name, Msg) ->
+ case get_avp(Dict, Name, Msg) of
+ #diameter_avp{value = V} ->
+ V;
+ undefined = No ->
+ No
+ end.
+
+%% ungroup/1
+
+ungroup([Avp|_]) ->
+ Avp;
+ungroup(Avp) ->
+ Avp.
+
+%% avp_decode/3
+
avp_decode(Dict, Name, #diameter_avp{value = undefined,
- data = Bin}) ->
- Dict:avp(decode, Bin, Name);
-avp_decode(_, _, #diameter_avp{value = V}) ->
- V.
+ data = Bin}
+ = Avp) ->
+ try Dict:avp(decode, Bin, Name) of
+ V ->
+ Avp#diameter_avp{value = V}
+ catch
+ error:_ ->
+ Avp
+ end;
+avp_decode(_, _, #diameter_avp{} = Avp) ->
+ Avp.
cb(#diameter_app{module = [_|_] = M}, F, A) ->
eval(M, F, A);
diff --git a/lib/diameter/src/base/diameter_types.erl b/lib/diameter/src/base/diameter_types.erl
index ca3338be5f..87a0f0663d 100644
--- a/lib/diameter/src/base/diameter_types.erl
+++ b/lib/diameter/src/base/diameter_types.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -75,7 +75,7 @@
%% message indicating this error MUST include the offending AVPs
%% within a Failed-AVP AVP.
%%
--define(INVALID_LENGTH(Bin), erlang:error({'DIAMETER', 5014, Bin})).
+-define(INVALID_LENGTH(Bitstr), erlang:error({'DIAMETER', 5014, Bitstr})).
%% -------------------------------------------------------------------------
%% 3588, 4.2. Basic AVP Data Formats
@@ -90,7 +90,12 @@
'OctetString'(decode, Bin)
when is_binary(Bin) ->
- binary_to_list(Bin);
+ case diameter_codec:getopt(string_decode) of
+ true ->
+ binary_to_list(Bin);
+ false ->
+ Bin
+ end;
'OctetString'(decode, B) ->
?INVALID_LENGTH(B);
@@ -298,21 +303,29 @@
'OctetString'(M, lists:duplicate(0,7));
'DiameterURI'(encode, #diameter_uri{type = Type,
- fqdn = D,
- port = P,
+ fqdn = DN,
+ port = PN,
transport = T,
- protocol = Prot}
- = U) ->
- S = lists:append([atom_to_list(Type), "://", D,
- ":", integer_to_list(P),
+ protocol = P})
+ when (Type == 'aaa' orelse Type == 'aaas'),
+ is_integer(PN),
+ 0 =< PN,
+ (T == tcp orelse T == sctp orelse T == udp),
+ (P == diameter orelse P == radius orelse P == 'tacacs+'),
+ (P /= diameter orelse T /= udp) ->
+ iolist_to_binary([atom_to_list(Type), "://", DN,
+ ":", integer_to_list(PN),
";transport=", atom_to_list(T),
- ";protocol=", atom_to_list(Prot)]),
- U = scan_uri(S), %% assert
- list_to_binary(S);
+ ";protocol=", atom_to_list(P)]);
+%% Don't omit defaults since they're dependent on whether RFC 3588 or
+%% 6733 is being followed. For one, we don't know this at encode; for
+%% two (more importantly), we don't know how the peer will interpret
+%% defaults, so it's best to be explicit. Interpret defaults on decode
+%% since there's no choice.
'DiameterURI'(encode, Str) ->
Bin = iolist_to_binary(Str),
- #diameter_uri{} = scan_uri(Bin), %% type check
+ #diameter_uri{} = scan_uri(Bin), %% assert
Bin.
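%% For illustration only (the record values below are assumed, not
%% taken from this module): a fully-specified record encodes to an
%% explicit URI with no defaults omitted.
%%
%%   'DiameterURI'(encode, #diameter_uri{type = aaa,
%%                                       fqdn = "example.com",
%%                                       port = 3868,
%%                                       transport = sctp,
%%                                       protocol = diameter})
%%
%% returns <<"aaa://example.com:3868;transport=sctp;protocol=diameter">>.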
%% --------------------
@@ -321,7 +334,6 @@
'IPFilterRule'(encode = M, zero) ->
'OctetString'(M, lists:duplicate(0,33));
-%% TODO: parse grammar.
'IPFilterRule'(M, X) ->
'OctetString'(M, X).
@@ -331,7 +343,6 @@
'QoSFilterRule'(encode = M, zero = X) ->
'IPFilterRule'(M, X);
-%% TODO: parse grammar.
'QoSFilterRule'(M, X) ->
'OctetString'(M, X).
@@ -339,7 +350,13 @@
'UTF8String'(decode, Bin)
when is_binary(Bin) ->
- tl([0|_] = unicode:characters_to_list([0, Bin])); %% assert list return
+ case diameter_codec:getopt(string_decode) of
+ true ->
+ %% assert list return
+ tl([0|_] = unicode:characters_to_list([0, Bin]));
+ false ->
+ <<_/binary>> = unicode:characters_to_binary(Bin)
+ end;
'UTF8String'(decode, B) ->
?INVALID_LENGTH(B);
@@ -507,55 +524,90 @@ msb(false) -> ?TIME_2036.
%%
%% aaa-protocol = ( "diameter" / "radius" / "tacacs+" )
-scan_uri(Bin)
- when is_binary(Bin) ->
- scan_uri(binary_to_list(Bin));
-scan_uri("aaa://" ++ Rest) ->
- scan_fqdn(Rest, #diameter_uri{type = aaa});
-scan_uri("aaas://" ++ Rest) ->
- scan_fqdn(Rest, #diameter_uri{type = aaas}).
-
-scan_fqdn(S, U) ->
- {[_|_] = F, Rest} = lists:splitwith(fun is_fqdn/1, S),
- scan_opt_port(Rest, U#diameter_uri{fqdn = F}).
-
-scan_opt_port(":" ++ S, U) ->
- {[_|_] = P, Rest} = lists:splitwith(fun is_digit/1, S),
- scan_opt_transport(Rest, U#diameter_uri{port = list_to_integer(P)});
-scan_opt_port(S, U) ->
- scan_opt_transport(S, U).
-
-scan_opt_transport(";transport=" ++ S, U) ->
- {P, Rest} = transport(S),
- scan_opt_protocol(Rest, U#diameter_uri{transport = P});
-scan_opt_transport(S, U) ->
- scan_opt_protocol(S, U).
-
-scan_opt_protocol(";protocol=" ++ S, U) ->
- {P, ""} = protocol(S),
- U#diameter_uri{protocol = P};
-scan_opt_protocol("", U) ->
- U.
-
-transport("tcp" ++ S) ->
- {tcp, S};
-transport("sctp" ++ S) ->
- {sctp, S};
-transport("udp" ++ S) ->
- {udp, S}.
-
-protocol("diameter" ++ S) ->
- {diameter, S};
-protocol("radius" ++ S) ->
- {radius, S};
-protocol("tacacs+" ++ S) ->
- {'tacacs+', S}.
-
-is_fqdn(C) ->
- is_digit(C) orelse is_alpha(C) orelse C == $. orelse C == $-.
-
-is_alpha(C) ->
- ($a =< C andalso C =< $z) orelse ($A =< C andalso C =< $Z).
-
-is_digit(C) ->
- $0 =< C andalso C =< $9.
+%% RFC 6733, 4.3.1, changes the defaults:
+%%
+%% "aaa://" FQDN [ port ] [ transport ] [ protocol ]
+%%
+%% ; No transport security
+%%
+%% "aaas://" FQDN [ port ] [ transport ] [ protocol ]
+%%
+%% ; Transport security used
+%%
+%% FQDN = < Fully Qualified Domain Name >
+%%
+%% port = ":" 1*DIGIT
+%%
+%% ; One of the ports used to listen for
+%% ; incoming connections.
+%% ; If absent, the default Diameter port
+%% ; (3868) is assumed if no transport
+%% ; security is used and port 5658 when
+%% ; transport security (TLS/TCP and DTLS/SCTP)
+%% ; is used.
+%%
+%% transport = ";transport=" transport-protocol
+%%
+%% ; One of the transports used to listen
+%% ; for incoming connections. If absent,
+%% ; the default protocol is assumed to be TCP.
+%% ; UDP MUST NOT be used when the aaa-protocol
+%% ; field is set to diameter.
+%%
+%% transport-protocol = ( "tcp" / "sctp" / "udp" )
+%%
+%% protocol = ";protocol=" aaa-protocol
+%%
+%% ; If absent, the default AAA protocol
+%% ; is Diameter.
+%%
+%% aaa-protocol = ( "diameter" / "radius" / "tacacs+" )
+
+scan_uri(Bin) ->
+ RE = "^(aaas?)://"
+ "([-a-zA-Z0-9.]{1,255})"
+ "(:0{0,5}([0-9]{1,5}))?"
+ "(;transport=(tcp|sctp|udp))?"
+ "(;protocol=(diameter|radius|tacacs\\+))?$",
+ %% A port number is 16-bit, so an arbitrary number of digits is
+ %% just a vulnerability, but provide a little slack with leading
+ %% zeros in a port number just because the regexp was previously
+ %% [0-9]+ and it's not inconceivable that a value might be padded.
+ %% Don't fantasize about this padding being more than the number
+ %% of digits in the port number proper.
+ %%
+ %% Similarly, a FQDN can't be arbitrarily long: at most 255
+ %% octets.
+ {match, [A, DN, PN, T, P]} = re:run(Bin,
+ RE,
+ [{capture, [1,2,4,6,8], binary}]),
+ Type = to_atom(A),
+ {PN0, T0} = defaults(diameter_codec:getopt(rfc), Type),
+ PortNr = to_int(PN, PN0),
+ 0 = PortNr bsr 16, %% assert
+ #diameter_uri{type = Type,
+ fqdn = 'OctetString'(decode, DN),
+ port = PortNr,
+ transport = to_atom(T, T0),
+ protocol = to_atom(P, diameter)}.
+
+%% Choose defaults based on the RFC, since 6733 has changed them.
+defaults(3588, _) ->
+ {3868, sctp};
+defaults(6733, aaa) ->
+ {3868, tcp};
+defaults(6733, aaas) ->
+ {5658, tcp}.
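%% A decode sketch under assumed options: with diameter_codec:getopt(rfc)
%% returning 6733, a URI with no explicit port, transport or protocol
%% picks up the RFC 6733 defaults, so
%%
%%   scan_uri(<<"aaa://example.com">>)
%%
%% returns #diameter_uri{type = aaa, port = 3868, transport = tcp,
%% protocol = diameter}, the fqdn being a list or binary depending on
%% the string_decode option.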
+
+to_int(<<>>, N) ->
+ N;
+to_int(B, _) ->
+ binary_to_integer(B).
+
+to_atom(<<>>, A) ->
+ A;
+to_atom(B, _) ->
+ to_atom(B).
+
+to_atom(B) ->
+ binary_to_atom(B, latin1).
diff --git a/lib/diameter/src/base/diameter_watchdog.erl b/lib/diameter/src/base/diameter_watchdog.erl
index b7f2d24941..de9c4bca33 100644
--- a/lib/diameter/src/base/diameter_watchdog.erl
+++ b/lib/diameter/src/base/diameter_watchdog.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -122,15 +122,18 @@ i({Ack, T, Pid, {RecvData,
= Svc}}) ->
erlang:monitor(process, Pid),
wait(Ack, Pid),
- random:seed(now()),
- putr(restart, {T, Opts, Svc}), %% save seeing it in trace
- putr(dwr, dwr(Caps)), %%
+ {_, Seed} = diameter_lib:seed(),
+ random:seed(Seed),
+ putr(restart, {T, Opts, Svc, SvcOpts}), %% save seeing it in trace
+ putr(dwr, dwr(Caps)), %%
{_,_} = Mask = proplists:get_value(sequence, SvcOpts),
Restrict = proplists:get_value(restrict_connections, SvcOpts),
Nodes = restrict_nodes(Restrict),
Dict0 = common_dictionary(Apps),
+ diameter_codec:setopts([{common_dictionary, Dict0},
+ {string_decode, false}]),
#watchdog{parent = Pid,
- transport = start(T, Opts, Mask, Nodes, Dict0, Svc),
+ transport = start(T, Opts, SvcOpts, Nodes, Dict0, Svc),
tw = proplists:get_value(watchdog_timer,
Opts,
?DEFAULT_TW_INIT),
@@ -165,11 +168,11 @@ config({okay, N}, Rec)
when ?IS_NATURAL(N) ->
Rec#config{okay = N}.
-%% start/5
+%% start/6
-start(T, Opts, Mask, Nodes, Dict0, Svc) ->
+start(T, Opts, SvcOpts, Nodes, Dict0, Svc) ->
{_MRef, Pid}
- = diameter_peer_fsm:start(T, Opts, {Mask, Nodes, Dict0, Svc}),
+ = diameter_peer_fsm:start(T, Opts, {SvcOpts, Nodes, Dict0, Svc}),
Pid.
%% common_dictionary/1
@@ -319,7 +322,7 @@ code_change(_, State, _) ->
%% expiry; or another watchdog is saying the same after reestablishing
%% a connection previously had by this one.
transition(close, #watchdog{}) ->
- {{accept, _}, _, _} = getr(restart), %% assert
+ {accept, _} = role(), %% assert
stop;
%% Service is asking for the peer to be taken down gracefully.
@@ -332,8 +335,9 @@ transition({shutdown = T, Pid, Reason}, #watchdog{parent = Pid,
send(TPid, {T, self(), Reason}),
S#watchdog{shutdown = true};
-%% Transport is telling us that DPA has been sent in response to DPR:
-%% its death should lead to ours.
+%% Transport is telling us that DPA has been sent in response to DPR,
+%% or that DPR has been explicitly sent: transport death should lead
+%% to ours.
transition({'DPR', TPid}, #watchdog{transport = TPid} = S) ->
S#watchdog{shutdown = true};
@@ -368,7 +372,7 @@ transition({open, TPid, Hosts, _} = Open,
restrict = {_,R},
config = #config{suspect = OS}}
= S) ->
- case okay(getr(restart), Hosts, R) of
+ case okay(role(), Hosts, R) of
okay ->
set_watchdog(S#watchdog{status = okay,
num_dwa = OS});
@@ -422,7 +426,7 @@ transition({'DOWN', _, process, TPid, _Reason} = D,
= S0) ->
S = S0#watchdog{pending = false,
transport = undefined},
- {{M,_}, _, _} = getr(restart),
+ {M,_} = role(),
%% Close an accepting watchdog immediately if there's no
%% restriction on the number of connections to the same peer: the
@@ -489,7 +493,7 @@ encode(dwa, Dict0, #diameter_packet{header = H, transport_data = TD}
%% okay/3
-okay({{accept, Ref}, _, _}, Hosts, Restrict) ->
+okay({accept, Ref}, Hosts, Restrict) ->
T = {?MODULE, connection, Ref, Hosts},
diameter_reg:add(T),
if Restrict ->
@@ -500,7 +504,7 @@ okay({{accept, Ref}, _, _}, Hosts, Restrict) ->
%% Register before matching so that at least one of two registering
%% processes will match the other.
-okay({{connect, _}, _, _}, _, _) ->
+okay({connect, _}, _, _) ->
okay.
%% okay/2
@@ -515,6 +519,11 @@ okay(C) ->
[_|_] = [send(P, close) || {_,P} <- C, self() /= P],
reopen.
+%% role/0
+
+role() ->
+ element(1, getr(restart)).
+
%% set_watchdog/1
set_watchdog(#watchdog{tw = TwInit,
@@ -549,7 +558,7 @@ send_watchdog(#watchdog{pending = false,
?LOG(send, 'DWR'),
S#watchdog{pending = true}.
-%% Dont' count encode errors since we don't expect any on DWR/DWA.
+%% Don't count encode errors since we don't expect any on DWR/DWA.
%% recv/3
@@ -571,11 +580,18 @@ rcv('DWR', Pkt, #watchdog{transport = TPid,
DPkt = diameter_codec:decode(Dict0, Pkt),
diameter_traffic:incr(recv, DPkt, TPid, Dict0),
diameter_traffic:incr_error(recv, DPkt, TPid, Dict0),
- EPkt = encode(dwa, Dict0, Pkt),
+ #diameter_packet{header = H,
+ transport_data = T,
+ bin = Bin}
+ = EPkt
+ = encode(dwa, Dict0, Pkt),
diameter_traffic:incr(send, EPkt, TPid, Dict0),
diameter_traffic:incr_rc(send, EPkt, TPid, Dict0),
- send(TPid, {send, EPkt}),
+ %% Strip potentially large message terms.
+ send(TPid, {send, #diameter_packet{header = H,
+ transport_data = T,
+ bin = Bin}}),
?LOG(send, 'DWA');
rcv('DWA', Pkt, #watchdog{transport = TPid,
@@ -590,9 +606,10 @@ rcv('DWA', Pkt, #watchdog{transport = TPid,
rcv(N, _, _)
when N == 'CER';
N == 'CEA';
- N == 'DPR';
- N == 'DPA' ->
+ N == 'DPR' ->
false;
+%% DPR can be sent explicitly with diameter:call/4. Only the
+%% corresponding DPAs arrive here.
rcv(_, Pkt, #watchdog{transport = TPid,
dictionary = Dict0,
@@ -793,26 +810,28 @@ restart(S) -> %% reconnect has won race with timeout
%% state down rather then initial when receiving notification of an
%% open connection.
-restart({{connect, _} = T, Opts, Svc},
+restart({T, Opts, Svc}, S) -> %% put in old code
+ restart({T, Opts, Svc, []}, S);
+
+restart({{connect, _} = T, Opts, Svc, SvcOpts},
#watchdog{parent = Pid,
- sequence = Mask,
restrict = {R,_},
dictionary = Dict0}
= S) ->
send(Pid, {reconnect, self()}),
Nodes = restrict_nodes(R),
- S#watchdog{transport = start(T, Opts, Mask, Nodes, Dict0, Svc),
+ S#watchdog{transport = start(T, Opts, SvcOpts, Nodes, Dict0, Svc),
restrict = {R, lists:member(node(), Nodes)}};
%% No restriction on the number of connections to the same peer: just
%% die. Note that a state machine never enters state REOPEN in this
%% case.
-restart({{accept, _}, _, _}, #watchdog{restrict = {_, false}}) ->
+restart({{accept, _}, _, _, _}, #watchdog{restrict = {_, false}}) ->
stop; %% 'DOWN' was in old code: 'close' was not sent
%% Otherwise hang around until told to die, either by the service or
%% by another watchdog.
-restart({{accept, _}, _, _}, S) ->
+restart({{accept, _}, _, _, _}, S) ->
S.
%% Don't currently use Opts/Svc in the accept case.
diff --git a/lib/diameter/src/compiler/diameter_codegen.erl b/lib/diameter/src/compiler/diameter_codegen.erl
index d91a776321..d5a9c81b09 100644
--- a/lib/diameter/src/compiler/diameter_codegen.erl
+++ b/lib/diameter/src/compiler/diameter_codegen.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -183,7 +183,7 @@ erl_forms(Mod, ParseD) ->
f_enumerated_avp(ParseD),
f_empty_value(ParseD),
f_dict(ParseD),
- {eof, ?LINE}]],
+ {eof, erl_anno:new(?LINE)}]],
lists:append(Forms).
diff --git a/lib/diameter/src/compiler/diameter_forms.hrl b/lib/diameter/src/compiler/diameter_forms.hrl
index dd03401b9e..04d5834c88 100644
--- a/lib/diameter/src/compiler/diameter_forms.hrl
+++ b/lib/diameter/src/compiler/diameter_forms.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,8 +28,10 @@
[],
[?APPLY(erlang, error, [?ATOM(badarg)])]}).
+-define(ANNO(L), erl_anno:new(L)).
+
%% Form tag with line number.
--define(F(T), T, ?LINE).
+-define(F(T), T, ?ANNO(?LINE)).
%% Yes, that's right. The replacement is to the first unmatched ')'.
-define(attribute, ?F(attribute)).
@@ -47,10 +49,10 @@
-define(record_index, ?F(record_index)).
-define(tuple, ?F(tuple)).
--define(ATOM(T), {atom, ?LINE, T}).
--define(INTEGER(N), {integer, ?LINE, N}).
--define(VAR(V), {var, ?LINE, V}).
--define(NIL, {nil, ?LINE}).
+-define(ATOM(T), {atom, ?ANNO(?LINE), T}).
+-define(INTEGER(N), {integer, ?ANNO(?LINE), N}).
+-define(VAR(V), {var, ?ANNO(?LINE), V}).
+-define(NIL, {nil, ?ANNO(?LINE)}).
-define(CALL(F,A), {?call, ?ATOM(F), A}).
-define(APPLY(M,F,A), {?call, {?remote, ?ATOM(M), ?ATOM(F)}, A}).
diff --git a/lib/diameter/src/diameter.appup.src b/lib/diameter/src/diameter.appup.src
index 881d25b5fb..b89859ed24 100644
--- a/lib/diameter/src/diameter.appup.src
+++ b/lib/diameter/src/diameter.appup.src
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -35,31 +35,44 @@
{"1.4.3", [{restart_application, diameter}]}, %% R16B02
{"1.4.4", [{restart_application, diameter}]},
{"1.5", [{restart_application, diameter}]}, %% R16B03
- {"1.6", [{load_module, diameter_lib}, %% 17.0
+ {"1.6", [{restart_application, diameter}]}, %% 17.0
+ {"1.7", [{restart_application, diameter}]}, %% 17.[12]
+ {<<"^1\\.(7\\.1|8)$">>, %% 17.[34]
+ [{load_module, diameter_lib},
+ {load_module, diameter_peer},
+ {load_module, diameter_reg},
+ {load_module, diameter_session},
+ {load_module, diameter_stats},
+ {load_module, diameter_sync},
+ {load_module, diameter_capx},
+ {load_module, diameter_codec},
+ {load_module, diameter_types},
{load_module, diameter_traffic},
- {load_module, diameter_watchdog},
- {load_module, diameter_peer_fsm},
{load_module, diameter_service},
+ {load_module, diameter_peer_fsm},
+ {load_module, diameter_watchdog},
+ {load_module, diameter_tcp},
+ {load_module, diameter_sctp},
+ {load_module, diameter_config},
+ {load_module, diameter},
{load_module, diameter_gen_base_rfc6733},
{load_module, diameter_gen_acct_rfc6733},
{load_module, diameter_gen_base_rfc3588},
{load_module, diameter_gen_base_accounting},
{load_module, diameter_gen_relay},
- {load_module, diameter_codec},
- {load_module, diameter_sctp}]},
- {"1.7", [{load_module, diameter_service}, %% 17.1
- {load_module, diameter_codec},
+ {update, diameter_transport_sup, supervisor},
+ {update, diameter_service_sup, supervisor},
+ {update, diameter_sup, supervisor}]},
+ {"1.9", [{load_module, diameter_codec}, %% 17.5
+ {load_module, diameter_traffic},
+ {load_module, diameter_sctp},
{load_module, diameter_gen_base_rfc6733},
{load_module, diameter_gen_acct_rfc6733},
{load_module, diameter_gen_base_rfc3588},
{load_module, diameter_gen_base_accounting},
- {load_module, diameter_gen_relay},
- {load_module, diameter_traffic},
- {load_module, diameter_peer_fsm}]},
- {"1.7.1", [{load_module, diameter_traffic}, %% 17.3
- {load_module, diameter_watchdog},
- {load_module, diameter_peer_fsm},
- {load_module, diameter_service}]}
+ {load_module, diameter_gen_relay}]},
+ {"1.9.1", [{load_module, diameter_traffic}, %% 17.5.3
+ {load_module, diameter_sctp}]}
],
[
{"0.9", [{restart_application, diameter}]},
@@ -77,30 +90,43 @@
{"1.4.3", [{restart_application, diameter}]},
{"1.4.4", [{restart_application, diameter}]},
{"1.5", [{restart_application, diameter}]},
- {"1.6", [{load_module, diameter_sctp},
- {load_module, diameter_codec},
+ {"1.6", [{restart_application, diameter}]},
+ {"1.7", [{restart_application, diameter}]},
+ {<<"^1\\.(7\\.1|8)$">>,
+ [{update, diameter_sup, supervisor},
+ {update, diameter_service_sup, supervisor},
+ {update, diameter_transport_sup, supervisor},
{load_module, diameter_gen_relay},
{load_module, diameter_gen_base_accounting},
{load_module, diameter_gen_base_rfc3588},
{load_module, diameter_gen_acct_rfc6733},
{load_module, diameter_gen_base_rfc6733},
- {load_module, diameter_service},
- {load_module, diameter_peer_fsm},
+ {load_module, diameter},
+ {load_module, diameter_config},
+ {load_module, diameter_sctp},
+ {load_module, diameter_tcp},
{load_module, diameter_watchdog},
+ {load_module, diameter_peer_fsm},
+ {load_module, diameter_service},
{load_module, diameter_traffic},
+ {load_module, diameter_types},
+ {load_module, diameter_codec},
+ {load_module, diameter_capx},
+ {load_module, diameter_sync},
+ {load_module, diameter_stats},
+ {load_module, diameter_session},
+ {load_module, diameter_reg},
+ {load_module, diameter_peer},
{load_module, diameter_lib}]},
- {"1.7", [{load_module, diameter_peer_fsm},
- {load_module, diameter_traffic},
- {load_module, diameter_gen_relay},
+ {"1.9", [{load_module, diameter_gen_relay},
{load_module, diameter_gen_base_accounting},
{load_module, diameter_gen_base_rfc3588},
{load_module, diameter_gen_acct_rfc6733},
{load_module, diameter_gen_base_rfc6733},
- {load_module, diameter_codec},
- {load_module, diameter_service}]},
- {"1.7.1", [{load_module, diameter_service},
- {load_module, diameter_peer_fsm},
- {load_module, diameter_watchdog},
+ {load_module, diameter_sctp},
+ {load_module, diameter_traffic},
+ {load_module, diameter_codec}]},
+ {"1.9.1", [{load_module, diameter_sctp},
{load_module, diameter_traffic}]}
]
}.
diff --git a/lib/diameter/src/modules.mk b/lib/diameter/src/modules.mk
index a2a7a51892..c9dd4e683a 100644
--- a/lib/diameter/src/modules.mk
+++ b/lib/diameter/src/modules.mk
@@ -1,7 +1,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2010-2014. All Rights Reserved.
+# Copyright Ericsson AB 2010-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -94,7 +94,7 @@ BINS = \
# Released files relative to ../examples.
EXAMPLES = \
code/GNUmakefile \
- code/peer.erl \
+ code/node.erl \
code/client.erl \
code/client_cb.erl \
code/server.erl \
diff --git a/lib/diameter/src/transport/diameter_sctp.erl b/lib/diameter/src/transport/diameter_sctp.erl
index 32e7aaca39..f80de0a816 100644
--- a/lib/diameter/src/transport/diameter_sctp.erl
+++ b/lib/diameter/src/transport/diameter_sctp.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -18,9 +18,11 @@
%%
-module(diameter_sctp).
-
-behaviour(gen_server).
+-compile({no_auto_import, [now/0]}).
+-import(diameter_lib, [now/0]).
+
%% interface
-export([start/3]).
@@ -37,7 +39,8 @@
code_change/3,
terminate/2]).
--export([info/1]). %% service_info callback
+-export([listener/1,%% diameter_sync callback
+ info/1]). %% service_info callback
-export([ports/0,
ports/1]).
@@ -99,22 +102,31 @@
-record(listener,
{ref :: reference(),
socket :: gen_sctp:sctp_socket(),
- count = 0 :: uint(),
+ count = 0 :: uint(), %% attached transport processes
tmap = ets:new(?MODULE, []) :: ets:tid(),
%% {MRef, Pid|AssocId}, {AssocId, Pid}
pending = {0, ets:new(?MODULE, [ordered_set])},
tref :: reference(),
accept :: [match()]}).
%% Field tmap is used to map an incoming message or event to the
-%% relevent transport process. Field pending implements a queue of
-%% transport processes to which an association has been assigned (at
-%% comm_up and written into tmap) but for which diameter hasn't yet
-%% spawned a transport process: a short-lived state of affairs as a
-%% new transport is spawned as a consequence of a peer being taken up,
-%% transport processes being spawned by the listener on demand. In
-%% case diameter starts a transport before comm_up on a new
-%% association, pending is set to an improper list with the spawned
-%% transport as head and the queue as tail.
+%% relevant transport process. Field pending implements two queues:
+%% the first of transport-to-be processes to which an association has
+%% been assigned (at comm_up and written into tmap) but for which
+%% diameter hasn't yet spawned a transport process, a short-lived
+%% state of affairs as a new transport is spawned as a consequence of
+%% a peer being taken up, transport processes being spawned by the
+%% listener on demand; the second of started transport processes that
+%% have not yet been assigned an association.
+%%
+%% When diameter calls start/3, the transport process is either taken
+%% from the first queue or spawned and placed in the second queue
+%% until an association is established. When an association is
+%% established, a controlling process is either taken from the second
+%% queue or spawned and placed in the first queue. Thus, at any given
+%% time only one queue is non-empty, so both queues share a single ets
+%% table, tagged with a positive length if it holds the first queue and
+%% a negative length if it holds the second. The case -1 is
+%% handled differently for backwards compatibility reasons.
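%% A hypothetical helper, for illustration only, making the sign
%% convention explicit: Q is the shared ets table and the sign of the
%% tagged length says which queue it currently holds.

which_queue({0, _Q}) ->
    empty;
which_queue({N, _Q}) when N > 0 ->
    {unclaimed_associations, N};    %% first queue: comm_up before start/3
which_queue({N, _Q}) when N < 0 ->
    {unassigned_transports, -N}.    %% second queue: start/3 before comm_up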
%% ---------------------------------------------------------------------------
%% # start/3
@@ -139,9 +151,9 @@ ip(T) ->
T.
%% A listener spawns transports either as a consequence of this call
-%% when there is not yet an association to associate with it, or at
-%% comm_up on a new association in which case the call retrieves a
-%% transport from the pending queue.
+%% when there is not yet an association to assign it, or at comm_up on
+%% a new association in which case the call retrieves a transport from
+%% the pending queue.
s({accept, Ref} = A, Addrs, Opts) ->
{LPid, LAs} = listener(Ref, {Opts, Addrs}),
try gen_server:call(LPid, {A, self()}, infinity) of
@@ -211,9 +223,9 @@ init(T) ->
i({listen, Ref, {Opts, Addrs}}) ->
{[Matches], Rest} = proplists:split(Opts, [accept]),
{LAs, Sock} = AS = open(Addrs, Rest, ?DEFAULT_PORT),
- proc_lib:init_ack({ok, self(), LAs}),
ok = gen_sctp:listen(Sock, true),
true = diameter_reg:add_new({?MODULE, listener, {Ref, AS}}),
+ proc_lib:init_ack({ok, self(), LAs}),
start_timer(#listener{ref = Ref,
socket = Sock,
accept = accept(Matches)});
@@ -226,7 +238,7 @@ i({connect, Pid, Opts, Addrs, Ref}) ->
{LAs, Sock} = open(Addrs, Rest, 0),
putr(?REF_KEY, Ref),
proc_lib:init_ack({ok, self(), LAs}),
- erlang:monitor(process, Pid),
+ monitor(process, Pid),
#transport{parent = Pid,
mode = {connect, connect(Sock, RAs, RP, [])},
socket = Sock};
@@ -236,8 +248,8 @@ i({accept, Pid, LPid, Sock, Ref})
when is_pid(Pid) ->
putr(?REF_KEY, Ref),
proc_lib:init_ack({ok, self()}),
- erlang:monitor(process, Pid),
- erlang:monitor(process, LPid),
+ monitor(process, Pid),
+ monitor(process, LPid),
#transport{parent = Pid,
mode = {accept, LPid},
socket = Sock};
@@ -246,7 +258,7 @@ i({accept, Pid, LPid, Sock, Ref})
i({accept, Ref, LPid, Sock, Id}) ->
putr(?REF_KEY, Ref),
proc_lib:init_ack({ok, self()}),
- MRef = erlang:monitor(process, LPid),
+ MRef = monitor(process, LPid),
%% Wait for a signal that the transport has been started before
%% processing other messages.
receive
@@ -270,15 +282,23 @@ close(Sock, Id) ->
%% listener/2
+%% Accepting processes can be started concurrently: ensure only one
+%% listener is started.
listener(LRef, T) ->
+ diameter_sync:call({?MODULE, listener, LRef},
+ {?MODULE, listener, [{LRef, T}]},
+ infinity,
+ infinity).
+
+listener({LRef, T}) ->
l(diameter_reg:match({?MODULE, listener, {LRef, '_'}}), LRef, T).
-%% Existing process with the listening socket ...
+%% Existing listening process ...
l([{{?MODULE, listener, {_, AS}}, LPid}], _, _) ->
- {LAs, _Sock} = AS,
- {LPid, LAs};
-
-%% ... or not: start one.
+ {LAs, _Sock} = AS,
+ {LPid, LAs};
+
+%% ... or not.
l([], LRef, T) ->
{ok, LPid, LAs} = diameter_sctp_sup:start_child({listen, LRef, T}),
{LPid, LAs}.
@@ -347,11 +367,17 @@ type(T) ->
%% # handle_call/3
%% ---------------------------------------------------------------------------
+handle_call(T, From, #listener{pending = L} = S)
+ when is_list(L) ->
+ handle_call(T, From, upgrade(S));
+
handle_call({{accept, Ref}, Pid}, _, #listener{ref = Ref,
- count = N}
+ pending = {N,Q},
+ count = K}
= S) ->
- {TPid, NewS} = accept(Ref, Pid, S),
- {reply, {ok, TPid}, NewS#listener{count = N+1}};
+ TPid = accept(Ref, Pid, S),
+ {reply, {ok, TPid}, downgrade(S#listener{pending = {N-1,Q},
+ count = K+1})};
handle_call(_, _, State) ->
{reply, nok, State}.
@@ -370,8 +396,46 @@ handle_cast(_, State) ->
handle_info(T, #transport{} = S) ->
{noreply, #transport{} = t(T,S)};
+handle_info(T, #listener{pending = L} = S)
+ when is_list(L) ->
+ handle_info(T, upgrade(S));
+
handle_info(T, #listener{} = S) ->
- {noreply, #listener{} = l(T,S)}.
+ {noreply, downgrade(#listener{} = l(T,S))}.
+
+%% upgrade/1
+
+upgrade(#listener{pending = [TPid | {0,Q}]} = S) ->
+ ets:insert(Q, {TPid, now()}),
+ S#listener{pending = {-1,Q}}.
+%% Prior to the possibility of setting pool_size in transport
+%% configuration, a new accepting transport was only started following
+%% the death of a predecessor, so that there was at most one
+%% previously started transport process waiting for an association.
+%% This assumption no longer holds with pool_size > 1, in which case
+%% several accepting transports are started concurrently. Deal with
+%% this by placing the started transports in a new queue of transport
+%% processes waiting for an association.
+%%
+%% Since only one of this queue and the existing queue of controlling
+%% processes waiting for a transport to be started can be non-empty at
+%% any given time, implement both queues in the same ets table. The
+%% absolute value of the first element of the 2-tuple is the queue
+%% length, the sign says which queue it is.
+
+%% downgrade/1
+%%
+%% Revert to the pre-pool_size representation when possible, for
+%% backwards compatibility in the case that the pool_size option
+%% hasn't been used.
+
+downgrade(#listener{pending = {-1,Q}} = S) ->
+ TPid = ets:first(Q),
+ ets:delete(Q, TPid),
+ S#listener{pending = [TPid | {0,Q}]};
+
+downgrade(S) ->
+ S.
%% ---------------------------------------------------------------------------
%% # code_change/3
@@ -436,54 +500,46 @@ l({sctp, Sock, _RA, _RP, Data} = Msg, #listener{socket = Sock} = S) ->
setopts(Sock)
end;
-%% Transport is asking message to be sent. See send/3 for why the send
-%% isn't directly from the transport.
-l({send, AssocId, StreamId, Bin}, #listener{socket = Sock} = S) ->
- send(Sock, AssocId, StreamId, Bin),
- S;
+l({'DOWN', MRef, process, TPid, _}, #listener{pending = {_,Q}} = S) ->
+ down(ets:member(Q, TPid), MRef, TPid, S);
+
+%% Timeout after the last accepting process has died.
+l({timeout, TRef, close = T}, #listener{tref = TRef,
+ count = 0}) ->
+ x(T);
+l({timeout, _, close}, #listener{} = S) ->
+ S.
+
+%% down/4
%% Accepting transport has died. One that's awaiting an association ...
-l({'DOWN', MRef, process, TPid, _}, #listener{pending = [TPid | Q],
- tmap = T,
- count = N}
- = S) ->
+down(true, MRef, TPid, #listener{pending = {N,Q},
+ tmap = T,
+ count = K}
+ = S)
+ when N < 0 ->
+ ets:delete(Q, TPid),
ets:delete(T, MRef),
ets:delete(T, TPid),
- start_timer(S#listener{count = N-1,
- pending = Q});
-
-%% ... ditto and a new transport has already been started ...
-l({'DOWN', _, process, _, _} = T, #listener{pending = [TPid | Q]}
- = S) ->
- #listener{pending = NQ}
- = NewS
- = l(T, S#listener{pending = Q}),
- NewS#listener{pending = [TPid | NQ]};
-
-%% ... or not.
-l({'DOWN', MRef, process, TPid, _}, #listener{socket = Sock,
- tmap = T,
- count = N,
- pending = {P,Q}}
- = S) ->
+ start_timer(S#listener{count = K-1,
+ pending = {N+1,Q}});
+
+%% ... or one that already has one.
+down(B, MRef, TPid, #listener{socket = Sock,
+ tmap = T,
+ count = K,
+ pending = {N,Q}}
+ = S) ->
[{MRef, Id}] = ets:lookup(T, MRef), %% Id = TPid | AssocId
ets:delete(T, MRef),
ets:delete(T, Id),
Id == TPid orelse close(Sock, Id),
- case ets:lookup(Q, TPid) of
- [{TPid, _}] -> %% transport in the pending queue ...
+ if B -> %% Waiting for attachment in the pending queue ...
ets:delete(Q, TPid),
- S#listener{pending = {P-1, Q}};
- [] -> %% ... or not
- start_timer(S#listener{count = N-1})
- end;
-
-%% Timeout after the last accepting process has died.
-l({timeout, TRef, close = T}, #listener{tref = TRef,
- count = 0}) ->
- x(T);
-l({timeout, _, close}, #listener{} = S) ->
- S.
+ S#listener{pending = {N-1,Q}};
+ true -> %% ... or already attached
+ start_timer(S#listener{count = K-1})
+ end.
%% t/2
%%
@@ -582,29 +638,24 @@ accept(Opts) ->
%% No pending associations: spawn a new transport.
accept(Ref, Pid, #listener{socket = Sock,
tmap = T,
- pending = {0,_} = Q}
- = S) ->
+ pending = {N,Q}})
+ when N =< 0 ->
Arg = {accept, Pid, self(), Sock, Ref},
{ok, TPid} = diameter_sctp_sup:start_child(Arg),
- MRef = erlang:monitor(process, TPid),
+ MRef = monitor(process, TPid),
ets:insert(T, [{MRef, TPid}, {TPid, MRef}]),
- {TPid, S#listener{pending = [TPid | Q]}};
-%% Placing the transport in the pending field makes it available to
-%% the next association. The stack starts a new accepting transport
-%% only after this one brings the connection up (or dies).
-
-%% Accepting transport has died. This can happen if a new transport is
-%% started before the DOWN has arrived.
-accept(Ref, Pid, #listener{pending = [TPid | {0,_} = Q]} = S) ->
- false = is_process_alive(TPid), %% assert
- accept(Ref, Pid, S#listener{pending = Q});
+ ets:insert(Q, {TPid, now()}),
+ TPid;
+%% Placing the transport in the second pending table makes it
+%% available to the next association.
%% Pending associations: attach to the first in the queue.
-accept(_, Pid, #listener{ref = Ref, pending = {N,Q}} = S) ->
+accept(_, Pid, #listener{ref = Ref,
+ pending = {_,Q}}) ->
TPid = ets:first(Q),
TPid ! {Ref, Pid},
ets:delete(Q, TPid),
- {TPid, S#listener{pending = {N-1, Q}}}.
+ TPid.
%% send/2
@@ -718,34 +769,12 @@ up(#transport{parent = Pid,
find(Id, Data, #listener{tmap = T} = S) ->
f(ets:lookup(T, Id), Data, S).
-%% New association and a transport waiting for one: use it.
+%% New association ...
f([],
- {_, #sctp_assoc_change{state = comm_up,
- assoc_id = Id}},
- #listener{tmap = T,
- pending = [TPid | {_,_} = Q]}
+ {_, #sctp_assoc_change{state = comm_up, assoc_id = Id}},
+ #listener{pending = {N,Q}}
= S) ->
- [{TPid, MRef}] = ets:lookup(T, TPid),
- ets:insert(T, [{MRef, Id}, {Id, TPid}]),
- ets:delete(T, TPid),
- {TPid, S#listener{pending = Q}};
-
-%% New association and no transport start yet: spawn one and place it
-%% in the queue.
-f([],
- {_, #sctp_assoc_change{state = comm_up,
- assoc_id = Id}},
- #listener{ref = Ref,
- socket = Sock,
- tmap = T,
- pending = {N,Q}}
- = S) ->
- Arg = {accept, Ref, self(), Sock, Id},
- {ok, TPid} = diameter_sctp_sup:start_child(Arg),
- MRef = erlang:monitor(process, TPid),
- ets:insert(T, [{MRef, Id}, {Id, TPid}]),
- ets:insert(Q, {TPid, now()}),
- {TPid, S#listener{pending = {N+1, Q}}};
+ {find(Id, S), S#listener{pending = {N+1,Q}}};
%% Known association ...
f([{_, TPid}], _, S) ->
@@ -755,6 +784,31 @@ f([{_, TPid}], _, S) ->
f([], _, _) ->
false.
+%% find/2
+
+%% Transport waiting for an association: use it.
+find(Id, #listener{tmap = T,
+ pending = {N,Q}})
+ when N < 0 ->
+ TPid = ets:first(Q),
+ [{TPid, MRef}] = ets:lookup(T, TPid),
+ ets:insert(T, [{MRef, Id}, {Id, TPid}]),
+ ets:delete(T, TPid),
+ ets:delete(Q, TPid),
+ TPid;
+
+%% No transport start yet: spawn one and queue.
+find(Id, #listener{ref = Ref,
+ socket = Sock,
+ tmap = T,
+ pending = {_,Q}}) ->
+ Arg = {accept, Ref, self(), Sock, Id},
+ {ok, TPid} = diameter_sctp_sup:start_child(Arg),
+ MRef = monitor(process, TPid),
+ ets:insert(T, [{MRef, Id}, {Id, TPid}]),
+ ets:insert(Q, {TPid, now()}),
+ TPid.
+
%% assoc_id/1
assoc_id({[#sctp_sndrcvinfo{assoc_id = Id}], _}) ->
diff --git a/lib/diameter/src/transport/diameter_tcp.erl b/lib/diameter/src/transport/diameter_tcp.erl
index 4d1b8bec51..0b26f429fb 100644
--- a/lib/diameter/src/transport/diameter_tcp.erl
+++ b/lib/diameter/src/transport/diameter_tcp.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -37,7 +37,8 @@
code_change/3,
terminate/2]).
--export([info/1]). %% service_info callback
+-export([listener/1,%% diameter_sync callback
+ info/1]). %% service_info callback
-export([ports/0,
ports/1]).
@@ -191,7 +192,7 @@ init(T) ->
i({T, Ref, Mod, Pid, Opts, Addrs})
when T == accept;
T == connect ->
- erlang:monitor(process, Pid),
+ monitor(process, Pid),
%% Since accept/connect might block indefinitely, spawn a process
%% that does nothing but kill us with the parent until call
%% returns.
@@ -218,8 +219,8 @@ i({T, Ref, Mod, Pid, Opts, Addrs})
%% A monitor process to kill the transport if the parent dies.
i(#monitor{parent = Pid, transport = TPid} = S) ->
proc_lib:init_ack({ok, self()}),
- erlang:monitor(process, Pid),
- erlang:monitor(process, TPid),
+ monitor(process, Pid),
+ monitor(process, TPid),
S;
%% In principle a link between the transport and killer processes
%% could do the same thing: have the accepting/connecting process be
@@ -235,7 +236,7 @@ i({listen, LRef, APid, {Mod, Opts, Addrs}}) ->
LAddr = laddr(LAddrOpt, Mod, LSock),
true = diameter_reg:add_new({?MODULE, listener, {LRef, {LAddr, LSock}}}),
proc_lib:init_ack({ok, self(), {LAddr, LSock}}),
- erlang:monitor(process, APid),
+ monitor(process, APid),
start_timer(#listener{socket = LSock}).
laddr([], Mod, Sock) ->
@@ -336,17 +337,25 @@ accept(Opts) ->
%% listener/2
+%% Accepting processes can be started concurrently: ensure only one
+%% listener is started.
listener(LRef, T) ->
- l(diameter_reg:match({?MODULE, listener, {LRef, '_'}}), LRef, T).
+ diameter_sync:call({?MODULE, listener, LRef},
+ {?MODULE, listener, [{LRef, T, self()}]},
+ infinity,
+ infinity).
-%% Existing process with the listening socket ...
-l([{{?MODULE, listener, {_, AS}}, LPid}], _, _) ->
- LPid ! {accept, self()},
+listener({LRef, T, TPid}) ->
+ l(diameter_reg:match({?MODULE, listener, {LRef, '_'}}), LRef, T, TPid).
+
+%% Existing listening process ...
+l([{{?MODULE, listener, {_, AS}}, LPid}], _, _, TPid) ->
+ LPid ! {accept, TPid},
AS;
-%% ... or not: start one.
-l([], LRef, T) ->
- {ok, _, AS} = diameter_tcp_sup:start_child({listen, LRef, self(), T}),
+%% ... or not.
+l([], LRef, T, TPid) ->
+ {ok, _, AS} = diameter_tcp_sup:start_child({listen, LRef, TPid, T}),
AS.
%% get_addr/1
@@ -502,7 +511,7 @@ m({'DOWN', _, process, Pid, _}, #monitor{parent = Pid,
%% Another accept transport is attaching.
l({accept, TPid}, #listener{count = N} = S) ->
- erlang:monitor(process, TPid),
+ monitor(process, TPid),
S#listener{count = N+1};
%% Accepting process has died.
diff --git a/lib/diameter/src/transport/diameter_transport_sup.erl b/lib/diameter/src/transport/diameter_transport_sup.erl
index 6457ab78b0..284a41a752 100644
--- a/lib/diameter/src/transport/diameter_transport_sup.erl
+++ b/lib/diameter/src/transport/diameter_transport_sup.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2011. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -54,7 +54,7 @@ start_child(Name, Module) ->
Spec = {Name,
{Module, start_link, [Name]},
permanent,
- 1000,
+ infinity,
supervisor,
[Module]},
supervisor:start_child(?MODULE, Spec).
diff --git a/lib/diameter/test/diameter_3xxx_SUITE.erl b/lib/diameter/test/diameter_3xxx_SUITE.erl
index 071b1a1177..44cb0cc484 100644
--- a/lib/diameter/test/diameter_3xxx_SUITE.erl
+++ b/lib/diameter/test/diameter_3xxx_SUITE.erl
@@ -47,6 +47,7 @@
send_double_error/1,
send_3xxx/1,
send_5xxx/1,
+ counters/1,
stop/1]).
%% diameter callbacks
@@ -111,7 +112,7 @@ all() ->
groups() ->
Tc = tc(),
- [{?util:name([E,D]), [], [start] ++ Tc ++ [stop]}
+ [{?util:name([E,D]), [], [start] ++ Tc ++ [counters, stop]}
|| E <- ?ERRORS, D <- ?RFCS].
init_per_suite(Config) ->
@@ -169,6 +170,203 @@ stop(_Config) ->
ok = diameter:stop_service(?SERVER),
ok = diameter:stop_service(?CLIENT).
+%% counters/1
+%%
+%% Check that counters are as expected.
+
+counters(Config) ->
+ Group = proplists:get_value(group, Config),
+ [_Errors, _Rfc] = G = ?util:name(Group),
+ [] = ?util:run([[fun counters/3, K, S, G]
+ || K <- [statistics, transport, connections],
+ S <- [?CLIENT, ?SERVER]]).
+
+counters(Key, Svc, Group) ->
+ counters(Key, Svc, Group, [_|_] = diameter:service_info(Svc, Key)).
+
+counters(statistics, Svc, [Errors, Rfc], L) ->
+ [{P, Stats}] = L,
+ true = is_pid(P),
+ stats(Svc, Errors, Rfc, lists:sort(Stats));
+
+counters(_, _, _, _) ->
+ todo.
+
+stats(?CLIENT, E, rfc3588, L)
+ when E == answer;
+ E == answer_3xxx ->
+ [{{{unknown,0},recv},2},
+ {{{0,257,0},recv},1},
+ {{{0,257,1},send},1},
+ {{{0,275,0},recv},6},
+ {{{0,275,1},send},10},
+ {{{unknown,0},recv,{'Result-Code',3001}},1},
+ {{{unknown,0},recv,{'Result-Code',3007}},1},
+ {{{0,257,0},recv,{'Result-Code',2001}},1},
+ {{{0,275,0},recv,{'Result-Code',2001}},1},
+ {{{0,275,0},recv,{'Result-Code',3008}},2},
+ {{{0,275,0},recv,{'Result-Code',3999}},1},
+ {{{0,275,0},recv,{'Result-Code',5002}},1},
+ {{{0,275,0},recv,{'Result-Code',5005}},1}]
+ = L;
+
+stats(?SERVER, E, rfc3588, L)
+ when E == answer;
+ E == answer_3xxx ->
+ [{{{unknown,0},send},2},
+ {{{unknown,1},recv},1},
+ {{{0,257,0},send},1},
+ {{{0,257,1},recv},1},
+ {{{0,275,0},send},6},
+ {{{0,275,1},recv},8},
+ {{{unknown,0},send,{'Result-Code',3001}},1},
+ {{{unknown,0},send,{'Result-Code',3007}},1},
+ {{{unknown,1},recv,error},1},
+ {{{0,257,0},send,{'Result-Code',2001}},1},
+ {{{0,275,0},send,{'Result-Code',2001}},1},
+ {{{0,275,0},send,{'Result-Code',3008}},2},
+ {{{0,275,0},send,{'Result-Code',3999}},1},
+ {{{0,275,0},send,{'Result-Code',5002}},1},
+ {{{0,275,0},send,{'Result-Code',5005}},1},
+ {{{0,275,1},recv,error},5}]
+ = L;
+
+stats(?CLIENT, answer, rfc6733, L) ->
+ [{{{unknown,0},recv},2},
+ {{{0,257,0},recv},1},
+ {{{0,257,1},send},1},
+ {{{0,275,0},recv},8},
+ {{{0,275,1},send},10},
+ {{{unknown,0},recv,{'Result-Code',3001}},1},
+ {{{unknown,0},recv,{'Result-Code',3007}},1},
+ {{{0,257,0},recv,{'Result-Code',2001}},1},
+ {{{0,275,0},recv,{'Result-Code',3008}},2},
+ {{{0,275,0},recv,{'Result-Code',3999}},1},
+ {{{0,275,0},recv,{'Result-Code',5002}},1},
+ {{{0,275,0},recv,{'Result-Code',5005}},3},
+ {{{0,275,0},recv,{'Result-Code',5999}},1}]
+ = L;
+
+stats(?SERVER, answer, rfc6733, L) ->
+ [{{{unknown,0},send},2},
+ {{{unknown,1},recv},1},
+ {{{0,257,0},send},1},
+ {{{0,257,1},recv},1},
+ {{{0,275,0},send},8},
+ {{{0,275,1},recv},8},
+ {{{unknown,0},send,{'Result-Code',3001}},1},
+ {{{unknown,0},send,{'Result-Code',3007}},1},
+ {{{unknown,1},recv,error},1},
+ {{{0,257,0},send,{'Result-Code',2001}},1},
+ {{{0,275,0},send,{'Result-Code',3008}},2},
+ {{{0,275,0},send,{'Result-Code',3999}},1},
+ {{{0,275,0},send,{'Result-Code',5002}},1},
+ {{{0,275,0},send,{'Result-Code',5005}},3},
+ {{{0,275,0},send,{'Result-Code',5999}},1},
+ {{{0,275,1},recv,error},5}]
+ = L;
+
+stats(?CLIENT, answer_3xxx, rfc6733, L) ->
+ [{{{unknown,0},recv},2},
+ {{{0,257,0},recv},1},
+ {{{0,257,1},send},1},
+ {{{0,275,0},recv},8},
+ {{{0,275,1},send},10},
+ {{{unknown,0},recv,{'Result-Code',3001}},1},
+ {{{unknown,0},recv,{'Result-Code',3007}},1},
+ {{{0,257,0},recv,{'Result-Code',2001}},1},
+ {{{0,275,0},recv,{'Result-Code',2001}},1},
+ {{{0,275,0},recv,{'Result-Code',3008}},2},
+ {{{0,275,0},recv,{'Result-Code',3999}},1},
+ {{{0,275,0},recv,{'Result-Code',5002}},1},
+ {{{0,275,0},recv,{'Result-Code',5005}},2},
+ {{{0,275,0},recv,{'Result-Code',5999}},1}]
+ = L;
+
+stats(?SERVER, answer_3xxx, rfc6733, L) ->
+ [{{{unknown,0},send},2},
+ {{{unknown,1},recv},1},
+ {{{0,257,0},send},1},
+ {{{0,257,1},recv},1},
+ {{{0,275,0},send},8},
+ {{{0,275,1},recv},8},
+ {{{unknown,0},send,{'Result-Code',3001}},1},
+ {{{unknown,0},send,{'Result-Code',3007}},1},
+ {{{unknown,1},recv,error},1},
+ {{{0,257,0},send,{'Result-Code',2001}},1},
+ {{{0,275,0},send,{'Result-Code',2001}},1},
+ {{{0,275,0},send,{'Result-Code',3008}},2},
+ {{{0,275,0},send,{'Result-Code',3999}},1},
+ {{{0,275,0},send,{'Result-Code',5002}},1},
+ {{{0,275,0},send,{'Result-Code',5005}},2},
+ {{{0,275,0},send,{'Result-Code',5999}},1},
+ {{{0,275,1},recv,error},5}]
+ = L;
+
+stats(?CLIENT, callback, rfc3588, L) ->
+ [{{{unknown,0},recv},1},
+ {{{0,257,0},recv},1},
+ {{{0,257,1},send},1},
+ {{{0,275,0},recv},6},
+ {{{0,275,1},send},10},
+ {{{unknown,0},recv,{'Result-Code',3007}},1},
+ {{{0,257,0},recv,{'Result-Code',2001}},1},
+ {{{0,275,0},recv,{'Result-Code',2001}},2},
+ {{{0,275,0},recv,{'Result-Code',3999}},1},
+ {{{0,275,0},recv,{'Result-Code',5002}},1},
+ {{{0,275,0},recv,{'Result-Code',5005}},2}]
+ = L;
+
+stats(?SERVER, callback, rfc3588, L) ->
+ [{{{unknown,0},send},1},
+ {{{unknown,1},recv},1},
+ {{{0,257,0},send},1},
+ {{{0,257,1},recv},1},
+ {{{0,275,0},send},6},
+ {{{0,275,1},recv},8},
+ {{{unknown,0},send,{'Result-Code',3007}},1},
+ {{{unknown,1},recv,error},1},
+ {{{0,257,0},send,{'Result-Code',2001}},1},
+ {{{0,275,0},send,{'Result-Code',2001}},2},
+ {{{0,275,0},send,{'Result-Code',3999}},1},
+ {{{0,275,0},send,{'Result-Code',5002}},1},
+ {{{0,275,0},send,{'Result-Code',5005}},2},
+ {{{0,275,1},recv,error},5}]
+ = L;
+
+stats(?CLIENT, callback, rfc6733, L) ->
+ [{{{unknown,0},recv},1},
+ {{{0,257,0},recv},1},
+ {{{0,257,1},send},1},
+ {{{0,275,0},recv},8},
+ {{{0,275,1},send},10},
+ {{{unknown,0},recv,{'Result-Code',3007}},1},
+ {{{0,257,0},recv,{'Result-Code',2001}},1},
+ {{{0,275,0},recv,{'Result-Code',2001}},2},
+ {{{0,275,0},recv,{'Result-Code',3999}},1},
+ {{{0,275,0},recv,{'Result-Code',5002}},1},
+ {{{0,275,0},recv,{'Result-Code',5005}},3},
+ {{{0,275,0},recv,{'Result-Code',5999}},1}]
+ = L;
+
+stats(?SERVER, callback, rfc6733, L) ->
+ [{{{unknown,0},send},1},
+ {{{unknown,1},recv},1},
+ {{{0,257,0},send},1},
+ {{{0,257,1},recv},1},
+ {{{0,275,0},send},8},
+ {{{0,275,1},recv},8},
+ {{{unknown,0},send,{'Result-Code',3007}},1},
+ {{{unknown,1},recv,error},1},
+ {{{0,257,0},send,{'Result-Code',2001}},1},
+ {{{0,275,0},send,{'Result-Code',2001}},2},
+ {{{0,275,0},send,{'Result-Code',3999}},1},
+ {{{0,275,0},send,{'Result-Code',5002}},1},
+ {{{0,275,0},send,{'Result-Code',5005}},3},
+ {{{0,275,0},send,{'Result-Code',5999}},1},
+ {{{0,275,1},recv,error},5}]
+ = L.
+
%% send_unknown_application/1
%%
%% Send an unknown application that a callback (which shouldn't take
diff --git a/lib/diameter/test/diameter_app_SUITE.erl b/lib/diameter/test/diameter_app_SUITE.erl
index f68a18b5c2..84f8a66a8a 100644
--- a/lib/diameter/test/diameter_app_SUITE.erl
+++ b/lib/diameter/test/diameter_app_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -187,15 +187,14 @@ xref(Config) ->
xref:stop(XRef),
+ Rel = release(), %% otp_release-ish
+
%% Only care about calls from our own application.
- [] = lists:filter(fun({{F,_,_},{T,_,_}}) ->
+ [] = lists:filter(fun({{F,_,_} = From, {_,_,_} = To}) ->
lists:member(F, Mods)
- andalso {F,T} /= {diameter_tcp, ssl}
+ andalso not ignored(From, To, Rel)
end,
Undefs),
- %% diameter_tcp does call ssl despite the latter not being listed
- %% as a dependency in the app file since ssl is only required for
- %% TLS security: it's up to a client who wants TLS to start ssl.
%% Ensure that only runtime or info modules call runtime modules.
%% It's not strictly necessary that diameter compiler modules not
@@ -214,12 +213,46 @@ xref(Config) ->
[] = lists:filter(fun(M) -> not lists:member(app(M), Deps) end,
RTdeps -- Mods).
-unversion(App) ->
- T = lists:dropwhile(fun is_vsn_ch/1, lists:reverse(App)),
- lists:reverse(case T of [$-|TT] -> TT; _ -> T end).
+ignored({FromMod,_,_}, {ToMod,_,_} = To, Rel) ->
+ %% diameter_tcp does call ssl despite the latter not being listed
+ %% as a dependency in the app file since ssl is only required for
+ %% TLS security: it's up to a client who wants TLS to start ssl.
+ %% The OTP 18 time api is also called if it exists, so that the
+ %% same code can be run on older releases.
+ {FromMod, ToMod} == {diameter_tcp, ssl}
+ orelse (FromMod == diameter_lib
+ andalso Rel < 18
+ andalso lists:member(To, time_api())).
+
+%% New time api in OTP 18.
+time_api() ->
+ [{erlang, F, A} || {F,A} <- [{convert_time_unit,3},
+ {monotonic_time,0},
+ {monotonic_time,1},
+ {system_time,0},
+ {system_time,1},
+ {time_offset,0},
+ {time_offset,1},
+ {timestamp,0},
+ {unique_integer,0},
+ {unique_integer,1}]]
+ ++ [{os, system_time, 0},
+ {os, system_time, 1}].
+
+release() ->
+ Rel = erlang:system_info(otp_release),
+ try list_to_integer(Rel) of
+ N -> N
+ catch
+ error:_ ->
+ 0 %% aka < 17
+ end.
-is_vsn_ch(C) ->
- $0 =< C andalso C =< $9 orelse $. == C.
+unversion(App) ->
+ {Name, [$-|Vsn]} = lists:splitwith(fun(C) -> C /= $- end, App),
+ true = is_app(Name), %% assert
+ Vsn = vsn_str(Vsn), %%
+ Name.
app('$M_EXPR') -> %% could be anything but assume it's ok
"erts";
@@ -288,11 +321,11 @@ acc_rel(Dir, Rel, {Vsn, _}, Acc) ->
%% Write a rel file and return its name.
write_rel(Dir, [Erts | Apps], Vsn) ->
- true = is_vsn(Vsn),
- Name = "diameter_test_" ++ Vsn,
+ VS = vsn_str(Vsn),
+ Name = "diameter_test_" ++ VS,
ok = write_file(filename:join([Dir, Name ++ ".rel"]),
{release,
- {"diameter " ++ Vsn ++ " test release", Vsn},
+ {"diameter " ++ VS ++ " test release", VS},
Erts,
Apps}),
Name.
@@ -307,10 +340,34 @@ fetch(Key, List) ->
write_file(Path, T) ->
file:write_file(Path, io_lib:format("~p.", [T])).
-%% Is a version string of the expected form? Return the argument
-%% itself for 'false' for a useful badmatch.
+%% Is a version string of the expected form?
is_vsn(V) ->
- is_list(V)
- andalso length(V) == string:span(V, "0123456789.")
- andalso V == string:join(string:tokens(V, [$.]), ".") %% no ".."
- orelse {error, V}.
+ V = vsn_str(V),
+ true.
+
+%% Turn a from/to version in an appup into a version string after ensuring
+%% that it's a valid version number or regexp. In the regexp case, the
+%% regexp itself becomes the version string since there's no
+%% requirement that a version in appup be anything but a string. The
+%% restrictions placed on string-valued version numbers (that they be
+%% '.'-separated integers) are our own.
+
+vsn_str(S)
+ when is_list(S) ->
+ {_, match} = {S, match(S, "^(0|[1-9][0-9]*)(\\.(0|[1-9][0-9]*))*$")},
+ {_, nomatch} = {S, match(S, "\\.0\\.0$")},
+ S;
+
+vsn_str(B)
+ when is_binary(B) ->
+ {ok, _} = re:compile(B),
+ binary_to_list(B).
+
+match(S, RE) ->
+ re:run(S, RE, [{capture, none}]).
+
+%% Is an application name of the expected form?
+is_app(S)
+ when is_list(S) ->
+ {_, match} = {S, match(S, "^([a-z]([a-z_]*|[a-zA-Z]*))$")},
+ true.
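
A rough restatement of the version check above: vsn_str/1 accepts a string only
if it is '.'-separated integers without leading zeros and does not end in
".0.0". The same two regexps applied in isolation (a sketch; the function name
check_vsn is illustrative and not part of the suite):

    check_vsn(S) ->
        Valid = "^(0|[1-9][0-9]*)(\\.(0|[1-9][0-9]*))*$",
        match =:= re:run(S, Valid, [{capture, none}])
            andalso nomatch =:= re:run(S, "\\.0\\.0$", [{capture, none}]).

    %% check_vsn("1.2.10") -> true
    %% check_vsn("1.02")   -> false  (leading zero)
    %% check_vsn("3.0.0")  -> false  (trailing ".0.0")
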
diff --git a/lib/diameter/test/diameter_capx_SUITE.erl b/lib/diameter/test/diameter_capx_SUITE.erl
index deabdd720b..1c0f25864c 100644
--- a/lib/diameter/test/diameter_capx_SUITE.erl
+++ b/lib/diameter/test/diameter_capx_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -144,8 +144,8 @@ end_per_suite(_Config) ->
%% Generate a unique hostname for each testcase so that watchdogs
%% don't prevent a connection from being brought up immediately.
init_per_testcase(Name, Config) ->
- Uniq = ["." ++ integer_to_list(N) || N <- tuple_to_list(now())],
- [{host, lists:flatten([?L(Name) | Uniq])} | Config].
+ [{host, ?L(Name) ++ "." ++ diameter_util:unique_string()}
+ | Config].
init_per_group(Name, Config) ->
[{rfc, Name} | Config].
@@ -378,10 +378,14 @@ dict(N) ->
%% id's, failing with app_not_configured if it can't.
load_dict(N) ->
Mod = dict(N),
- Forms = [{attribute, 1, module, Mod},
- {attribute, 2, compile, [export_all]},
- {function, 3, id, 0,
- [{clause, 4, [], [], [{integer, 4, N}]}]}],
+ A1 = erl_anno:new(1),
+ A2 = erl_anno:new(2),
+ A3 = erl_anno:new(3),
+ A4 = erl_anno:new(4),
+ Forms = [{attribute, A1, module, Mod},
+ {attribute, A2, compile, [export_all]},
+ {function, A3, id, 0,
+ [{clause, A4, [], [], [{integer, A4, N}]}]}],
{ok, Mod, Bin, []} = compile:forms(Forms, [return]),
{module, Mod} = code:load_binary(Mod, Mod, Bin),
N = Mod:id().
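
The erl_anno:new/1 calls construct the annotations that OTP 18 abstract forms
carry in place of bare line numbers. The compile-and-load pattern used here,
reduced to a standalone example (a sketch; the module name demo and the value
42 are illustrative):

    A = erl_anno:new(1),
    Forms = [{attribute, A, module, demo},
             {attribute, A, compile, [export_all]},
             {function, A, id, 0, [{clause, A, [], [], [{integer, A, 42}]}]}],
    {ok, demo, Bin} = compile:forms(Forms),
    {module, demo} = code:load_binary(demo, "demo", Bin),
    42 = demo:id().
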
diff --git a/lib/diameter/test/diameter_codec_SUITE.erl b/lib/diameter/test/diameter_codec_SUITE.erl
index cd8ca41f66..64ea90554d 100644
--- a/lib/diameter/test/diameter_codec_SUITE.erl
+++ b/lib/diameter/test/diameter_codec_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -29,6 +29,9 @@
-export([suite/0,
all/0,
+ groups/0,
+ init_per_group/2,
+ end_per_group/2,
init_per_testcase/2,
end_per_testcase/2]).
@@ -36,9 +39,13 @@
-export([base/1,
gen/1,
lib/1,
- unknown/1]).
+ unknown/1,
+ success/1,
+ grouped_error/1,
+ failed_error/1]).
-include("diameter_ct.hrl").
+-include("diameter.hrl").
-define(L, atom_to_list).
@@ -48,7 +55,19 @@ suite() ->
[{timetrap, {seconds, 10}}].
all() ->
- [base, gen, lib, unknown].
+ [base, gen, lib, unknown, {group, recode}].
+
+groups() ->
+ [{recode, [], [success,
+ grouped_error,
+ failed_error]}].
+
+init_per_group(recode, Config) ->
+ ok = diameter:start(),
+ Config.
+
+end_per_group(_, _) ->
+ ok = diameter:stop().
init_per_testcase(gen, Config) ->
[{application, ?APP, App}] = diameter_util:consult(?APP, app),
@@ -98,3 +117,166 @@ compile(File) ->
compile(File, Opts) ->
compile:file(File, [return | Opts]).
+
+%% ===========================================================================
+
+%% Ensure a Grouped AVP is represented by a list in the avps field.
+success(_) ->
+ Avps = [{295, <<1:32>>}, %% Termination-Cause
+ {284, [{280, "Proxy-Host"}, %% Proxy-Info
+ {33, "Proxy-State"}, %%
+ {295, <<2:32>>}]}], %% Termination-Cause
+ #diameter_packet{avps = [#diameter_avp{code = 295,
+ value = 1,
+ data = <<1:32>>},
+ [#diameter_avp{code = 284},
+ #diameter_avp{code = 280},
+ #diameter_avp{code = 33},
+ #diameter_avp{code = 295,
+ value = 2,
+ data = <<2:32>>}]],
+ errors = []}
+ = str(recode(str(Avps))).
+
+%% ===========================================================================
+
+%% Ensure a Grouped AVP is represented by a list in the avps field
+%% even in the case of a decode error on a component AVP.
+grouped_error(_) ->
+ Avps = [{295, <<1:32>>}, %% Termination-Cause
+ {284, [{295, <<0:32>>}, %% Proxy-Info, Termination-Cause
+ {280, "Proxy-Host"},
+ {33, "Proxy-State"}]}],
+ #diameter_packet{avps = [#diameter_avp{code = 295,
+ value = 1,
+ data = <<1:32>>},
+ [#diameter_avp{code = 284},
+ #diameter_avp{code = 295,
+ value = undefined,
+ data = <<0:32>>},
+ #diameter_avp{code = 280},
+ #diameter_avp{code = 33}]],
+ errors = [{5004, #diameter_avp{code = 284}}]}
+ = str(recode(str(Avps))).
+
+%% ===========================================================================
+
+%% Ensure that a failed decode in Failed-AVP is acceptable, and that
+%% the component AVPs are decoded if possible.
+failed_error(_) ->
+ Avps = [{279, [{295, <<0:32>>}, %% Failed-AVP, Termination-Cause
+ {258, <<1:32>>}, %% Auth-Application-Id
+ {284, [{280, "Proxy-Host"}, %% Proxy-Info
+ {33, "Proxy-State"},
+ {295, <<0:32>>}, %% Termination-Cause, invalid
+ {258, <<2:32>>}]}]}], %% Auth-Application-Id
+ #diameter_packet{avps = [[#diameter_avp{code = 279},
+ #diameter_avp{code = 295,
+ value = undefined,
+ data = <<0:32>>},
+ #diameter_avp{code = 258,
+ value = 1,
+ data = <<1:32>>},
+ [#diameter_avp{code = 284},
+ #diameter_avp{code = 280},
+ #diameter_avp{code = 33},
+ #diameter_avp{code = 295,
+ value = undefined},
+ #diameter_avp{code = 258,
+ value = 2,
+ data = <<2:32>>}]]],
+ errors = []}
+ = sta(recode(sta(Avps))).
+
+%% ===========================================================================
+
+%% str/1
+
+str(#diameter_packet{avps = [#diameter_avp{code = 263},
+ #diameter_avp{code = 264},
+ #diameter_avp{code = 296},
+ #diameter_avp{code = 283},
+ #diameter_avp{code = 258,
+ value = 0}
+ | T]}
+ = Pkt) ->
+ Pkt#diameter_packet{avps = T};
+
+str(Avps) ->
+ OH = "diameter.erlang.org",
+ OR = "erlang.org",
+ DR = "example.com",
+ Sid = "diameter.erlang.org;123;456",
+
+ [#diameter_header{version = 1,
+ cmd_code = 275, %% STR
+ is_request = true,
+ application_id = 0,
+ hop_by_hop_id = 17,
+ end_to_end_id = 42,
+ is_proxiable = false,
+ is_error = false,
+ is_retransmitted = false}
+ | avp([{263, Sid}, %% Session-Id
+ {264, OH}, %% Origin-Host
+ {296, OR}, %% Origin-Realm
+ {283, DR}, %% Destination-Realm
+ {258, <<0:32>>}] %% Auth-Application-Id
+ ++ Avps)].
+
+%% sta/1
+
+sta(#diameter_packet{avps = [#diameter_avp{code = 263},
+ #diameter_avp{code = 268},
+ #diameter_avp{code = 264},
+ #diameter_avp{code = 296},
+ #diameter_avp{code = 278,
+ value = 4}
+ | T]}
+ = Pkt) ->
+ Pkt#diameter_packet{avps = T};
+
+sta(Avps) ->
+ OH = "diameter.erlang.org",
+ OR = "erlang.org",
+ Sid = "diameter.erlang.org;123;456",
+
+ [#diameter_header{version = 1,
+ cmd_code = 275, %% STA
+ is_request = false,
+ application_id = 0,
+ hop_by_hop_id = 17,
+ end_to_end_id = 42,
+ is_proxiable = false,
+ is_error = false,
+ is_retransmitted = false}
+ | avp([{263, Sid}, %% Session-Id
+ {268, <<2002:32>>}, %% Result-Code
+ {264, OH}, %% Origin-Host
+ {296, OR}, %% Origin-Realm
+ {278, <<4:32>>}] %% Origin-State-Id
+ ++ Avps)].
+
+avp({Code, Data}) ->
+ #diameter_avp{code = Code,
+ data = avp(Data)};
+
+avp(#diameter_avp{} = A) ->
+ A;
+
+avp([{_,_} | _] = Avps) ->
+ lists:map(fun avp/1, Avps);
+
+avp(V) ->
+ V.
+
+%% recode/1
+
+recode(Msg) ->
+ recode(Msg, diameter_gen_base_rfc6733).
+
+recode(#diameter_packet{} = Pkt, Dict) ->
+ diameter_codec:decode(Dict, diameter_codec:encode(Dict, Pkt));
+
+recode(Msg, Dict) ->
+ recode(#diameter_packet{msg = Msg}, Dict).
diff --git a/lib/diameter/test/diameter_codec_test.erl b/lib/diameter/test/diameter_codec_test.erl
index 90536dcf2b..5f1dbfbd61 100644
--- a/lib/diameter/test/diameter_codec_test.erl
+++ b/lib/diameter/test/diameter_codec_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -229,8 +229,7 @@ v(Max, Ord, E)
when Ord =< Max ->
diameter_enum:to_list(E);
v(Max, Ord, E) ->
- {M,S,U} = now(),
- random:seed(M,S,U),
+ random:seed(diameter_util:seed()),
v(Max, Ord, E, []).
v(0, _, _, Acc) ->
@@ -353,12 +352,23 @@ values('DiameterURI') ->
{[],
["aaa" ++ S ++ "://diameter.se" ++ P ++ Tr ++ Pr
|| S <- ["", "s"],
- P <- ["", ":1234"],
+ P <- ["", ":1234", ":0", ":65535"],
Tr <- ["" | [";transport=" ++ X
|| X <- ["tcp", "sctp", "udp"]]],
Pr <- ["" | [";protocol=" ++ X
- || X <- ["diameter","radius","tacacs+"]]]],
- []};
+ || X <- ["diameter","radius","tacacs+"]]],
+ Tr /= ";transport=udp"
+ orelse (Pr /= ";protocol=diameter" andalso Pr /= "")]
+ ++ ["aaa://" ++ lists:duplicate(255, $x)],
+ ["aaa://diameter.se:65536",
+ "aaa://diameter.se:-1",
+ "aaa://diameter.se;transport=udp;protocol=diameter",
+ "aaa://diameter.se;transport=udp",
+ "aaa://" ++ lists:duplicate(256, $x),
+ "aaa://:3868",
+ "aaax://diameter.se",
+ "aaa://diameter.se;transport=tcpx",
+ "aaa://diameter.se;transport=tcp;protocol=diameter "]};
values(T)
when T == 'IPFilterRule';
@@ -512,7 +522,7 @@ random(Mn,Mx) ->
seed(undefined) ->
put({?MODULE, seed}, true),
- random:seed(now());
+ random:seed(diameter_util:seed());
seed(true) ->
ok.
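
diameter_util:seed/0 is not part of this diff; it replaces the deprecated
now()-based seeding. A now()-free seed can be produced along these lines (a
sketch, not necessarily what the helper returns):

    random:seed(erlang:phash2([node()]),
                erlang:monotonic_time(),
                erlang:unique_integer()).
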
diff --git a/lib/diameter/test/diameter_config_SUITE.erl b/lib/diameter/test/diameter_config_SUITE.erl
index ad5b3f9420..4bcaa8119f 100644
--- a/lib/diameter/test/diameter_config_SUITE.erl
+++ b/lib/diameter/test/diameter_config_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -50,7 +50,7 @@
{request_errors, RE},
{call_mutates_state, C}]]
|| D <- [diameter_gen_base_rfc3588, diameter_gen_base_rfc6733],
- M <- [?MODULE, [?MODULE, now()]],
+ M <- [?MODULE, [?MODULE, diameter_lib:now()]],
A <- [0, common, make_ref()],
S <- [[], make_ref()],
AE <- [report, callback, discard],
@@ -82,6 +82,15 @@
[false],
[[node(), node()]]],
[[x]]},
+ {string_decode,
+ [[true], [false]],
+ [[0], [x]]},
+ {incoming_maxlen,
+ [[0], [65536], [16#FFFFFF]],
+ [[-1], [1 bsl 24], [infinity], [false]]},
+ {spawn_opt,
+ [[[]], [[monitor, link]]],
+ [[false]]},
{invalid_option, %% invalid service options are rejected
[],
[[x],
@@ -157,6 +166,12 @@
{length_errors,
[[exit], [handle], [discard]],
[[x]]},
+ {dpr_timeout,
+ [[0], [3000], [16#FFFFFFFF]],
+ [[infinity], [-1], [1 bsl 32], [x]]},
+ {dpa_timeout,
+ [[0], [3000], [16#FFFFFFFF]],
+ [[infinity], [-1], [1 bsl 32], [x]]},
{connect_timer,
[[3000]],
[[infinity]]},
@@ -171,9 +186,15 @@
[[{suspect, 2}]]],
[[x],
[[{open, 0}]]]},
+ {pool_size,
+ [[1], [100]],
+ [[0], [infinity], [-1], [x]]},
{private,
[[x]],
[]},
+ {spawn_opt,
+ [[[]], [[monitor, link]]],
+ [[false]]},
{invalid_option, %% invalid transport options are silently ignored
[[x],
[x,x]],
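
The added entries exercise service and transport options introduced or tested
here: string_decode and incoming_maxlen are service options, pool_size,
dpr_timeout and dpa_timeout are transport options, and spawn_opt is accepted
as both. A sketch of how they might be configured (values illustrative only):

    SvcOpts = [{string_decode, false},       %% leave stringish AVP values as binaries
               {incoming_maxlen, 1 bsl 21},  %% upper bound on incoming message length
               {spawn_opt, [{min_heap_size, 5000}]}].

    TransportOpts = [{pool_size, 4},         %% number of accepting/connecting processes
                     {dpr_timeout, 3000},    %% DPR/DPA timeouts, in milliseconds
                     {dpa_timeout, 3000}].
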
diff --git a/lib/diameter/test/diameter_ct.erl b/lib/diameter/test/diameter_ct.erl
index ed2f884681..85c502ea7f 100644
--- a/lib/diameter/test/diameter_ct.erl
+++ b/lib/diameter/test/diameter_ct.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -43,7 +43,7 @@ ct_run(Opts) ->
info(Start , info()).
info() ->
- [{time, now()},
+ [{time, diameter_lib:now()},
{process_count, erlang:system_info(process_count)}
| erlang:memory()].
@@ -56,6 +56,6 @@ info(L0, L1) ->
io:format("INFO: ~p~n", [Diff]).
diff(time, T0, T1) ->
- timer:now_diff(T1, T0);
+ diameter_lib:micro_diff(T1, T0);
diff(_, N0, N1) ->
N1 - N0.
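
diameter_lib:now/0 and micro_diff/2 wrap the OTP 18 time API when it is
available (see the xref exceptions in diameter_app_SUITE above), so the suites
run unchanged on older releases. On OTP 18 and later the measurement amounts
to roughly (a sketch, not the diameter_lib implementation):

    T0 = erlang:monotonic_time(),
    timer:sleep(100),                        %% the work being timed
    Elapsed = erlang:convert_time_unit(erlang:monotonic_time() - T0,
                                       native, micro_seconds).
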
diff --git a/lib/diameter/test/diameter_dpr_SUITE.erl b/lib/diameter/test/diameter_dpr_SUITE.erl
index f3f16b06e0..81178e2bda 100644
--- a/lib/diameter/test/diameter_dpr_SUITE.erl
+++ b/lib/diameter/test/diameter_dpr_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2012-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2012-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -32,6 +32,7 @@
%% testcases
-export([start/1,
connect/1,
+ send_dpr/1,
remove_transport/1,
stop_service/1,
check/1,
@@ -41,6 +42,7 @@
-export([disconnect/5]).
-include("diameter.hrl").
+-include("diameter_gen_base_rfc6733.hrl").
%% ===========================================================================
@@ -51,9 +53,6 @@
-define(CLIENT, "CLIENT").
-define(SERVER, "SERVER").
--define(DICT_COMMON, ?DIAMETER_DICT_COMMON).
--define(APP_ID, ?DICT_COMMON:id()).
-
%% Config for diameter:start_service/2.
-define(SERVICE(Host),
[{'Origin-Host', Host},
@@ -61,9 +60,10 @@
{'Host-IP-Address', [?ADDR]},
{'Vendor-Id', hd(Host)}, %% match this in disconnect/5
{'Product-Name', "OTP/diameter"},
- {'Acct-Application-Id', [?APP_ID]},
+ {'Acct-Application-Id', [0]},
{restrict_connections, false},
- {application, [{dictionary, ?DICT_COMMON},
+ {application, [{dictionary, diameter_gen_base_rfc6733},
+ {alias, common},
{module, #diameter_callback{_ = false}}]}]).
%% Disconnect reasons that diameter passes as the first argument of a
@@ -74,10 +74,12 @@
-define(CAUSES, [0, rebooting, 1, busy, 2, goaway]).
%% Establish one client connection for each element of this list,
-%% configured with disconnect/5 as disconnect_cb and returning the
-%% specified value.
+%% configured with disconnect/5, disconnect_cb returning the specified
+%% value.
-define(RETURNS,
- [[close, {dpr, [{cause, invalid}]}], [ignore, close], []]
+ [[close, {dpr, [{cause, invalid}]}],
+ [ignore, close],
+ []]
++ [[{dpr, [{timeout, 5000}, {cause, T}]}] || T <- ?CAUSES]).
%% ===========================================================================
@@ -86,7 +88,7 @@ suite() ->
[{timetrap, {seconds, 60}}].
all() ->
- [{group, R} || R <- ?REASONS].
+ [start, send_dpr, stop | [{group, R} || R <- ?REASONS]].
%% The group determines how transports are terminated: by remove_transport,
%% stop_service or application stop.
@@ -111,6 +113,22 @@ start(_Config) ->
ok = diameter:start_service(?SERVER, ?SERVICE(?SERVER)),
ok = diameter:start_service(?CLIENT, ?SERVICE(?CLIENT)).
+send_dpr(_Config) ->
+ LRef = ?util:listen(?SERVER, tcp),
+ Ref = ?util:connect(?CLIENT, tcp, LRef, [{dpa_timeout, 10000}]),
+ #diameter_base_DPA{'Result-Code' = 2001}
+ = diameter:call(?CLIENT,
+ common,
+ ['DPR', {'Origin-Host', "CLIENT.erlang.org"},
+ {'Origin-Realm', "erlang.org"},
+ {'Disconnect-Cause', 0}]),
+ ok = receive %% ensure the transport dies on DPA
+ #diameter_event{service = ?CLIENT, info = {down, Ref, _, _}} ->
+ ok
+ after 5000 ->
+ erlang:process_info(self(), messages)
+ end.
+
connect(Config) ->
Pid = spawn(fun init/0), %% process for disconnect_cb to bang
Grp = group(Config),
diff --git a/lib/diameter/test/diameter_event_SUITE.erl b/lib/diameter/test/diameter_event_SUITE.erl
index f43f111d20..bfe160203c 100644
--- a/lib/diameter/test/diameter_event_SUITE.erl
+++ b/lib/diameter/test/diameter_event_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -168,16 +168,15 @@ connect(Config, Opts) ->
{Name, Ref}.
uniq() ->
- {MS,S,US} = now(),
- lists:flatten(io_lib:format("-~p-~p-~p-", [MS,S,US])).
+ "-" ++ diameter_util:unique_string().
event(Name) ->
receive #diameter_event{service = Name, info = T} -> T end.
event(Name, TL, TH) ->
- T0 = now(),
+ T0 = diameter_lib:now(),
Event = event(Name),
- DT = timer:now_diff(now(), T0) div 1000,
+ DT = diameter_lib:micro_diff(T0) div 1000,
{true, true, DT, Event} = {TL < DT, DT < TH, DT, Event},
Event.
diff --git a/lib/diameter/test/diameter_examples_SUITE.erl b/lib/diameter/test/diameter_examples_SUITE.erl
index aef4bc35ef..ef8e459175 100644
--- a/lib/diameter/test/diameter_examples_SUITE.erl
+++ b/lib/diameter/test/diameter_examples_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -295,15 +295,15 @@ slave() ->
[{timetrap, {minutes, 10}}].
slave(_) ->
- T0 = now(),
+ T0 = diameter_lib:now(),
{ok, Node} = ct_slave:start(?MODULE, ?TIMEOUTS),
- T1 = now(),
+ T1 = diameter_lib:now(),
T2 = rpc:call(Node, erlang, now, []),
{ok, Node} = ct_slave:stop(?MODULE),
- now_diff([T0, T1, T2, now()]).
+ now_diff([T0, T1, T2, diameter_lib:now()]).
now_diff([T1,T2|_] = Ts) ->
- [timer:now_diff(T2,T1) | now_diff(tl(Ts))];
+ [diameter_lib:micro_diff(T2,T1) | now_diff(tl(Ts))];
now_diff(_) ->
[].
@@ -397,4 +397,4 @@ stop(Name)
stop(Config) ->
Prot = proplists:get_value(group, Config),
- [] = [RC || N <- ?NODES, RC <- [stop(concat(Prot, N))], RC /= ok].
+ [] = [RC || N <- ?NODES, RC <- [catch stop(concat(Prot, N))], RC /= ok].
diff --git a/lib/diameter/test/diameter_gen_sctp_SUITE.erl b/lib/diameter/test/diameter_gen_sctp_SUITE.erl
index 51ccb1e6ec..4ea5e80095 100644
--- a/lib/diameter/test/diameter_gen_sctp_SUITE.erl
+++ b/lib/diameter/test/diameter_gen_sctp_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -119,10 +119,10 @@ send_not_from_controlling_process(_) ->
send_not_from_controlling_process() ->
FPid = self(),
- {L, MRef} = spawn_monitor(fun() -> listen(FPid) end),%% listening process
+ {L, MRef} = spawn_monitor(fun() -> listen(FPid) end),
receive
{?MODULE, C, S} ->
- erlang:demonitor(MRef, [flush]),
+ demonitor(MRef, [flush]),
[L,C,S];
{'DOWN', MRef, process, _, _} = T ->
error(T)
@@ -137,13 +137,7 @@ listen(FPid) ->
LPid = self(),
spawn(fun() -> connect1(PortNr, FPid, LPid) end), %% connecting process
Id = assoc(Sock),
- ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], _Bin})
- = recv(). %% Waits with this as current_function.
-
-%% recv/0
-
-recv() ->
- receive T -> T end.
+ recv(Sock, Id).
%% connect1/3
@@ -154,7 +148,7 @@ connect1(PortNr, FPid, LPid) ->
FPid ! {?MODULE,
self(),
spawn(fun() -> send(Sock, Id) end)}, %% sending process
- MRef = erlang:monitor(process, LPid),
+ MRef = monitor(process, LPid),
down(MRef). %% Waits with this as current_function.
%% down/1
@@ -277,7 +271,8 @@ acc(N, Acc) ->
loop(Sock, MRef, Bin) ->
receive
- ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], B}) ->
+ ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], B})
+ when is_binary(B) ->
Sz = size(Bin),
{Sz, Bin} = {size(B), B}, %% assert
ok = send(Sock, Id, mark(Bin)),
@@ -291,7 +286,7 @@ loop(Sock, MRef, Bin) ->
%% connect2/3
connect2(Pid, PortNr, Bin) ->
- erlang:monitor(process, Pid),
+ monitor(process, Pid),
{ok, Sock} = open(),
ok = gen_sctp:connect_init(Sock, ?ADDR, PortNr, []),
@@ -301,19 +296,25 @@ connect2(Pid, PortNr, Bin) ->
%% T2 = time after listening process received our message
%% T3 = time after reply is received
- T1 = now(),
+ T1 = diameter_util:timestamp(),
ok = send(Sock, Id, Bin),
T2 = unmark(recv(Sock, Id)),
- T3 = now(),
- {timer:now_diff(T2, T1), timer:now_diff(T3, T2)}. %% {Outbound, Inbound}
+ T3 = diameter_util:timestamp(),
+ {diameter_lib:micro_diff(T2, T1), %% Outbound
+ diameter_lib:micro_diff(T3, T2)}. %% Inbound
%% recv/2
recv(Sock, Id) ->
receive
- ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], Bin}) ->
+ ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = I}], Bin})
+ when is_binary(Bin) ->
+ Id = I, %% assert
Bin;
- T -> %% eg. 'DOWN'
+ ?SCTP(S, _) ->
+ Sock = S, %% assert
+ recv(Sock, Id);
+ T ->
exit(T)
end.
@@ -325,7 +326,7 @@ send(Sock, Id, Bin) ->
%% mark/1
mark(Bin) ->
- Info = term_to_binary(now()),
+ Info = term_to_binary(diameter_util:timestamp()),
<<Info/binary, Bin/binary>>.
%% unmark/1
diff --git a/lib/diameter/test/diameter_gen_tcp_SUITE.erl b/lib/diameter/test/diameter_gen_tcp_SUITE.erl
index 7e232edb44..4b542e0156 100644
--- a/lib/diameter/test/diameter_gen_tcp_SUITE.erl
+++ b/lib/diameter/test/diameter_gen_tcp_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013. All Rights Reserved.
+%% Copyright Ericsson AB 2014-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -18,10 +18,10 @@
%%
%%
-%% Some gen_sctp-specific tests demonstrating problems that were
+%% Some gen_tcp-specific tests demonstrating problems that were
%% encountered during diameter development but have nothing
-%% specifically to do with diameter. At least one of them can cause
-%% diameter_traffic_SUITE testcases to fail.
+%% specifically to do with diameter. These can cause testcases in
+%% other suites to fail.
%%
-module(diameter_gen_tcp_SUITE).
@@ -30,7 +30,8 @@
all/0]).
%% testcases
--export([send_long/1]).
+-export([send_long/1,
+ connect/1]).
-define(LOOPBACK, {127,0,0,1}).
-define(GEN_OPTS, [binary, {active, true}, {ip, ?LOOPBACK}]).
@@ -41,7 +42,8 @@ suite() ->
[{timetrap, {minutes, 2}}].
all() ->
- [send_long].
+ [connect, %% Appears to fail only when run first.
+ send_long].
%% ===========================================================================
@@ -87,15 +89,6 @@ connect(PortNr, LPid) ->
LPid ! {self(), fun(B) -> send(Sock, B) end},
down(LPid).
-%% down/1
-
-down(Pid)
- when is_pid(Pid) ->
- down(erlang:monitor(process, Pid));
-
-down(MRef) ->
- receive {'DOWN', MRef, process, _, Reason} -> Reason end.
-
%% send/2
%%
%% Send from a spawned process just to avoid sending from the
@@ -104,3 +97,47 @@ down(MRef) ->
send(Sock, Bin) ->
{_, MRef} = spawn_monitor(fun() -> exit(gen_tcp:send(Sock, Bin)) end),
down(MRef).
+
+%% ===========================================================================
+
+%% connect/1
+%%
+%% Test that simultaneous connections succeed. This fails sporadically
+%% on OS X at the time of writing, when gen_tcp:connect/2 returns
+%% {error, econnreset}.
+
+connect(_) ->
+ {ok, LSock} = gen_tcp:listen(0, ?GEN_OPTS),
+ {ok, {_,PortNr}} = inet:sockname(LSock),
+ Count = lists:seq(1,8), %% 8 simultaneous connects
+ As = [gen_accept(LSock) || _ <- Count],
+ %% Wait for spawned processes to have called gen_tcp:accept/1
+ %% (presumably).
+ receive after 2000 -> ok end,
+ Cs = [gen_connect(PortNr) || _ <- Count],
+ [] = failures(Cs),
+ [] = failures(As).
+
+failures(Monitors) ->
+ [RC || {_, MRef} <- Monitors, RC <- [down(MRef)], ok /= element(1, RC)].
+
+gen_accept(LSock) ->
+ spawn_monitor(fun() ->
+ exit(gen_tcp:accept(LSock))
+ end).
+
+gen_connect(PortNr) ->
+ spawn_monitor(fun() ->
+ exit(gen_tcp:connect(?LOOPBACK, PortNr, ?GEN_OPTS))
+ end).
+
+%% ===========================================================================
+
+%% down/1
+
+down(Pid)
+ when is_pid(Pid) ->
+ down(monitor(process, Pid));
+
+down(MRef) ->
+ receive {'DOWN', MRef, process, _, Reason} -> Reason end.
diff --git a/lib/diameter/test/diameter_pool_SUITE.erl b/lib/diameter/test/diameter_pool_SUITE.erl
new file mode 100644
index 0000000000..a59cd66a2e
--- /dev/null
+++ b/lib/diameter/test/diameter_pool_SUITE.erl
@@ -0,0 +1,133 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+%% Test of the pool_size option in connecting nodes with multiple
+%% connections.
+%%
+
+-module(diameter_pool_SUITE).
+
+-export([suite/0,
+ all/0,
+ init_per_testcase/2,
+ end_per_testcase/2,
+ init_per_suite/1,
+ end_per_suite/1]).
+
+%% testcases
+-export([tcp_connect/1,
+ sctp_connect/1,
+ any_connect/1]).
+
+%% ===========================================================================
+
+-define(util, diameter_util).
+
+%% Config for diameter:start_service/2.
+-define(SERVICE(Host),
+ [{'Origin-Host', Host ++ ".ericsson.com"},
+ {'Origin-Realm', "ericsson.com"},
+ {'Host-IP-Address', [{127,0,0,1}]},
+ {'Vendor-Id', 12345},
+ {'Product-Name', "OTP/diameter"},
+ {'Auth-Application-Id', [0]}, %% common
+ {'Acct-Application-Id', [3]}, %% accounting
+ {restrict_connections, false},
+ {application, [{alias, common},
+ {dictionary, diameter_gen_base_rfc6733},
+ {module, diameter_callback}]},
+ {application, [{alias, accounting},
+ {dictionary, diameter_gen_acct_rfc6733},
+ {module, diameter_callback}]}]).
+
+%% ===========================================================================
+
+suite() ->
+ [{timetrap, {seconds, 30}}].
+
+all() ->
+ [tcp_connect,
+ sctp_connect,
+ any_connect].
+
+init_per_testcase(_Name, Config) ->
+ Config.
+
+end_per_testcase(_Name, _Config) ->
+ diameter:stop().
+
+init_per_suite(Config) ->
+ [{sctp, ?util:have_sctp()} | Config].
+
+end_per_suite(_Config) ->
+ ok.
+
+%% ===========================================================================
+
+tcp_connect(_Config) ->
+ connect(tcp, tcp).
+
+sctp_connect(Config) ->
+ case lists:member({sctp, true}, Config) of
+ true -> connect(sctp, sctp);
+ false -> {skip, no_sctp}
+ end.
+
+any_connect(_Config) ->
+ connect(any, tcp).
+
+%% connect/2
+
+%% Establish multiple connections between a client and server.
+connect(ClientProt, ServerProt) ->
+ ok = diameter:start(),
+ [] = [{S,T} || S <- ["server", "client"],
+ T <- [diameter:start_service(S, ?SERVICE(S))],
+ T /= ok],
+ %% Listen with a single transport with pool_size = 4. Ensure the
+ %% expected number of transport processes are started.
+ LRef = ?util:listen("server", ServerProt, [{pool_size, 4}]),
+ {4,0} = count("server", LRef, accept), %% 4 transports, no connections
+ %% Establish 5 connections.
+ Ref = ?util:connect("client", ClientProt, LRef, [{pool_size, 5}]),
+ {5,5} = count("client", Ref, pool), %% 5 connections
+ %% Ensure the server has started replacement transports within a
+ %% reasonable time. Sleep since there's no guarantee the
+ %% replacements have been started before the client has received
+ %% 'up' events. (Although it's likely.)
+ sleep(),
+ {9,5} = count("server", LRef, accept), %% 5 connections + 4 accepting
+ %% Ensure there are still the expected number of accepting transports
+ %% after stopping the client service.
+ ok = diameter:stop_service("client"),
+ sleep(),
+ {4,0} = count("server", LRef, accept), %% 4 transports, no connections
+ %% Done.
+ ok = diameter:stop_service("server").
+
+count(Name, Ref, Key) ->
+ [{transport, [[{ref, Ref} | T]]},
+ {connections, Cs}]
+ = diameter:service_info(Name, [transport, connections]),
+ {Key, Ps} = lists:keyfind(Key, 1, T),
+ {length(Ps), length(Cs)}. %% number of processes, connections
+
+sleep() ->
+ receive after 1000 -> ok end.
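
The ?util:listen/connect helpers wrap diameter:add_transport/2; on the
connecting side, a pool_size configuration is roughly of this shape (a sketch
with placeholder address and port, not the helper's exact options):

    {ok, _Ref} = diameter:add_transport("client",
                                        {connect,
                                         [{transport_module, diameter_tcp},
                                          {transport_config, [{raddr, {127,0,0,1}},
                                                              {rport, 3868}]},
                                          {pool_size, 5}]}).
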
diff --git a/lib/diameter/test/diameter_relay_SUITE.erl b/lib/diameter/test/diameter_relay_SUITE.erl
index 735a908d97..7142239bbb 100644
--- a/lib/diameter/test/diameter_relay_SUITE.erl
+++ b/lib/diameter/test/diameter_relay_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -49,6 +49,7 @@
send_timeout_1/1,
send_timeout_2/1,
info/1,
+ counters/1,
disconnect/1,
stop_services/1,
stop/1]).
@@ -120,6 +121,7 @@ all() ->
start_services,
connect,
{group, all},
+ counters,
{group, all, [parallel]},
disconnect,
stop_services,
@@ -201,8 +203,8 @@ send3(_Config) ->
send4(_Config) ->
call(?SERVER4).
-%% Send an ASR that loops between the relays and expect the loop to
-%% be detected.
+%% Send an ASR that loops between the relays (RELAY1 -> RELAY2 ->
+%% RELAY1) and expect the loop to be detected.
send_loop(_Config) ->
Req = ['ASR', {'Destination-Realm', realm(?SERVER1)},
{'Destination-Host', ?SERVER1},
@@ -227,8 +229,103 @@ send_timeout(Tmo) ->
call(Req, [{filter, realm}, {timeout, Tmo}]).
info(_Config) ->
+ %% Wait for RELAY1 to have answered all requests, so that the
+ %% suite doesn't end before all answers are sent and counted.
+ receive after 6000 -> ok end,
[] = ?util:info().
+counters(_Config) ->
+ [] = ?util:run([[fun counters/2, K, S]
+ || K <- [statistics, transport, connections],
+ S <- ?SERVICES]).
+
+counters(Key, Svc) ->
+ counters(Key, Svc, [_|_] = diameter:service_info(Svc, Key)).
+
+counters(statistics, Svc, Stats) ->
+ stats(Svc, lists:foldl(fun({K,N},D) -> orddict:update_counter(K, N, D) end,
+ orddict:new(),
+ lists:append([L || {P,L} <- Stats, is_pid(P)])));
+
+counters(_, _, _) ->
+ todo.
+
+stats(?CLIENT, L) ->
+ [{{{0,257,0},recv},2}, %% CEA
+ {{{0,257,1},send},2}, %% CER
+ {{{0,258,0},recv},1}, %% RAA (send_timeout_1)
+ {{{0,258,1},send},2}, %% RAR (send_timeout_[12])
+ {{{0,274,0},recv},1}, %% ASA (send_loop)
+ {{{0,274,1},send},1}, %% ASR (send_loop)
+ {{{0,275,0},recv},4}, %% STA (send[1-4])
+ {{{0,275,1},send},4}, %% STR (send[1-4])
+ {{{unknown,0},recv,discarded},1}, %% RAR (send_timeout_2)
+ {{{0,257,0},recv,{'Result-Code',2001}},2}, %% CEA
+ {{{0,258,0},recv,{'Result-Code',3002}},1}, %% RAA (send_timeout_1)
+ {{{0,274,0},recv,{'Result-Code',3005}},1}, %% ASA (send_loop)
+ {{{0,275,0},recv,{'Result-Code',2001}},4}] %% STA (send[1-4])
+ = L;
+
+stats(S, L)
+ when S == ?SERVER1;
+ S == ?SERVER2;
+ S == ?SERVER3;
+ S == ?SERVER4 ->
+ [{{{0,257,0},send},1}, %% CEA
+ {{{0,257,1},recv},1}, %% CER
+ {{{0,275,0},send},1}, %% STA (send[1-4])
+ {{{0,275,1},recv},1}, %% STR (send[1-4])
+ {{{0,257,0},send,{'Result-Code',2001}},1}, %% CEA
+ {{{0,275,0},send,{'Result-Code',2001}},1}] %% STA (send[1-4])
+ = L;
+
+stats(?RELAY1, L) ->
+ [{{{relay,0},recv},3}, %% STA x 2 (send[12])
+ %% ASA (send_loop)
+ {{{relay,0},send},6}, %% STA x 2 (send[12])
+ %% ASA x 2 (send_loop)
+ %% RAA x 2 (send_timeout_[12])
+ {{{relay,1},recv},6}, %% STR x 2 (send[12])
+ %% ASR x 2 (send_loop)
+ %% RAR x 2 (send_timeout_[12])
+ {{{relay,1},send},5}, %% STR x 2 (send[12])
+ %% ASR (send_loop)
+ %% RAR x 2 (send_timeout_[12])
+ {{{0,257,0},recv},3}, %% CEA
+ {{{0,257,0},send},1}, %% "
+ {{{0,257,1},recv},1}, %% CER
+ {{{0,257,1},send},3}, %% "
+ {{{relay,0},recv,{'Result-Code',2001}},2}, %% STA x 2 (send[34])
+ {{{relay,0},recv,{'Result-Code',3005}},1}, %% ASA (send_loop)
+ {{{relay,0},send,{'Result-Code',2001}},2}, %% STA x 2 (send[34])
+ {{{relay,0},send,{'Result-Code',3002}},2}, %% RAA (send_timeout_[12])
+ {{{relay,0},send,{'Result-Code',3005}},2}, %% ASA (send_loop)
+ {{{0,257,0},recv,{'Result-Code',2001}},3}, %% CEA
+ {{{0,257,0},send,{'Result-Code',2001}},1}] %% "
+ = L;
+
+stats(?RELAY2, L) ->
+ [{{{relay,0},recv},3}, %% STA x 2 (send[34])
+ %% ASA (send_loop)
+ {{{relay,0},send},3}, %% STA x 2 (send[34])
+ %% ASA (send_loop)
+ {{{relay,1},recv},5}, %% STR x 2 (send[34])
+ %% RAR x 2 (send_timeout_[12])
+ %% ASR (send_loop)
+ {{{relay,1},send},3}, %% STR x 2 (send[34])
+ %% ASR (send_loop)
+ {{{0,257,0},recv},2}, %% CEA
+ {{{0,257,0},send},2}, %% "
+ {{{0,257,1},recv},2}, %% CER
+ {{{0,257,1},send},2}, %% "
+ {{{relay,0},recv,{'Result-Code',2001}},2}, %% STA x 2 (send[34])
+ {{{relay,0},recv,{'Result-Code',3005}},1}, %% ASA (send_loop)
+ {{{relay,0},send,{'Result-Code',2001}},2}, %% STA x 2 (send[34])
+ {{{relay,0},send,{'Result-Code',3005}},1}, %% ASA (send_loop)
+ {{{0,257,0},recv,{'Result-Code',2001}},2}, %% CEA
+ {{{0,257,0},send,{'Result-Code',2001}},2}] %% "
+ = L.
+
%% ===========================================================================
realm(Host) ->
@@ -303,18 +400,24 @@ handle_request(Pkt, OH, {_Ref, #diameter_caps{origin_host = {OH,_}} = Caps})
when OH /= ?CLIENT ->
request(Pkt, Caps).
-%% RELAY1 routes any ASR or RAR to RELAY2 ...
+%% RELAY1 answers ACR after it's timed out at the client.
+request(#diameter_packet{header = #diameter_header{cmd_code = 271}},
+ #diameter_caps{origin_host = {?RELAY1, _}}) ->
+ receive after 1000 -> {answer_message, 3004} end; %% TOO_BUSY
+
+%% RELAY1 routes any ASR or RAR to RELAY2.
request(#diameter_packet{header = #diameter_header{cmd_code = C}},
#diameter_caps{origin_host = {?RELAY1, _}})
when C == 274; %% ASR
C == 258 -> %% RAR
{relay, [{filter, {realm, realm(?RELAY2)}}]};
-%% ... which in turn routes it back. Expect diameter to either answer
-%% either with DIAMETER_LOOP_DETECTED/DIAMETER_UNABLE_TO_COMPLY.
+%% RELAY2 routes ASR back to RELAY1 to induce DIAMETER_LOOP_DETECTED.
request(#diameter_packet{header = #diameter_header{cmd_code = 274}},
#diameter_caps{origin_host = {?RELAY2, _}}) ->
{relay, [{filter, {host, ?RELAY1}}]};
+
+%% RELAY2 discards RAR to induce DIAMETER_UNABLE_TO_DELIVER.
request(#diameter_packet{header = #diameter_header{cmd_code = 258}},
#diameter_caps{origin_host = {?RELAY2, _}}) ->
discard;
diff --git a/lib/diameter/test/diameter_tls_SUITE.erl b/lib/diameter/test/diameter_tls_SUITE.erl
index 55565692ec..e5bbda9c91 100644
--- a/lib/diameter/test/diameter_tls_SUITE.erl
+++ b/lib/diameter/test/diameter_tls_SUITE.erl
@@ -319,19 +319,19 @@ make_cert(Dir, Base) ->
make_cert(Dir, Base ++ "_key.pem", Base ++ "_ca.pem").
make_cert(Dir, Keyfile, Certfile) ->
- [K,C] = Paths = [filename:join([Dir, F]) || F <- [Keyfile, Certfile]],
+ [KP,CP] = [filename:join([Dir, F]) || F <- [Keyfile, Certfile]],
- KCmd = join(["openssl genrsa -out", K, "2048"]),
- CCmd = join(["openssl req -new -x509 -key", K, "-out", C, "-days 7",
- "-subj /C=SE/ST=./L=Stockholm/CN=www.erlang.org"]),
+ KC = join(["openssl genrsa -out", KP, "2048"]),
+ CC = join(["openssl req -new -x509 -key", KP, "-out", CP, "-days 7",
+ "-subj /C=SE/ST=./L=Stockholm/CN=www.erlang.org"]),
%% Hope for the best and only check that files are written.
- os:cmd(KCmd),
- os:cmd(CCmd),
+ [{_, _, {ok,_}},{_, _, {ok,_}}]
+ = [{P,O,T} || {P,C} <- [{KP,KC}, {CP,CC}],
+ O <- [os:cmd(C)],
+ T <- [file:read_file_info(P)]],
- [_,_] = [T || P <- Paths, {ok, T} <- [file:read_file_info(P)]],
-
- {K,C}.
+ {KP,CP}.
join(Strs) ->
string:join(Strs, " ").
diff --git a/lib/diameter/test/diameter_traffic_SUITE.erl b/lib/diameter/test/diameter_traffic_SUITE.erl
index 4b67372016..17faf30a9b 100644
--- a/lib/diameter/test/diameter_traffic_SUITE.erl
+++ b/lib/diameter/test/diameter_traffic_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -41,6 +41,7 @@
send_eval/1,
send_bad_answer/1,
send_protocol_error/1,
+ send_experimental_result/1,
send_arbitrary/1,
send_unknown/1,
send_unknown_short/1,
@@ -59,6 +60,7 @@
send_unexpected_mandatory_decode/1,
send_unexpected_mandatory/1,
send_long/1,
+ send_maxlen/1,
send_nopeer/1,
send_noapp/1,
send_discard/1,
@@ -122,8 +124,6 @@
-define(ADDR, {127,0,0,1}).
--define(CLIENT, "CLIENT").
--define(SERVER, "SERVER").
-define(REALM, "erlang.org").
-define(HOST(Host, Realm), Host ++ [$.|Realm]).
@@ -141,11 +141,23 @@
%% Which common dictionary to use in the clients.
-define(RFCS, [rfc3588, rfc6733]).
+%% Whether to decode stringish Diameter types to strings, or leave
+%% them as binary.
+-define(STRING_DECODES, [true, false]).
+
+%% Which transport protocol to use.
+-define(TRANSPORTS, [tcp, sctp]).
+
-record(group,
- {client_encoding,
+ {transport,
+ client_service,
+ client_encoding,
client_dict0,
+ client_strings,
+ server_service,
server_encoding,
- server_container}).
+ server_container,
+ server_strings}).
%% Not really what we should be setting unless the message is sent in
%% the common application but diameter doesn't care.
@@ -166,7 +178,7 @@
?answer_message(_, ResultCode)).
%% Config for diameter:start_service/2.
--define(SERVICE(Name),
+-define(SERVICE(Name, Decode),
[{'Origin-Host', Name ++ "." ++ ?REALM},
{'Origin-Realm', ?REALM},
{'Host-IP-Address', [?ADDR]},
@@ -175,6 +187,8 @@
{'Auth-Application-Id', [?DIAMETER_APP_ID_COMMON]},
{'Acct-Application-Id', [?DIAMETER_APP_ID_ACCOUNTING]},
{restrict_connections, false},
+ {string_decode, Decode},
+ {incoming_maxlen, 1 bsl 21},
{spawn_opt, [{min_heap_size, 5000}]}
| [{application, [{dictionary, D},
{module, ?MODULE},
@@ -224,37 +238,78 @@
%% ===========================================================================
suite() ->
- [{timetrap, {seconds, 60}}].
+ [{timetrap, {seconds, 10}}].
all() ->
- [start, start_services, add_transports, result_codes]
- ++ [{group, ?util:name([R,D,A,C]), P} || R <- ?ENCODINGS,
- D <- ?RFCS,
- A <- ?ENCODINGS,
- C <- ?CONTAINERS,
- P <- [[], [parallel]]]
- ++ [outstanding, remove_transports, empty, stop_services, stop].
+ [start, result_codes, {group, traffic}, outstanding, empty, stop].
groups() ->
Ts = tc(),
- [{?util:name([R,D,A,C]), [], Ts} || R <- ?ENCODINGS,
- D <- ?RFCS,
- A <- ?ENCODINGS,
- C <- ?CONTAINERS].
+ Sctp = ?util:have_sctp(),
+ [{?util:name([R,D,A,C]), [parallel], Ts} || R <- ?ENCODINGS,
+ D <- ?RFCS,
+ A <- ?ENCODINGS,
+ C <- ?CONTAINERS]
+ ++
+ [{?util:name([T,R,D,A,C,SD,CD]),
+ [],
+ [start_services,
+ add_transports,
+ result_codes,
+ {group, ?util:name([R,D,A,C])},
+ remove_transports,
+ stop_services]}
+ || T <- ?TRANSPORTS,
+ T /= sctp orelse Sctp,
+ R <- ?ENCODINGS,
+ D <- ?RFCS,
+ A <- ?ENCODINGS,
+ C <- ?CONTAINERS,
+ SD <- ?STRING_DECODES,
+ CD <- ?STRING_DECODES]
+ ++
+ [{traffic, [parallel], [{group, ?util:name([T,R,D,A,C,SD,CD])}
+ || T <- ?TRANSPORTS,
+ T /= sctp orelse Sctp,
+ R <- ?ENCODINGS,
+ D <- ?RFCS,
+ A <- ?ENCODINGS,
+ C <- ?CONTAINERS,
+ SD <- ?STRING_DECODES,
+ CD <- ?STRING_DECODES]}].
init_per_group(Name, Config) ->
- [R,D,A,C] = ?util:name(Name),
- G = #group{client_encoding = R,
- client_dict0 = dict0(D),
- server_encoding = A,
- server_container = C},
- [{group, G} | Config].
+ case ?util:name(Name) of
+ [T,R,D,A,C,SD,CD] ->
+ G = #group{transport = T,
+ client_service = [$C|?util:unique_string()],
+ client_encoding = R,
+ client_dict0 = dict0(D),
+ client_strings = CD,
+ server_service = [$S|?util:unique_string()],
+ server_encoding = A,
+ server_container = C,
+ server_strings = SD},
+ [{group, G} | Config];
+ _ ->
+ Config
+ end.
end_per_group(_, _) ->
ok.
+%% Skip testcases that can reasonably fail under SCTP.
init_per_testcase(Name, Config) ->
- [{testcase, Name} | Config].
+ case [skip || #group{transport = sctp}
+ <- [proplists:get_value(group, Config)],
+ send_maxlen == Name
+ orelse send_long == Name]
+ of
+ [skip] ->
+ {skip, sctp};
+ [] ->
+ [{testcase, Name} | Config]
+ end.
end_per_testcase(_, _) ->
ok.
@@ -267,6 +322,7 @@ tc() ->
send_eval,
send_bad_answer,
send_protocol_error,
+ send_experimental_result,
send_arbitrary,
send_unknown,
send_unknown_short,
@@ -285,6 +341,7 @@ tc() ->
send_unexpected_mandatory_decode,
send_unexpected_mandatory,
send_long,
+ send_maxlen,
send_nopeer,
send_noapp,
send_discard,
@@ -319,19 +376,29 @@ tc() ->
start(_Config) ->
ok = diameter:start().
-start_services(_Config) ->
- ok = diameter:start_service(?SERVER, ?SERVICE(?SERVER)),
- ok = diameter:start_service(?CLIENT, [{sequence, ?CLIENT_MASK}
- | ?SERVICE(?CLIENT)]).
+start_services(Config) ->
+ #group{client_service = CN,
+ client_strings = CD,
+ server_service = SN,
+ server_strings = SD}
+ = group(Config),
+ ok = diameter:start_service(SN, ?SERVICE(SN, SD)),
+ ok = diameter:start_service(CN, [{sequence, ?CLIENT_MASK}
+ | ?SERVICE(CN, CD)]).
add_transports(Config) ->
- LRef = ?util:listen(?SERVER,
- tcp,
+ #group{transport = T,
+ client_service = CN,
+ server_service = SN}
+ = group(Config),
+ LRef = ?util:listen(SN,
+ T,
[{capabilities_cb, fun capx/2},
+ {pool_size, 8},
{spawn_opt, [{min_heap_size, 8096}]},
{applications, apps(rfc3588)}]),
- Cs = [?util:connect(?CLIENT,
- tcp,
+ Cs = [?util:connect(CN,
+ T,
LRef,
[{id, Id},
{capabilities, [{'Origin-State-Id', origin(Id)}]},
@@ -354,12 +421,18 @@ outstanding(_Config) ->
is_atom(element(1,T))].
remove_transports(Config) ->
+ #group{client_service = CN,
+ server_service = SN}
+ = group(Config),
[LRef | Cs] = ?util:read_priv(Config, "transport"),
- [?util:disconnect(?CLIENT, C, ?SERVER, LRef) || C <- Cs].
+ [?util:disconnect(CN, C, SN, LRef) || C <- Cs].
-stop_services(_Config) ->
- ok = diameter:stop_service(?CLIENT),
- ok = diameter:stop_service(?SERVER).
+stop_services(Config) ->
+ #group{client_service = CN,
+ server_service = SN}
+ = group(Config),
+ ok = diameter:stop_service(CN),
+ ok = diameter:stop_service(SN).
%% Ensure even transports have been removed from request table.
empty(_Config) ->
@@ -394,7 +467,7 @@ send_ok(Config) ->
Req = ['ACR', {'Accounting-Record-Type', ?EVENT_RECORD},
{'Accounting-Record-Number', 1}],
- ['ACA', _SessionId, {'Result-Code', ?SUCCESS} | _]
+ ['ACA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
= call(Config, Req).
%% Send an accounting ACR that the server answers badly to.
@@ -410,16 +483,17 @@ send_eval(Config) ->
Req = ['ACR', {'Accounting-Record-Type', ?EVENT_RECORD},
{'Accounting-Record-Number', 3}],
- ['ACA', _SessionId, {'Result-Code', ?SUCCESS} | _]
+ ['ACA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
= call(Config, Req).
%% Send an accounting ACR that the server tries to answer with an
-%% inappropriate header, resulting in no answer being sent and the
-%% request timing out.
+%% inappropriate header. The resulting error is detected in
+%% handle_answer.
send_bad_answer(Config) ->
Req = ['ACR', {'Accounting-Record-Type', ?EVENT_RECORD},
{'Accounting-Record-Number', 2}],
- {timeout, _} = call(Config, Req).
+ ?answer_message(?SUCCESS)
+ = call(Config, Req).
%% Send an ACR that the server callback answers explicitly with a
%% protocol error.
@@ -430,23 +504,32 @@ send_protocol_error(Config) ->
?answer_message(?TOO_BUSY)
= call(Config, Req).
+%% Send a 3xxx Experimental-Result in an answer not setting the E-bit
+%% and missing a Result-Code.
+send_experimental_result(Config) ->
+ Req = ['ACR', {'Accounting-Record-Type', ?EVENT_RECORD},
+ {'Accounting-Record-Number', 5}],
+ ['ACA', {'Session-Id', _} | _]
+ = call(Config, Req).
+
%% Send an ASR with an arbitrary non-mandatory AVP and expect success
%% and the same AVP in the reply.
send_arbitrary(Config) ->
Req = ['ASR', {'AVP', [#diameter_avp{name = 'Product-Name',
value = "XXX"}]}],
- ['ASA', _SessionId, {'Result-Code', ?SUCCESS} | Avps]
+ ['ASA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | Avps]
= call(Config, Req),
{'AVP', [#diameter_avp{name = 'Product-Name',
- value = "XXX"}]}
- = lists:last(Avps).
+ value = V}]}
+ = lists:last(Avps),
+ "XXX" = string(V, Config).
%% Send an unknown AVP (to some client) and check that it comes back.
send_unknown(Config) ->
Req = ['ASR', {'AVP', [#diameter_avp{code = 999,
is_mandatory = false,
data = <<17>>}]}],
- ['ASA', _SessionId, {'Result-Code', ?SUCCESS} | Avps]
+ ['ASA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | Avps]
= call(Config, Req),
{'AVP', [#diameter_avp{code = 999,
is_mandatory = false,
@@ -462,7 +545,7 @@ send_unknown_short(Config, M, RC) ->
Req = ['ASR', {'AVP', [#diameter_avp{code = 999,
is_mandatory = M,
data = <<17>>}]}],
- ['ASA', _SessionId, {'Result-Code', RC} | Avps]
+ ['ASA', {'Session-Id', _}, {'Result-Code', RC} | Avps]
= call(Config, Req),
[#'diameter_base_Failed-AVP'{'AVP' = As}]
= proplists:get_value('Failed-AVP', Avps),
@@ -476,7 +559,7 @@ send_unknown_mandatory(Config) ->
Req = ['ASR', {'AVP', [#diameter_avp{code = 999,
is_mandatory = true,
data = <<17>>}]}],
- ['ASA', _SessionId, {'Result-Code', ?AVP_UNSUPPORTED} | Avps]
+ ['ASA', {'Session-Id', _}, {'Result-Code', ?AVP_UNSUPPORTED} | Avps]
= call(Config, Req),
[#'diameter_base_Failed-AVP'{'AVP' = As}]
= proplists:get_value('Failed-AVP', Avps),
@@ -496,7 +579,7 @@ send_unexpected_mandatory_decode(Config) ->
Req = ['ASR', {'AVP', [#diameter_avp{code = 27, %% Session-Timeout
is_mandatory = true,
data = <<12:32>>}]}],
- ['ASA', _SessionId, {'Result-Code', ?AVP_UNSUPPORTED} | Avps]
+ ['ASA', {'Session-Id', _}, {'Result-Code', ?AVP_UNSUPPORTED} | Avps]
= call(Config, Req),
[#'diameter_base_Failed-AVP'{'AVP' = As}]
= proplists:get_value('Failed-AVP', Avps),
@@ -532,7 +615,7 @@ send_error_bit(Config) ->
%% Send a bad version and check that we get 5011.
send_unsupported_version(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
- ['STA', _SessionId, {'Result-Code', ?UNSUPPORTED_VERSION} | _]
+ ['STA', {'Session-Id', _}, {'Result-Code', ?UNSUPPORTED_VERSION} | _]
= call(Config, Req).
%% Send a request containing an AVP length > data size.
@@ -552,14 +635,14 @@ send_zero_avp_length(Config) ->
send_invalid_avp_length(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
- ['STA', _SessionId,
+ ['STA', {'Session-Id', _},
{'Result-Code', ?INVALID_AVP_LENGTH},
- _OriginHost,
- _OriginRealm,
- _UserName,
- _Class,
- _ErrorMessage,
- _ErrorReportingHost,
+ {'Origin-Host', _},
+ {'Origin-Realm', _},
+ {'User-Name', _},
+ {'Class', _},
+ {'Error-Message', _},
+ {'Error-Reporting-Host', _},
{'Failed-AVP', [#'diameter_base_Failed-AVP'{'AVP' = [_]}]}
| _]
= call(Config, Req).
@@ -577,25 +660,33 @@ send_invalid_reject(Config) ->
send_unexpected_mandatory(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
- ['STA', _SessionId, {'Result-Code', ?AVP_UNSUPPORTED} | _]
+ ['STA', {'Session-Id', _}, {'Result-Code', ?AVP_UNSUPPORTED} | _]
= call(Config, Req).
%% Send something long that will be fragmented by TCP.
send_long(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT},
{'User-Name', [lists:duplicate(1 bsl 20, $X)]}],
- ['STA', _SessionId, {'Result-Code', ?SUCCESS} | _]
+ ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
= call(Config, Req).
+%% Send something longer than the configure incoming_maxlen.
+send_maxlen(Config) ->
+ Req = ['STR', {'Termination-Cause', ?LOGOUT},
+ {'User-Name', [lists:duplicate(1 bsl 21, $X)]}],
+ {timeout, _} = call(Config, Req).
+
%% Send something for which pick_peer finds no suitable peer.
send_nopeer(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
{error, no_connection} = call(Config, Req, [{extra, [?EXTRA]}]).
%% Send something on an unconfigured application.
-send_noapp(_Config) ->
+send_noapp(Config) ->
+ #group{client_service = CN}
+ = group(Config),
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
- {error, no_connection} = diameter:call(?CLIENT, unknown_alias, Req).
+ {error, no_connection} = diameter:call(CN, unknown_alias, Req).
%% Send something that's discarded by prepare_request.
send_discard(Config) ->
@@ -607,8 +698,10 @@ send_any_1(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
{error, no_connection} = call(Config, Req, [{filter, {any, []}}]).
send_any_2(Config) ->
+ #group{server_service = SN}
+ = group(Config),
Req = ['STR', {'Termination-Cause', ?LOGOUT},
- {'Destination-Host', [?HOST(?SERVER, "unknown.org")]}],
+ {'Destination-Host', [?HOST(SN, "unknown.org")]}],
?answer_message(?UNABLE_TO_DELIVER)
= call(Config, Req, [{filter, {any, [host, realm]}}]).
@@ -616,12 +709,14 @@ send_any_2(Config) ->
send_all_1(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
Realm = lists:foldr(fun(C,A) -> [C,A] end, [], ?REALM),
- ['STA', _SessionId, {'Result-Code', ?SUCCESS} | _]
+ ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
= call(Config, Req, [{filter, {all, [{host, any},
{realm, Realm}]}}]).
send_all_2(Config) ->
+ #group{server_service = SN}
+ = group(Config),
Req = ['STR', {'Termination-Cause', ?LOGOUT},
- {'Destination-Host', [?HOST(?SERVER, "unknown.org")]}],
+ {'Destination-Host', [?HOST(SN, "unknown.org")]}],
{error, no_connection}
= call(Config, Req, [{filter, {all, [host, realm]}}]).
@@ -634,9 +729,8 @@ send_timeout(Config) ->
%% received the Session-Id.
send_error(Config) ->
Req = ['RAR', {'Re-Auth-Request-Type', ?AUTHORIZE_AUTHENTICATE}],
- ?answer_message(SId, ?TOO_BUSY)
- = call(Config, Req),
- true = undefined /= SId.
+ ?answer_message([_], ?TOO_BUSY)
+ = call(Config, Req).
%% Send a request with the detached option and receive it as a message
%% from handle_answer instead.
@@ -645,7 +739,7 @@ send_detach(Config) ->
Ref = make_ref(),
ok = call(Config, Req, [{extra, [{self(), Ref}]}, detach]),
Ans = receive {Ref, T} -> T end,
- ['STA', _SessionId, {'Result-Code', ?SUCCESS} | _]
+ ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
= Ans.
%% Send a request which can't be encoded and expect {error, encode}.
@@ -654,13 +748,15 @@ send_encode_error(Config) ->
%% Send with filtering and expect success.
send_destination_1(Config) ->
+ #group{server_service = SN}
+ = group(Config),
Req = ['STR', {'Termination-Cause', ?LOGOUT},
- {'Destination-Host', [?HOST(?SERVER, ?REALM)]}],
- ['STA', _SessionId, {'Result-Code', ?SUCCESS} | _]
+ {'Destination-Host', [?HOST(SN, ?REALM)]}],
+ ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
= call(Config, Req, [{filter, {all, [host, realm]}}]).
send_destination_2(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
- ['STA', _SessionId, {'Result-Code', ?SUCCESS} | _]
+ ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
= call(Config, Req, [{filter, {all, [host, realm]}}]).
%% Send with filtering on and expect failure when specifying an
@@ -671,8 +767,10 @@ send_destination_3(Config) ->
{error, no_connection}
= call(Config, Req, [{filter, {all, [host, realm]}}]).
send_destination_4(Config) ->
+ #group{server_service = SN}
+ = group(Config),
Req = ['STR', {'Termination-Cause', ?LOGOUT},
- {'Destination-Host', [?HOST(?SERVER, "unknown.org")]}],
+ {'Destination-Host', [?HOST(SN, "unknown.org")]}],
{error, no_connection}
= call(Config, Req, [{filter, {all, [host, realm]}}]).
@@ -684,8 +782,10 @@ send_destination_5(Config) ->
?answer_message(?REALM_NOT_SERVED)
= call(Config, Req).
send_destination_6(Config) ->
+ #group{server_service = SN}
+ = group(Config),
Req = ['STR', {'Termination-Cause', ?LOGOUT},
- {'Destination-Host', [?HOST(?SERVER, "unknown.org")]}],
+ {'Destination-Host', [?HOST(SN, "unknown.org")]}],
?answer_message(?UNABLE_TO_DELIVER)
= call(Config, Req).
@@ -720,7 +820,7 @@ send_bad_filter(Config, F) ->
%% Specify multiple filter options and expect them to be conjunctive.
send_multiple_filters_1(Config) ->
Fun = fun(#diameter_caps{}) -> true end,
- ['STA', _SessionId, {'Result-Code', ?SUCCESS} | _]
+ ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
= send_multiple_filters(Config, [host, {eval, Fun}]).
send_multiple_filters_2(Config) ->
E = {erlang, is_tuple, []},
@@ -731,7 +831,7 @@ send_multiple_filters_3(Config) ->
E2 = {erlang, is_tuple, []},
E3 = {erlang, is_record, [diameter_caps]},
E4 = [{erlang, is_record, []}, diameter_caps],
- ['STA', _SessionId, {'Result-Code', ?SUCCESS} | _]
+ ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
= send_multiple_filters(Config, [{eval, E} || E <- [E1,E2,E3,E4]]).
send_multiple_filters(Config, Fs) ->
@@ -742,24 +842,39 @@ send_multiple_filters(Config, Fs) ->
%% only the return value from the prepare_request callback being
%% significant.
send_anything(Config) ->
- ['STA', _SessionId, {'Result-Code', ?SUCCESS} | _]
+ ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
= call(Config, anything).
%% ===========================================================================
+group(Config) ->
+ #group{} = proplists:get_value(group, Config).
+
+string(V, Config) ->
+ #group{client_strings = B} = group(Config),
+ decode(V,B).
+
+decode(S, true)
+ when is_list(S) ->
+ S;
+decode(B, false)
+ when is_binary(B) ->
+ binary_to_list(B).
+
call(Config, Req) ->
call(Config, Req, []).
call(Config, Req, Opts) ->
Name = proplists:get_value(testcase, Config),
- #group{client_encoding = ReqEncoding,
+ #group{client_service = CN,
+ client_encoding = ReqEncoding,
client_dict0 = Dict0}
= Group
- = proplists:get_value(group, Config),
- diameter:call(?CLIENT,
+ = group(Config),
+ diameter:call(CN,
dict(Req, Dict0),
msg(Req, ReqEncoding, Dict0),
- [{extra, [{Name, Group}, now()]} | Opts]).
+ [{extra, [{Name, Group}, diameter_lib:now()]} | Opts]).
origin({A,C}) ->
2*codec(A) + container(C);
@@ -843,35 +958,38 @@ peer_down(_SvcName, _Peer, State) ->
%% pick_peer/6-7
-pick_peer(Peers, _, ?CLIENT, _State, {Name, Group}, _)
+pick_peer(Peers, _, [$C|_], _State, {Name, Group}, _)
when Name /= send_detach ->
find(Group, Peers).
-pick_peer(_Peers, _, ?CLIENT, _State, {send_nopeer, _}, _, ?EXTRA) ->
+pick_peer(_Peers, _, [$C|_], _State, {send_nopeer, _}, _, ?EXTRA) ->
false;
-pick_peer(Peers, _, ?CLIENT, _State, {send_detach, Group}, _, {_,_}) ->
+pick_peer(Peers, _, [$C|_], _State, {send_detach, Group}, _, {_,_}) ->
find(Group, Peers).
-find(#group{server_encoding = A, server_container = C}, Peers) ->
+find(#group{client_service = CN,
+ server_encoding = A,
+ server_container = C},
+ Peers) ->
Id = {A,C},
- [P] = [P || P <- Peers, id(Id, P)],
+ [P] = [P || P <- Peers, id(Id, P, CN)],
{ok, P}.
-id(Id, {Pid, _Caps}) ->
+id(Id, {Pid, _Caps}, SvcName) ->
[{ref, _}, {type, _}, {options, Opts} | _]
- = diameter:service_info(?CLIENT, Pid),
+ = diameter:service_info(SvcName, Pid),
lists:member({id, Id}, Opts).
%% prepare_request/5-6
-prepare_request(_Pkt, ?CLIENT, {_Ref, _Caps}, {send_discard, _}, _) ->
+prepare_request(_Pkt, [$C|_], {_Ref, _Caps}, {send_discard, _}, _) ->
{discard, unprepared};
-prepare_request(Pkt, ?CLIENT, {_Ref, Caps}, {Name, Group}, _) ->
+prepare_request(Pkt, [$C|_], {_Ref, Caps}, {Name, Group}, _) ->
{send, prepare(Pkt, Caps, Name, Group)}.
-prepare_request(Pkt, ?CLIENT, {_Ref, Caps}, {send_detach, Group}, _, _) ->
+prepare_request(Pkt, [$C|_], {_Ref, Caps}, {send_detach, Group}, _, _) ->
{eval_packet, {send, prepare(Pkt, Caps, Group)}, [fun log/2, detach]}.
log(#diameter_packet{bin = Bin} = P, T)
@@ -1042,10 +1160,10 @@ prepare_retransmit(_Pkt, false, _Peer, _Name, _Group) ->
%% handle_answer/6-7
-handle_answer(Pkt, Req, ?CLIENT, Peer, {Name, Group}, _) ->
+handle_answer(Pkt, Req, [$C|_], Peer, {Name, Group}, _) ->
answer(Pkt, Req, Peer, Name, Group).
-handle_answer(Pkt, Req, ?CLIENT, Peer, {send_detach = Name, Group}, _, X) ->
+handle_answer(Pkt, Req, [$C|_], Peer, {send_detach = Name, Group}, _, X) ->
{Pid, Ref} = X,
Pid ! {Ref, answer(Pkt, Req, Peer, Name, Group)}.
@@ -1057,15 +1175,19 @@ answer(Pkt, Req, _Peer, Name, #group{client_dict0 = Dict0}) ->
[R | Vs] = Dict:'#get-'(answer(Ans, Es, Name)),
[Dict:rec2msg(R) | Vs].
-answer(Rec, [_|_], N)
- when N == send_long_avp_length;
- N == send_short_avp_length;
- N == send_zero_avp_length;
- N == send_invalid_avp_length;
- N == send_invalid_reject;
- N == send_unknown_short_mandatory;
- N == send_unexpected_mandatory_decode ->
+%% Missing Result-Code and inappropriate Experimental-Result-Code.
+answer(Rec, Es, send_experimental_result) ->
+ [{5004, #diameter_avp{name = 'Experimental-Result'}},
+ {5005, #diameter_avp{name = 'Result-Code'}}]
+ = Es,
+ Rec;
+
+%% An inappropriate E-bit results in a decode error ...
+answer(Rec, Es, send_bad_answer) ->
+ [{5004, #diameter_avp{name = 'Result-Code'}} | _] = Es,
Rec;
+
+%% ... while other errors are reflected in Failed-AVP.
answer(Rec, [], _) ->
Rec.
@@ -1077,11 +1199,13 @@ app(Req, _, Dict0) ->
%% handle_error/6
-handle_error(timeout = Reason, _Req, ?CLIENT, _Peer, _, Time) ->
- Now = now(),
- {Reason, {Time, Now, timer:now_diff(Now, Time)}};
+handle_error(timeout = Reason, _Req, [$C|_], _Peer, _, Time) ->
+ Now = diameter_lib:now(),
+ {Reason, {diameter_lib:timestamp(Time),
+ diameter_lib:timestamp(Now),
+ diameter_lib:micro_diff(Now, Time)}};
-handle_error(Reason, _Req, ?CLIENT, _Peer, _, _Time) ->
+handle_error(Reason, _Req, [$C|_], _Peer, _, _Time) ->
{error, Reason}.
%% handle_request/3
@@ -1089,7 +1213,9 @@ handle_error(Reason, _Req, ?CLIENT, _Peer, _, _Time) ->
%% Note that diameter will set Result-Code and Failed-AVPs if
%% #diameter_packet.errors is non-null.
-handle_request(#diameter_packet{header = H, msg = M}, ?SERVER, {_Ref, Caps}) ->
+handle_request(#diameter_packet{header = H, msg = M, avps = As},
+ _,
+ {_Ref, Caps}) ->
#diameter_header{end_to_end_id = EI,
hop_by_hop_id = HI}
= H,
@@ -1097,10 +1223,12 @@ handle_request(#diameter_packet{header = H, msg = M}, ?SERVER, {_Ref, Caps}) ->
V = EI bsr B, %% assert
V = HI bsr B, %%
#diameter_caps{origin_state_id = {_,[Id]}} = Caps,
- answer(origin(Id), request(M, Caps)).
+ answer(origin(Id), request(M, [H|As], Caps)).
answer(T, {Tag, Action, Post}) ->
{Tag, answer(T, Action), Post};
+answer(_, {reply, [#diameter_header{} | _]} = T) ->
+ T;
answer({A,C}, {reply, Ans}) ->
answer(C, {reply, msg(Ans, A, diameter_gen_base_rfc3588)});
answer(pkt, {reply, Ans})
@@ -1109,6 +1237,41 @@ answer(pkt, {reply, Ans})
answer(_, T) ->
T.
+%% request/3
+
+%% send_experimental_result
+request(#diameter_base_accounting_ACR{'Accounting-Record-Number' = 5},
+ [Hdr | Avps],
+ #diameter_caps{origin_host = {OH, _},
+ origin_realm = {OR, _}}) ->
+ [H,R|T] = [A || N <- ['Origin-Host',
+ 'Origin-Realm',
+ 'Session-Id',
+ 'Accounting-Record-Type',
+ 'Accounting-Record-Number'],
+ #diameter_avp{} = A
+ <- [lists:keyfind(N, #diameter_avp.name, Avps)]],
+ Ans = [Hdr#diameter_header{is_request = false},
+ H#diameter_avp{data = OH},
+ R#diameter_avp{data = OR},
+ #diameter_avp{name = 'Experimental-Result',
+ code = 297,
+ need_encryption = false,
+ data = [#diameter_avp{data = {?DIAMETER_DICT_COMMON,
+ 'Vendor-Id',
+ 123}},
+ #diameter_avp{data
+ = {?DIAMETER_DICT_COMMON,
+ 'Experimental-Result-Code',
+ 3987}}]}
+ | T],
+ {reply, Ans};
+
+request(Msg, _Avps, Caps) ->
+ request(Msg, Caps).
+
+%% request/2
+
%% send_nok
request(#diameter_base_accounting_ACR{'Accounting-Record-Number' = 0},
_) ->
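The client callbacks above (pick_peer, prepare_request, handle_answer, handle_error) now recognise the client by matching the service name against a leading $C instead of the ?CLIENT macro, so differently named client services can share the one callback module. A minimal sketch of that dispatch pattern in isolation, with invented service names:

-module(svc_prefix).
-export([role/1]).

%% Sketch only: the service names in the comments are hypothetical.
role([$C | _] = SvcName) ->   %% any client service, e.g. "CLIENT.tcp"
    {client, SvcName};
role(SvcName) ->              %% anything else
    {other, SvcName}.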
diff --git a/lib/diameter/test/diameter_transport_SUITE.erl b/lib/diameter/test/diameter_transport_SUITE.erl
index fcffa69c24..78bddbd1cf 100644
--- a/lib/diameter/test/diameter_transport_SUITE.erl
+++ b/lib/diameter/test/diameter_transport_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -53,7 +53,7 @@
%% Receive a message.
-define(RECV(Pat, Ret), receive Pat -> Ret end).
--define(RECV(Pat), ?RECV(Pat, now())).
+-define(RECV(Pat), ?RECV(Pat, diameter_util:timestamp())).
%% Sockets are opened on the loopback address.
-define(ADDR, {127,0,0,1}).
@@ -64,7 +64,7 @@
= #diameter_caps{host_ip_address
= Addrs}}).
-%% The term we register after open a listening port with gen_tcp.
+%% The term we register after opening a listening port with gen_{tcp,sctp}.
-define(TEST_LISTENER(Ref, PortNr),
{?MODULE, listen, Ref, PortNr}).
@@ -85,7 +85,7 @@
%% ===========================================================================
suite() ->
- [{timetrap, {minutes, 2}}].
+ [{timetrap, {seconds, 15}}].
all() ->
[start,
@@ -104,7 +104,7 @@ tc() ->
reconnect].
init_per_suite(Config) ->
- [{sctp, have_sctp()} | Config].
+ [{sctp, ?util:have_sctp()} | Config].
end_per_suite(_Config) ->
ok.
@@ -127,7 +127,10 @@ tcp_accept(_) ->
accept(tcp).
sctp_accept(Config) ->
- if_sctp(fun accept/1, Config).
+ case lists:member({sctp, true}, Config) of
+ true -> accept(sctp);
+ false -> {skip, no_sctp}
+ end.
%% Start multiple accepting transport processes that are connected to
%% with an equal number of connecting processes using gen_tcp/sctp
@@ -157,7 +160,10 @@ tcp_connect(_) ->
connect(tcp).
sctp_connect(Config) ->
- if_sctp(fun connect/1, Config).
+ case lists:member({sctp, true}, Config) of
+ true -> connect(sctp);
+ false -> {skip, no_sctp}
+ end.
connect(Prot) ->
T = {Prot, make_ref()},
@@ -219,7 +225,7 @@ reconnect(_) ->
|| T <- [listen, connect]]).
start_service(SvcName) ->
- OH = io_lib:format("~p-~p-~p", tuple_to_list(now())),
+ OH = diameter_util:unique_string(),
Opts = [{application, [{dictionary, diameter_gen_base_rfc6733},
{module, diameter_callback}]},
{'Origin-Host', OH},
@@ -251,28 +257,6 @@ abort(SvcName, LRef, Ref)
%% ===========================================================================
%% ===========================================================================
-%% have_sctp/0
-
-have_sctp() ->
- case gen_sctp:open() of
- {ok, Sock} ->
- gen_sctp:close(Sock),
- true;
- {error, E} when E == eprotonosupport;
- E == esocktnosupport -> %% fail on any other reason
- false
- end.
-
-%% if_sctp/2
-
-if_sctp(F, Config) ->
- case proplists:get_value(sctp, Config) of
- true ->
- F(sctp);
- false ->
- {skip, no_sctp}
- end.
-
%% init/2
init(accept, {Prot, Ref}) ->
@@ -351,7 +335,7 @@ make_msg() ->
%% crypto:rand_bytes/1 isn't available on all platforms (since openssl
%% isn't) so roll our own.
rand_bytes(N) ->
- random:seed(now()),
+ random:seed(diameter_util:seed()),
rand_bytes(N, <<>>).
rand_bytes(0, Bin) ->
@@ -381,37 +365,14 @@ start_connect(tcp, T, Svc, Opts) ->
diameter_tcp:start(T, Svc, Opts).
%% start_accept/2
-%%
-%% Start transports sequentially by having each wait for a message
-%% from a job in a queue before commencing. Only one transport with a
-%% pending accept is started at a time since diameter_{tcp,sctp}
-%% currently assume (and diameter currently implements) this.
start_accept(Prot, Ref) ->
- Pid = sync(accept, Ref),
{Mod, Opts} = tmod(Prot),
-
- try
- {ok, TPid, [?ADDR]} = Mod:start({accept, Ref},
- ?SVC([?ADDR]),
- [{port, 0} | Opts]),
- ?RECV(?TMSG({TPid, connected})),
- TPid
- after
- Pid ! Ref
- end.
-
-sync(What, Ref) ->
- ok = diameter_sync:cast({?MODULE, What, Ref},
- [fun lock/2, Ref, self()],
- infinity,
- infinity),
- receive {start, Ref, Pid} -> Pid end.
-
-lock(Ref, Pid) ->
- Pid ! {start, Ref, self()},
- erlang:monitor(process, Pid),
- Ref = receive T -> T end.
+ {ok, TPid, [?ADDR]} = Mod:start({accept, Ref},
+ ?SVC([?ADDR]),
+ [{port, 0} | Opts]),
+ ?RECV(?TMSG({TPid, connected})),
+ TPid.
tmod(sctp) ->
{diameter_sctp, [{sctp_initmsg, ?SCTP_INIT}]};
@@ -440,12 +401,13 @@ gen_listen(tcp) ->
%% gen_accept/2
gen_accept(sctp, Sock) ->
- Assoc = ?RECV(?SCTP(Sock, {_, #sctp_assoc_change{state = comm_up,
- outbound_streams = O,
- inbound_streams = I,
- assoc_id = A}}),
- {O, I, A}),
- putr(assoc, Assoc),
+ #sctp_assoc_change{state = comm_up,
+ outbound_streams = OS,
+ inbound_streams = IS,
+ assoc_id = Id}
+ = ?RECV(?SCTP(Sock, {_, #sctp_assoc_change{} = S}), S),
+
+ putr(assoc, {OS, IS, Id}),
{ok, Sock};
gen_accept(tcp, LSock) ->
gen_tcp:accept(LSock).
@@ -454,7 +416,7 @@ gen_accept(tcp, LSock) ->
gen_send(sctp, Sock, Bin) ->
{OS, _IS, Id} = getr(assoc),
- {_, _, Us} = now(),
+ {_, _, Us} = diameter_util:timestamp(),
gen_sctp:send(Sock, Id, Us rem OS, Bin);
gen_send(tcp, Sock, Bin) ->
gen_tcp:send(Sock, Bin).
@@ -463,7 +425,11 @@ gen_send(tcp, Sock, Bin) ->
gen_recv(sctp, Sock) ->
{_OS, _IS, Id} = getr(assoc),
- ?RECV(?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], Bin}), Bin);
+ receive
+ ?SCTP(Sock, {[#sctp_sndrcvinfo{assoc_id = Id}], Bin})
+ when is_binary(Bin) ->
+ Bin
+ end;
gen_recv(tcp, Sock) ->
tcp_recv(Sock, <<>>).
diff --git a/lib/diameter/test/diameter_util.erl b/lib/diameter/test/diameter_util.erl
index 92c72c84e7..df7d268429 100644
--- a/lib/diameter/test/diameter_util.erl
+++ b/lib/diameter/test/diameter_util.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -29,7 +29,11 @@
run/1,
fold/3,
foldl/3,
- scramble/1]).
+ scramble/1,
+ timestamp/0,
+ seed/0,
+ unique_string/0,
+ have_sctp/0]).
%% diameter-specific
-export([lport/2,
@@ -174,7 +178,7 @@ scramble(L) ->
[[fun s/1, L]]).
s(L) ->
- random:seed(now()),
+ random:seed(seed()),
s([], L).
s(Acc, []) ->
@@ -184,6 +188,45 @@ s(Acc, L) ->
s([T|Acc], H ++ Rest).
%% ---------------------------------------------------------------------------
+%% timestamp/0
+
+timestamp() ->
+ diameter_lib:timestamp(diameter_lib:now()).
+
+%% ---------------------------------------------------------------------------
+%% seed/0
+
+seed() ->
+ {_,T} = diameter_lib:seed(),
+ T.
+
+%% ---------------------------------------------------------------------------
+%% unique_string/0
+
+unique_string() ->
+ try erlang:unique_integer() of
+ N ->
+ integer_to_list(N)
+ catch
+ error: undef -> %% OTP < 18
+ {M,S,U} = timestamp(),
+ tl(lists:append(["-" ++ integer_to_list(N) || N <- [M,S,U]]))
+ end.
+
+%% ---------------------------------------------------------------------------
+%% have_sctp/0
+
+have_sctp() ->
+ case gen_sctp:open() of
+ {ok, Sock} ->
+ gen_sctp:close(Sock),
+ true;
+ {error, E} when E == eprotonosupport;
+ E == esocktnosupport -> %% fail on any other reason
+ false
+ end.
+
+%% ---------------------------------------------------------------------------
%% eval/1
%%
%% Evaluate a function in one of a number of forms.
@@ -254,13 +297,12 @@ path(Config, Name) ->
%%
%% Lookup the port number of a tcp/sctp listening transport.
-lport(M, {Node, Ref}) ->
- rpc:call(Node, ?MODULE, lport, [M, Ref]);
+lport(Prot, {Node, Ref}) ->
+ rpc:call(Node, ?MODULE, lport, [Prot, Ref]);
lport(Prot, Ref) ->
- Mod = tmod(Prot),
[_] = diameter_reg:wait({'_', listener, {Ref, '_'}}),
- [N || {listen, N, _} <- Mod:ports(Ref)].
+ [N || M <- tmod(Prot), {listen, N, _} <- M:ports(Ref)].
%% ---------------------------------------------------------------------------
%% listen/2-3
@@ -292,13 +334,17 @@ connect(Client, Prot, LRef, Opts) ->
Ref = add_transport(Client, {connect, opts(Prot, PortNr) ++ Opts}),
true = transport(Client, Ref), %% assert
- ok = receive
- {diameter_event, Client, {up, Ref, _, _, _}} -> ok
- after 10000 ->
- {Client, Prot, PortNr, process_info(self(), messages)}
- end,
+ diameter_lib:for_n(fun(_) -> ok = up(Client, Ref, Prot, PortNr) end,
+ proplists:get_value(pool_size, Opts, 1)),
Ref.
+up(Client, Ref, Prot, PortNr) ->
+ receive
+ {diameter_event, Client, {up, Ref, _, _, _}} -> ok
+ after 10000 ->
+ {Client, Prot, PortNr, process_info(self(), messages)}
+ end.
+
transport(SvcName, Ref) ->
[Ref] == [R || [{ref, R} | _] <- diameter:service_info(SvcName, transport),
R == Ref].
@@ -327,13 +373,15 @@ add_transport(SvcName, T) ->
Ref.
tmod(tcp) ->
- diameter_tcp;
+ [diameter_tcp];
tmod(sctp) ->
- diameter_sctp.
+ [diameter_sctp];
+tmod(any) ->
+ [diameter_sctp, diameter_tcp].
opts(Prot, T) ->
- [{transport_module, tmod(Prot)},
- {transport_config, [{ip, ?ADDR}, {port, 0} | opts(T)]}].
+ [{transport_module, M} || M <- tmod(Prot)]
+ ++ [{transport_config, [{ip, ?ADDR}, {port, 0} | opts(T)]}].
opts(listen) ->
[{accept, M} || M <- [{256,0,0,1}, ["256.0.0.1", ["^.+$"]]]];
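Since tmod/1 now returns a list of modules, opts/2 emits one transport_module option per entry; for Prot = any that lists SCTP before TCP, followed by a single transport_config. A rough sketch of the resulting option list, using the suite's loopback address and port 0, with the remaining transport_config elements abridged:

%% Sketch of the shape of the options built for Prot = any; the
%% accept/raddr part of transport_config is omitted here.
[{transport_module, diameter_sctp},
 {transport_module, diameter_tcp},
 {transport_config, [{ip, {127,0,0,1}}, {port, 0}]}]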
diff --git a/lib/diameter/test/diameter_watchdog_SUITE.erl b/lib/diameter/test/diameter_watchdog_SUITE.erl
index b6e8730ec2..f39e12686e 100644
--- a/lib/diameter/test/diameter_watchdog_SUITE.erl
+++ b/lib/diameter/test/diameter_watchdog_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -420,6 +420,7 @@ suspect(TRef, false, SvcName, N) ->
%% abuse/1
abuse(F) ->
+
[] = run([[abuse, F, T] || T <- [listen, connect]]).
abuse(F, [_,_,_|_] = Args) ->
@@ -672,7 +673,7 @@ jitter(T,D) ->
%% Generate a unique hostname for the faked peer.
hostname() ->
- lists:flatten(io_lib:format("~p-~p-~p", tuple_to_list(now()))).
+ ?util:unique_string().
putr(Key, Val) ->
put({?MODULE, Key}, Val).
diff --git a/lib/diameter/test/modules.mk b/lib/diameter/test/modules.mk
index 4fea62461c..6da96bd676 100644
--- a/lib/diameter/test/modules.mk
+++ b/lib/diameter/test/modules.mk
@@ -1,8 +1,7 @@
-#-*-makefile-*- ; force emacs to enter makefile-mode
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2010-2013. All Rights Reserved.
+# Copyright Ericsson AB 2010-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -40,6 +39,7 @@ MODULES = \
diameter_gen_sctp_SUITE \
diameter_gen_tcp_SUITE \
diameter_length_SUITE \
+ diameter_pool_SUITE \
diameter_reg_SUITE \
diameter_relay_SUITE \
diameter_stats_SUITE \
diff --git a/lib/diameter/vsn.mk b/lib/diameter/vsn.mk
index 587ae08b3d..c278e74dca 100644
--- a/lib/diameter/vsn.mk
+++ b/lib/diameter/vsn.mk
@@ -1,8 +1,6 @@
-#-*-makefile-*- ; force emacs to enter makefile-mode
-
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2010-2014. All Rights Reserved.
+# Copyright Ericsson AB 2010-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -18,5 +16,5 @@
# %CopyrightEnd%
APPLICATION = diameter
-DIAMETER_VSN = 1.8
+DIAMETER_VSN = 1.9.2
APP_VSN = $(APPLICATION)-$(DIAMETER_VSN)$(PRE_VSN)
diff --git a/lib/edoc/doc/overview.edoc b/lib/edoc/doc/overview.edoc
index 2af425272e..3639bb43a5 100644
--- a/lib/edoc/doc/overview.edoc
+++ b/lib/edoc/doc/overview.edoc
@@ -76,11 +76,9 @@ The following are the main functions for running EDoc:
<ul>
<li>{@link edoc:application/2}: Creates documentation for a
typical Erlang application.</li>
- <li>{@link edoc:packages/2}: Creates documentation for one or
- more packages, automatically locating source files.</li>
<li>{@link edoc:files/2}: Creates documentation for a
specified set of source files.</li>
- <li>{@link edoc:run/3}: General interface function; the common
+ <li>{@link edoc:run/2}: General interface function; the common
back-end for the above functions. Options are documented here.</li>
</ul>
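For orientation, the two remaining entry points are typically driven along these lines (the file paths and application name below are examples only):

%% Illustrative calls; paths and the application name are made up.
ok = edoc:files(["src/foo.erl", "src/bar.erl"], [{dir, "doc"}]).
ok = edoc:application(my_app, [{dir, "doc"}]).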
@@ -184,7 +182,7 @@ The following tags can be used anywhere within a module:
path (see {@link edoc:read_source/2}).</dd>
<dt><a name="gtag-todo">`@todo' (or `@TODO')</a></dt>
- <dd>Attaches a To-Do note to a function, module, package, or
+ <dd>Attaches a To-Do note to a function, module or
overview-page. The content can be any XHTML text describing
the issue, e.g.:
```%% @TODO Finish writing the documentation.'''
@@ -338,7 +336,7 @@ The following tags can be used before a module declaration:
<dt><a name="mtag-since">`@since'</a></dt>
<dd>Specifies when the module was introduced, with respect to
- the application, package, release or distribution it is part
+ the application, release or distribution it is part
of. The content can be arbitrary text.</dd>
<dt><a name="mtag-version">`@version'</a></dt>
@@ -445,7 +443,6 @@ possible formats for references are:
<table border="1" summary="reference syntax">
<tr><th>Reference syntax</th><th>Example</th><th>Scope</th></tr>
<tr><td>`Module'</td><td>{@link edoc_run}, `erl.lang.list'</td><td>Global</td></tr>
- <tr><td>`Package.*'</td><td>`erl.lang.*'</td><td>Global</td></tr>
<tr><td>`Function/Arity'</td><td>`file/2'</td><td>Within module</td></tr>
<tr><td>`Module:Function/Arity'</td><td>{@link edoc:application/2}</td><td>Global</td></tr>
<tr><td>`Type()'</td><td>`filename()'</td><td>Within module</td></tr>
@@ -531,7 +528,7 @@ after the empty line into separate paragraphs. For example:
```%% @doc This will all be part of the first paragraph.
%% It can stretch over several lines and contain <em>any
%% XHTML markup</em>.
- %%
+ %%
%% This is the second paragraph. The above line is
%% regarded as "empty" by EDoc, even though it ends with
%% a space.'''
@@ -685,17 +682,6 @@ information. User-defined macros override predefined macros.
<dd>Expands to the current date, as "<tt>Month Day Year</tt>",
e.g. "{@date}".</dd>
- <dt><a name="predefmacro-docRoot"><code>@{@docRoot}</code></a></dt>
- <dd>Expands to the relative URL path (such as
- `"../../.."') from the current page to the root
- directory of the generated documentation. This can be used to
- create XHTML references such as `<img
- src="@{@docRoot}/images/logo.jpeg">' that are independent of how
- deep down in a package structure they occur. If packages are not
- used (i.e., if all modules are in the "empty" package),
- <code>@{@docRoot}</code> will always resolve to the empty
- string.</dd>
-
<dt><a name="predefmacro-link"><code>@{@link <em>reference</em>.
<em>description</em>}</code></a></dt>
<dd>This creates a hypertext link; cf. the
@@ -710,9 +696,6 @@ information. User-defined macros override predefined macros.
<dd>Expands to the name of the current module. Only defined when a
module is being processed.</dd>
- <dt><a name="predefmacro-package"><code>@{@package}</code></a></dt>
- <dd>Expands to the name of the current package.</dd>
-
<dt><a name="predefmacro-section"><code>@{@section
<em>heading</em>}</code></a></dt>
<dd>Expands to a hypertext link to the specified section heading;
diff --git a/lib/edoc/include/edoc_doclet.hrl b/lib/edoc/include/edoc_doclet.hrl
index 60ec7f44e4..ac6763fb33 100644
--- a/lib/edoc/include/edoc_doclet.hrl
+++ b/lib/edoc/include/edoc_doclet.hrl
@@ -1,6 +1,6 @@
%% =====================================================================
%% Header file for EDoc doclet modules.
-%%
+%%
%% Copyright (C) 2001-2004 Richard Carlsson
%%
%% This library is free software; you can redistribute it and/or modify
@@ -43,16 +43,11 @@
%% @type doclet_gen() = #doclet_gen{sources = [string()],
%% app = no_app() | atom(),
-%% packages = [atom()],
-%% modules = [atom()],
-%% modules = [atom()],
-%% filemap = function()}
+%% modules = [atom()]}
-record(doclet_gen, {sources = [],
app = ?NO_APP,
- packages = [],
- modules = [],
- filemap
+ modules = []
}).
%% @type doclet_toc() = #doclet_gen{paths = [string()],
diff --git a/lib/edoc/priv/edoc.dtd b/lib/edoc/priv/edoc.dtd
index ba4ac0db28..4278a9e643 100644
--- a/lib/edoc/priv/edoc.dtd
+++ b/lib/edoc/priv/edoc.dtd
@@ -2,20 +2,13 @@
<!-- EDoc DTD Version 0.3 -->
<!ELEMENT overview (title, description?, author*, copyright?, version?,
- since?, see*, reference*, todo?, packages, modules)>
+ since?, see*, reference*, todo?, modules)>
<!ATTLIST overview
root CDATA #IMPLIED
encoding CDATA #IMPLIED>
<!ELEMENT title (#PCDATA)>
-<!ELEMENT package (description?, author*, copyright?, version?,
- since?, deprecated?, see*, reference*, todo?,
- modules)>
-<!ATTLIST package
- name CDATA #REQUIRED
- root CDATA #IMPLIED>
-
<!ELEMENT modules (module+)>
diff --git a/lib/edoc/priv/stylesheet.css b/lib/edoc/priv/stylesheet.css
index e426a90483..ab170c091f 100644
--- a/lib/edoc/priv/stylesheet.css
+++ b/lib/edoc/priv/stylesheet.css
@@ -27,10 +27,10 @@ div.spec {
margin-left: 2em;
background-color: #eeeeee;
}
-a.module,a.package {
+a.module {
text-decoration:none
}
-a.module:hover,a.package:hover {
+a.module:hover {
background-color: #eeeeee;
}
ul.definitions {
diff --git a/lib/edoc/src/edoc.app.src b/lib/edoc/src/edoc.app.src
index 9e1155d3e8..e4b9040c78 100644
--- a/lib/edoc/src/edoc.app.src
+++ b/lib/edoc/src/edoc.app.src
@@ -23,5 +23,5 @@
{registered,[]},
{applications, [compiler,kernel,stdlib,syntax_tools]},
{env, []},
- {runtime_dependencies, ["xmerl-1.3.7","syntax_tools-1.6.14","stdlib-2.0",
+ {runtime_dependencies, ["xmerl-1.3.7","syntax_tools-1.6.14","stdlib-2.5",
"kernel-3.0","inets-5.10","erts-6.0"]}]}.
diff --git a/lib/edoc/src/edoc.erl b/lib/edoc/src/edoc.erl
index 983f04e8b6..90f1fc3071 100644
--- a/lib/edoc/src/edoc.erl
+++ b/lib/edoc/src/edoc.erl
@@ -24,12 +24,11 @@
%% TODO: option for ignoring functions matching some pattern ('..._test_'/0)
%% TODO: @private_type tag, opaque unless generating private docs?
%% TODO: document the record type syntax
-%% TODO: some 'skip' option for ignoring particular modules/packages?
-%% TODO: intermediate-level packages: document even if no local sources.
+%% TODO: some 'skip' option for ignoring particular modules?
%% TODO: multiline comment support (needs modified comment representation)
%% TODO: config-file for default settings
%% TODO: config: locations of all local docdirs; generate local doc-index page
-%% TODO: config: URL:s of offline packages/apps
+%% TODO: config: URL:s of offline apps
%% TODO: config: default stylesheet
%% TODO: config: default header/footer, etc.
%% TODO: offline linkage
@@ -45,10 +44,10 @@
-module(edoc).
--export([packages/1, packages/2, files/1, files/2,
+-export([files/1, files/2,
application/1, application/2, application/3,
toc/1, toc/2, toc/3,
- run/3,
+ run/2,
file/1, file/2,
read/1, read/2,
layout/1, layout/2,
@@ -68,15 +67,15 @@
file(Name) ->
file(Name, []).
-%% @spec file(filename(), proplist()) -> ok
+%% @spec file(filename(), proplist()) -> ok
%%
%% @type filename() = //kernel/file:filename()
%% @type proplist() = [term()]
%%
%% @deprecated This is part of the old interface to EDoc and is mainly
%% kept for backwards compatibility. The preferred way of generating
-%% documentation is through one of the functions {@link application/2},
-%% {@link packages/2} and {@link files/2}.
+%% documentation is through one of the functions {@link application/2}
+%% and {@link files/2}.
%%
%% @doc Reads a source code file and outputs formatted documentation to
%% a corresponding file.
@@ -121,44 +120,24 @@ file(Name, Options) ->
?DEFAULT_FILE_SUFFIX),
Dir = proplists:get_value(dir, Options, filename:dirname(Name)),
Encoding = [{encoding, edoc_lib:read_encoding(Name, [])}],
- edoc_lib:write_file(Text, Dir, BaseName ++ Suffix, '', Encoding).
+ edoc_lib:write_file(Text, Dir, BaseName ++ Suffix, Encoding).
-%% TODO: better documentation of files/1/2, packages/1/2, application/1/2/3
+%% TODO: better documentation of files/1/2, application/1/2/3
-%% @spec (Files::[filename() | {package(), [filename()]}]) -> ok
-%% @equiv packages(Packages, [])
+%% @spec (Files::[filename()]) -> ok
files(Files) ->
files(Files, []).
-%% @spec (Files::[filename() | {package(), [filename()]}],
+%% @spec (Files::[filename()],
%% Options::proplist()) -> ok
-%% @doc Runs EDoc on a given set of source files. See {@link run/3} for
+%% @doc Runs EDoc on a given set of source files. See {@link run/2} for
%% details, including options.
%% @equiv run([], Files, Options)
files(Files, Options) ->
- run([], Files, Options).
-
-%% @spec (Packages::[package()]) -> ok
-%% @equiv packages(Packages, [])
-
-packages(Packages) ->
- packages(Packages, []).
-
-%% @spec (Packages::[package()], Options::proplist()) -> ok
-%% @type package() = atom() | string()
-%%
-%% @doc Runs EDoc on a set of packages. The `source_path' option is used
-%% to locate the files; see {@link run/3} for details, including
-%% options. This function automatically appends the current directory to
-%% the source path.
-%%
-%% @equiv run(Packages, [], Options)
-
-packages(Packages, Options) ->
- run(Packages, [], Options ++ [{source_path, [?CURRENT_DIR]}]).
+ run(Files, Options).
%% @spec (Application::atom()) -> ok
%% @equiv application(Application, [])
@@ -194,7 +173,7 @@ application(App, Options) when is_atom(App) ->
%% subdirectory, if it exists, or otherwise in the application
%% directory itself.
%% </li>
-%% <li>The {@link run/3. `subpackages'} option is turned on. All found
+%% <li>The {@link run/2. `subpackages'} option is turned on. All found
%% source files will be processed.
%% </li>
%% <li>The `include' subdirectory is automatically added to the
@@ -203,7 +182,7 @@ application(App, Options) when is_atom(App) ->
%% </li>
%% </ul>
%%
-%% See {@link run/3} for details, including options.
+%% See {@link run/2} for details, including options.
%%
%% @see application/2
@@ -219,7 +198,7 @@ application(App, Dir, Options) when is_atom(App) ->
{includes, [filename:join(Dir, "include")]}],
Opts1 = set_app_default(App, Dir, Opts),
%% Recursively document all subpackages of '' - i.e., everything.
- run([''], [], [{application, App} | Opts1]).
+ run([], [{application, App} | Opts1]).
%% Try to set up a default application base URI in a smart way if the
%% user has not specified it explicitly.
@@ -240,31 +219,20 @@ set_app_default(App, Dir0, Opts) ->
Opts
end.
-%% If no source files are found for a (specified) package, no package
-%% documentation will be generated either (even if there is a
-%% package-documentation file). This is the way it should be. For
-%% specified files, use empty package (unless otherwise specified). The
-%% assumed package is always used for creating the output. If the actual
-%% module or package of the source differs from the assumption gathered
-%% from the path and file name, a warning should be issued (since links
-%% are likely to be incorrect).
-
opt_defaults() ->
- [packages].
+ [].
opt_negations() ->
[{no_preprocess, preprocess},
{no_subpackages, subpackages},
- {no_report_missing_types, report_missing_types},
- {no_packages, packages}].
+ {no_report_missing_types, report_missing_types}].
-%% @spec run(Packages::[package()],
-%% Files::[filename() | {package(), [filename()]}],
+%% @spec run(Files::[filename()],
%% Options::proplist()) -> ok
-%% @doc Runs EDoc on a given set of source files and/or packages. Note
+%% @doc Runs EDoc on a given set of source files. Note
%% that the doclet plugin module has its own particular options; see the
%% `doclet' option below.
-%%
+%%
%% Also see {@link layout/2} for layout-related options, and
%% {@link get_doc/2} for options related to reading source
%% files.
@@ -298,11 +266,6 @@ opt_negations() ->
%% The default doclet module is {@link edoc_doclet}; see {@link
%% edoc_doclet:run/2} for doclet-specific options.
%% </dd>
-%% <dt>{@type {exclude_packages, [package()]@}}
-%% </dt>
-%% <dd>Lists packages to be excluded from the documentation. Typically
-%% used in conjunction with the `subpackages' option.
-%% </dd>
%% <dt>{@type {file_suffix, string()@}}
%% </dt>
%% <dd>Specifies the suffix used for output files. The default value is
@@ -314,22 +277,6 @@ opt_negations() ->
%% target directory will be ignored and overwritten. The default
%% value is `false'.
%% </dd>
-%% <dt>{@type {packages, boolean()@}}
-%% </dt>
-%% <dd>If the value is `true', it it assumed that packages (module
-%% namespaces) are being used, and that the source code directory
-%% structure reflects this. The default value is `true'. (Usually,
-%% this does the right thing even if all the modules belong to the
-%% top-level "empty" package.) `no_packages' is an alias for
-%% `{packages, false}'. See the `subpackages' option below for
-%% further details.
-%%
-%% If the source code is organized in a hierarchy of
-%% subdirectories although it does not use packages, use
-%% `no_packages' together with the recursive-search `subpackages'
-%% option (on by default) to automatically generate documentation
-%% for all the modules.
-%% </dd>
%% <dt>{@type {source_path, [filename()]@}}
%% </dt>
%% <dd>Specifies a list of file system paths used to locate the source
@@ -345,7 +292,7 @@ opt_negations() ->
%% <dd>If the value is `true', all subpackages of specified packages
%% will also be included in the documentation. The default value is
%% `false'. `no_subpackages' is an alias for `{subpackages,
-%% false}'. See also the `exclude_packages' option.
+%% false}'.
%%
%% Subpackage source files are found by recursively searching
%% for source code files in subdirectories of the known source code
@@ -358,38 +305,31 @@ opt_negations() ->
%% </dl>
%%
%% @see files/2
-%% @see packages/2
%% @see application/2
%% NEW-OPTIONS: source_path, application
%% INHERIT-OPTIONS: init_context/1
%% INHERIT-OPTIONS: expand_sources/2
%% INHERIT-OPTIONS: target_dir_info/5
-%% INHERIT-OPTIONS: edoc_lib:find_sources/3
+%% INHERIT-OPTIONS: edoc_lib:find_sources/2
%% INHERIT-OPTIONS: edoc_lib:run_doclet/2
-%% INHERIT-OPTIONS: edoc_lib:get_doc_env/4
+%% INHERIT-OPTIONS: edoc_lib:get_doc_env/3
-run(Packages, Files, Opts0) ->
+run(Files, Opts0) ->
Opts = expand_opts(Opts0),
Ctxt = init_context(Opts),
Dir = Ctxt#context.dir,
Path = proplists:append_values(source_path, Opts),
- Ss = sources(Path, Packages, Opts),
+ Ss = sources(Path, Opts),
{Ss1, Ms} = expand_sources(expand_files(Files) ++ Ss, Opts),
- Ps = [P || {_, P, _, _} <- Ss1],
App = proplists:get_value(application, Opts, ?NO_APP),
- {App1, Ps1, Ms1} = target_dir_info(Dir, App, Ps, Ms, Opts),
- %% The "empty package" is never included in the list of packages.
- Ps2 = edoc_lib:unique(lists:sort(Ps1)) -- [''],
+ {App1, Ms1} = target_dir_info(Dir, App, Ms, Opts),
Ms2 = edoc_lib:unique(lists:sort(Ms1)),
- Fs = package_files(Path, Ps2),
- Env = edoc_lib:get_doc_env(App1, Ps2, Ms2, Opts),
+ Env = edoc_lib:get_doc_env(App1, Ms2, Opts),
Ctxt1 = Ctxt#context{env = Env},
Cmd = #doclet_gen{sources = Ss1,
app = App1,
- packages = Ps2,
- modules = Ms2,
- filemap = Fs
+ modules = Ms2
},
F = fun (M) ->
M:run(Cmd, Ctxt1)
@@ -401,42 +341,22 @@ expand_opts(Opts0) ->
Opts0 ++ opt_defaults()).
%% NEW-OPTIONS: dir
-%% DEFER-OPTIONS: run/3
+%% DEFER-OPTIONS: run/2
init_context(Opts) ->
#context{dir = proplists:get_value(dir, Opts, ?CURRENT_DIR),
opts = Opts
}.
-%% INHERIT-OPTIONS: edoc_lib:find_sources/3
-
-sources(Path, Packages, Opts) ->
- lists:foldl(fun (P, Xs) ->
- edoc_lib:find_sources(Path, P, Opts) ++ Xs
- end,
- [], Packages).
-
-package_files(Path, Packages) ->
- Name = ?PACKAGE_FILE, % this is hard-coded for now
- D = lists:foldl(fun (P, D) ->
- F = edoc_lib:find_file(Path, P, Name),
- dict:store(P, F, D)
- end,
- dict:new(), Packages),
- fun (P) ->
- case dict:find(P, D) of
- {ok, F} -> F;
- error -> ""
- end
- end.
+%% INHERIT-OPTIONS: edoc_lib:find_sources/2
+
+sources(Path, Opts) ->
+ edoc_lib:find_sources(Path, Opts).
%% Expand user-specified sets of files.
-expand_files([{P, Fs1} | Fs]) ->
- [{P, filename:basename(F), filename:dirname(F)} || F <- Fs1]
- ++ expand_files(Fs);
expand_files([F | Fs]) ->
- [{'', filename:basename(F), filename:dirname(F)} |
+ [{filename:basename(F), filename:dirname(F)} |
expand_files(Fs)];
expand_files([]) ->
[].
@@ -444,26 +364,23 @@ expand_files([]) ->
%% Create the (assumed) full module names. Keep only the first source
%% for each module, but preserve the order of the list.
-%% NEW-OPTIONS: source_suffix, packages
-%% DEFER-OPTIONS: run/3
+%% NEW-OPTIONS: source_suffix
+%% DEFER-OPTIONS: run/2
expand_sources(Ss, Opts) ->
Suffix = proplists:get_value(source_suffix, Opts,
?DEFAULT_SOURCE_SUFFIX),
- Ss1 = case proplists:get_bool(packages, Opts) of
- true -> Ss;
- false -> [{'',F,D} || {_P,F,D} <- Ss]
- end,
+ Ss1 = [{F,D} || {F,D} <- Ss],
expand_sources(Ss1, Suffix, sets:new(), [], []).
-expand_sources([{'', F, D} | Fs], Suffix, S, As, Ms) ->
+expand_sources([{F, D} | Fs], Suffix, S, As, Ms) ->
M = list_to_atom(filename:rootname(F, Suffix)),
case sets:is_element(M, S) of
true ->
expand_sources(Fs, Suffix, S, As, Ms);
false ->
S1 = sets:add_element(M, S),
- expand_sources(Fs, Suffix, S1, [{M, '', F, D} | As],
+ expand_sources(Fs, Suffix, S1, [{M, F, D} | As],
[M | Ms])
end;
expand_sources([], _Suffix, _S, As, Ms) ->
@@ -471,16 +388,15 @@ expand_sources([], _Suffix, _S, As, Ms) ->
%% NEW-OPTIONS: new
-target_dir_info(Dir, App, Ps, Ms, Opts) ->
+target_dir_info(Dir, App, Ms, Opts) ->
case proplists:get_bool(new, Opts) of
true ->
- {App, Ps, Ms};
+ {App, Ms};
false ->
- {App1, Ps1, Ms1} = edoc_lib:read_info_file(Dir),
+ {App1, Ms1} = edoc_lib:read_info_file(Dir),
{if App == ?NO_APP -> App1;
true -> App
end,
- Ps ++ Ps1,
Ms ++ Ms1}
end.
@@ -505,12 +421,12 @@ toc(Dir, Opts) ->
%% INHERIT-OPTIONS: init_context/1
%% INHERIT-OPTIONS: edoc_lib:run_doclet/2
-%% INHERIT-OPTIONS: edoc_lib:get_doc_env/4
+%% INHERIT-OPTIONS: edoc_lib:get_doc_env/3
toc(Dir, Paths, Opts0) ->
Opts = expand_opts(Opts0 ++ [{dir, Dir}]),
Ctxt = init_context(Opts),
- Env = edoc_lib:get_doc_env('', [], [], Opts),
+ Env = edoc_lib:get_doc_env('', [], Opts),
Ctxt1 = Ctxt#context{env = Env},
F = fun (M) ->
M:run(#doclet_toc{paths=Paths}, Ctxt1)
@@ -562,7 +478,7 @@ layout(Doc) ->
%% </dl>
%%
%% @see layout/1
-%% @see run/3
+%% @see run/2
%% @see read/2
%% @see file/2
@@ -773,13 +689,12 @@ scan_and_parse(Epp) ->
fix_last_line(Toks0) ->
Toks1 = lists:reverse(Toks0),
- {line, LastLine} = erl_scan:token_info(hd(Toks1), line),
+ LastLine = erl_scan:line(hd(Toks1)),
fll(Toks1, LastLine, []).
-fll([{Category, Attributes0, Symbol} | L], LastLine, Ts) ->
- F = fun(_OldLine) -> LastLine end,
- Attributes = erl_scan:set_attribute(line, Attributes0, F),
- lists:reverse(L, [{Category, Attributes, Symbol} | Ts]);
+fll([{Category, Anno0, Symbol} | L], LastLine, Ts) ->
+ Anno = erl_anno:set_line(LastLine, Anno0),
+ lists:reverse(L, [{Category, Anno, Symbol} | Ts]);
fll([T | L], LastLine, Ts) ->
fll(L, LastLine, [T | Ts]);
fll(L, _LastLine, Ts) ->
@@ -853,16 +768,16 @@ get_doc(File) ->
%% </dl>
%%
%% See {@link read_source/2}, {@link read_comments/2} and {@link
-%% edoc_lib:get_doc_env/4} for further options.
+%% edoc_lib:get_doc_env/3} for further options.
%%
%% @see get_doc/3
-%% @see run/3
+%% @see run/2
%% @see edoc_extract:source/5
%% @see read/2
%% @see layout/2
%% INHERIT-OPTIONS: get_doc/3
-%% INHERIT-OPTIONS: edoc_lib:get_doc_env/4
+%% INHERIT-OPTIONS: edoc_lib:get_doc_env/3
get_doc(File, Opts) ->
Env = edoc_lib:get_doc_env(Opts),
@@ -874,7 +789,7 @@ get_doc(File, Opts) ->
%%
%% @doc Like {@link get_doc/2}, but for a given environment
%% parameter. `Env' is an environment created by {@link
-%% edoc_lib:get_doc_env/4}.
+%% edoc_lib:get_doc_env/3}.
%% INHERIT-OPTIONS: read_source/2, read_comments/2, edoc_extract:source/5
%% DEFER-OPTIONS: get_doc/2
diff --git a/lib/edoc/src/edoc.hrl b/lib/edoc/src/edoc.hrl
index 44c5d6fef4..5b0fb68cf9 100644
--- a/lib/edoc/src/edoc.hrl
+++ b/lib/edoc/src/edoc.hrl
@@ -1,6 +1,6 @@
%% =====================================================================
%% Header file for EDoc
-%%
+%%
%% Copyright (C) 2001-2004 Richard Carlsson
%%
%% This library is free software; you can redistribute it and/or modify
@@ -25,9 +25,7 @@
-define(APPLICATION, edoc).
-define(INFO_FILE, "edoc-info").
--define(PACKAGE_FILE, "package.edoc").
-define(OVERVIEW_FILE, "overview.edoc").
--define(PACKAGE_SUMMARY, "package-summary").
-define(DEFAULT_SOURCE_SUFFIX, ".erl").
-define(DEFAULT_FILE_SUFFIX, ".html").
-define(DEFAULT_DOCLET, edoc_doclet).
@@ -65,13 +63,10 @@
%% Environment for generating documentation data
-record(env, {module = [],
- package = [],
root = "",
file_suffix,
- package_summary,
apps,
modules,
- packages,
app_default,
macros = [],
includes = []
diff --git a/lib/edoc/src/edoc_data.erl b/lib/edoc/src/edoc_data.erl
index eceb5cb1bd..b797d74a71 100644
--- a/lib/edoc/src/edoc_data.erl
+++ b/lib/edoc/src/edoc_data.erl
@@ -26,7 +26,7 @@
-module(edoc_data).
--export([module/4, package/4, overview/4, type/2]).
+-export([module/4, overview/4, type/2]).
-export([hidden_filter/2, get_all_tags/1]).
@@ -510,41 +510,14 @@ get_tags(_, []) -> [].
type(T, Env) ->
xmerl_lib:expand_element({type, [edoc_types:to_xml(T, Env)]}).
-%% <!ELEMENT package (description?, author*, copyright?, version?,
-%% since?, deprecated?, see*, reference*, todo?,
-%% modules)>
-%% <!ATTLIST package
-%% name CDATA #REQUIRED
-%% root CDATA #IMPLIED>
-%% <!ELEMENT modules (module+)>
-
-package(Package, Tags, Env, Opts) ->
- Env1 = Env#env{package = Package,
- root = edoc_refs:relative_package_path('', Package)},
- xmerl_lib:expand_element(package_1(Package, Tags, Env1, Opts)).
-
-package_1(Package, Tags, Env, Opts) ->
- {package, [{root, Env#env.root}],
- ([{packageName, [atom_to_list(Package)]}]
- ++ get_doc(Tags)
- ++ authors(Tags)
- ++ get_copyright(Tags)
- ++ get_version(Tags)
- ++ get_since(Tags)
- ++ get_deprecated(Tags)
- ++ sees(Tags, Env)
- ++ references(Tags)
- ++ todos(Tags, Opts))
- }.
-
%% <!ELEMENT overview (title, description?, author*, copyright?, version?,
-%% since?, see*, reference*, todo?, packages, modules)>
+%% since?, see*, reference*, todo?, modules)>
%% <!ATTLIST overview
%% root CDATA #IMPLIED>
%% <!ELEMENT title (#PCDATA)>
overview(Title, Tags, Env, Opts) ->
- Env1 = Env#env{package = '',
+ Env1 = Env#env{
root = ""},
xmerl_lib:expand_element(overview_1(Title, Tags, Env1, Opts)).
diff --git a/lib/edoc/src/edoc_doclet.erl b/lib/edoc/src/edoc_doclet.erl
index 5653b5894b..5961ca8cc0 100644
--- a/lib/edoc/src/edoc_doclet.erl
+++ b/lib/edoc/src/edoc_doclet.erl
@@ -42,9 +42,7 @@
-define(DEFAULT_FILE_SUFFIX, ".html").
-define(INDEX_FILE, "index.html").
-define(OVERVIEW_FILE, "overview.edoc").
--define(PACKAGE_SUMMARY, "package-summary.html").
-define(OVERVIEW_SUMMARY, "overview-summary.html").
--define(PACKAGES_FRAME, "packages-frame.html").
-define(MODULES_FRAME, "modules-frame.html").
-define(STYLESHEET, "stylesheet.css").
-define(IMAGE, "erlang.png").
@@ -52,11 +50,10 @@
-include_lib("xmerl/include/xmerl.hrl").
-%% Sources is the list of inputs in the order they were found. Packages
-%% and Modules are sorted lists of atoms without duplicates. (They
+%% Sources is the list of inputs in the order they were found.
+%% Modules are sorted lists of atoms without duplicates. (They
%% usually include the data from the edoc-info file in the target
-%% directory, if it exists.) Note that the "empty package" is never
-%% included in Packages!
+%% directory, if it exists.)
%% @spec (Command::doclet_gen() | doclet_toc(), edoc_context()) -> ok
%% @doc Main doclet entry point. See the file <a
@@ -117,14 +114,12 @@
run(#doclet_gen{}=Cmd, Ctxt) ->
gen(Cmd#doclet_gen.sources,
Cmd#doclet_gen.app,
- Cmd#doclet_gen.packages,
Cmd#doclet_gen.modules,
- Cmd#doclet_gen.filemap,
Ctxt);
run(#doclet_toc{}=Cmd, Ctxt) ->
toc(Cmd#doclet_toc.paths, Ctxt).
-gen(Sources, App, Packages, Modules, FileMap, Ctxt) ->
+gen(Sources, App, Modules, Ctxt) ->
Dir = Ctxt#context.dir,
Env = Ctxt#context.env,
Options = Ctxt#context.opts,
@@ -132,11 +127,9 @@ gen(Sources, App, Packages, Modules, FileMap, Ctxt) ->
CSS = stylesheet(Options),
{Modules1, Error} = sources(Sources, Dir, Modules, Env, Options),
modules_frame(Dir, Modules1, Title, CSS),
- packages(Packages, Dir, FileMap, Env, Options),
- packages_frame(Dir, Packages, Title, CSS),
overview(Dir, Title, Env, Options),
- index_file(Dir, length(Packages) > 1, Title),
- edoc_lib:write_info_file(App, Packages, Modules1, Dir),
+ index_file(Dir, Title),
+ edoc_lib:write_info_file(App, Modules1, Dir),
copy_stylesheet(Dir, Options),
copy_image(Dir),
%% handle postponed error during processing of source files
@@ -182,19 +175,19 @@ sources(Sources, Dir, Modules, Env, Options) ->
%% set if it was successful. Errors are just flagged at this stage,
%% allowing all source files to be processed even if some of them fail.
-source({M, P, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden,
+source({M, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden,
Error, Options) ->
File = filename:join(Path, Name),
case catch {ok, edoc:get_doc(File, Env, Options)} of
{ok, {Module, Doc}} ->
- check_name(Module, M, P, File),
+ check_name(Module, M, File),
case ((not is_private(Doc)) orelse Private)
andalso ((not is_hidden(Doc)) orelse Hidden) of
true ->
Text = edoc:layout(Doc, Options),
Name1 = atom_to_list(M) ++ Suffix,
Encoding = [{encoding,encoding(Doc)}],
- edoc_lib:write_file(Text, Dir, Name1, P, Encoding),
+ edoc_lib:write_file(Text, Dir, Name1, Encoding),
{sets:add_element(Module, Set), Error};
false ->
{Set, Error}
@@ -204,8 +197,7 @@ source({M, P, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden,
{Set, true}
end.
-check_name(M, M0, P0, File) ->
- P = '',
+check_name(M, M0, File) ->
N = M,
N0 = M0,
case N of
@@ -222,47 +214,12 @@ check_name(M, M0, P0, File) ->
ok
end
end,
- if P =/= P0 ->
- warning("file '~ts' belongs to package '~s', not '~s'.",
- [File, P, P0]);
- true ->
- ok
- end.
-
-
-%% Generating the summary files for packages.
-
-%% INHERIT-OPTIONS: read_file/4
-%% INHERIT-OPTIONS: edoc_lib:run_layout/2
-
-packages(Packages, Dir, FileMap, Env, Options) ->
- lists:foreach(fun (P) ->
- package(P, Dir, FileMap, Env, Options)
- end,
- Packages).
-
-package(P, Dir, FileMap, Env, Opts) ->
- Tags = case FileMap(P) of
- "" ->
- [];
- File ->
- read_file(File, package, Env, Opts)
- end,
- Data = edoc_data:package(P, Tags, Env, Opts),
- F = fun (M) ->
- M:package(Data, Opts)
- end,
- Text = edoc_lib:run_layout(F, Opts),
- edoc_lib:write_file(Text, Dir, ?PACKAGE_SUMMARY, P).
-
+ ok.
%% Creating an index file, with some frames optional.
%% TODO: get rid of frames, or change doctype to Frameset
-index_file(Dir, Packages, Title) ->
- Frame1 = {frame, [{src,?PACKAGES_FRAME},
- {name,"packagesFrame"},{title,""}],
- []},
+index_file(Dir, Title) ->
Frame2 = {frame, [{src,?MODULES_FRAME},
{name,"modulesFrame"},{title,""}],
[]},
@@ -270,16 +227,7 @@ index_file(Dir, Packages, Title) ->
{name,"overviewFrame"},{title,""}],
[]},
Frameset = {frameset, [{cols,"20%,80%"}],
- case Packages of
- true ->
- [?NL,
- {frameset, [{rows,"30%,70%"}],
- [?NL, Frame1, ?NL, Frame2, ?NL]}
- ];
- false ->
- [?NL, Frame2, ?NL]
- end
- ++ [?NL, Frame3, ?NL,
+ [?NL, Frame2, ?NL, ?NL, Frame3, ?NL,
{noframes,
[?NL,
{h2, ["This page uses frames"]},
@@ -296,24 +244,6 @@ index_file(Dir, Packages, Title) ->
Text = xmerl:export_simple([XML], xmerl_html, []),
edoc_lib:write_file(Text, Dir, ?INDEX_FILE).
-packages_frame(Dir, Ps, Title, CSS) ->
- Body = [?NL,
- {h2, [{class, "indextitle"}], ["Packages"]},
- ?NL,
- {table, [{width, "100%"}, {border, 0},
- {summary, "list of packages"}],
- lists:concat(
- [[?NL,
- {tr, [{td, [], [{a, [{href, package_ref(P)},
- {target,"overviewFrame"},
- {class, "package"}],
- [atom_to_list(P)]}]}]}]
- || P <- Ps])},
- ?NL],
- XML = xhtml(Title, CSS, Body),
- Text = xmerl:export_simple([XML], xmerl_html, []),
- edoc_lib:write_file(Text, Dir, ?PACKAGES_FRAME).
-
modules_frame(Dir, Ms, Title, CSS) ->
Body = [?NL,
{h2, [{class, "indextitle"}], ["Modules"]},
@@ -334,11 +264,7 @@ modules_frame(Dir, Ms, Title, CSS) ->
edoc_lib:write_file(Text, Dir, ?MODULES_FRAME).
module_ref(M) ->
- edoc_refs:relative_package_path(M, '') ++ ?DEFAULT_FILE_SUFFIX.
-
-package_ref(P) ->
- edoc_lib:join_uri(edoc_refs:relative_package_path(P, ''),
- ?PACKAGE_SUMMARY).
+ atom_to_list(M) ++ ?DEFAULT_FILE_SUFFIX.
xhtml(Title, CSS, Content) ->
xhtml_1(Title, CSS, {body, [{bgcolor, "white"}], Content}).
@@ -372,7 +298,7 @@ overview(Dir, Title, Env, Opts) ->
end,
Text = edoc_lib:run_layout(F, Opts),
EncOpts = [{encoding,Encoding}],
- edoc_lib:write_file(Text, Dir, ?OVERVIEW_SUMMARY, '', EncOpts).
+ edoc_lib:write_file(Text, Dir, ?OVERVIEW_SUMMARY, EncOpts).
copy_image(Dir) ->
case code:priv_dir(?EDOC_APP) of
@@ -505,7 +431,7 @@ app_index_file(Paths, Dir, Env, Options) ->
% Priv = proplists:get_bool(private, Options),
CSS = stylesheet(Options),
Apps1 = [{filename:dirname(A),filename:basename(A)} || A <- Paths],
- index_file(Dir, false, Title),
+ index_file(Dir, Title),
application_frame(Dir, Apps1, Title, CSS),
modules_frame(Dir, [], Title, CSS),
overview(Dir, Title, Env, Options),
diff --git a/lib/edoc/src/edoc_extract.erl b/lib/edoc/src/edoc_extract.erl
index 67a95e80aa..758750083d 100644
--- a/lib/edoc/src/edoc_extract.erl
+++ b/lib/edoc/src/edoc_extract.erl
@@ -91,7 +91,7 @@ source(Forms, Comments, File, Env, Opts) ->
%% type `form_list', or a list of syntax trees representing
%% "program forms" (cf. {@link edoc:read_source/2}.
%% `Env' is an environment created by {@link
-%% edoc_lib:get_doc_env/4}. The `File' argument is used for
+%% edoc_lib:get_doc_env/3}. The `File' argument is used for
%% error reporting and output file name generation only.
%%
%% See {@link edoc:get_doc/2} for descriptions of the `def',
@@ -121,10 +121,8 @@ source1(Tree, File0, Env, Opts, TypeDocs) ->
Module = get_module_info(Tree, File),
{Header, Footer, Entries} = collect(Forms, Module),
Name = Module#module.name,
- Package = '',
Env1 = Env#env{module = Name,
- package = Package,
- root = edoc_refs:relative_package_path('', Package)},
+ root = ""},
Env2 = add_macro_defs(module_macros(Env1), Opts, Env1),
Entries1 = get_tags([Header, Footer | Entries], Env2, File, TypeDocs),
Entries2 = edoc_specs:add_data(Entries1, Opts, File, Module),
@@ -218,13 +216,13 @@ add_macro_defs(Defs0, Opts, Env) ->
%% @spec file(File::filename(), Context, Env::edoc_env(),
%% Options::proplist()) -> {ok, Tags} | {error, Reason}
-%% Context = overview | package
+%% Context = overview
%% Tags = [term()]
%% Reason = term()
%%
%% @doc Reads a text file and returns the list of tags in the file. Any
%% lines of text before the first tag are ignored. `Env' is an
-%% environment created by {@link edoc_lib:get_doc_env/4}. Upon error,
+%% environment created by {@link edoc_lib:get_doc_env/3}. Upon error,
%% `Reason' is an atom returned from the call to {@link
%% //kernel/file:read_file/1} or the atom 'invalid_unicode'.
%%
@@ -249,12 +247,12 @@ file(File, Context, Env, Opts) ->
%% @spec (Text::string(), Context, Env::edoc_env(),
%% Options::proplist()) -> Tags
-%% Context = overview | package
+%% Context = overview
%% Tags = [term()]
%%
%% @doc Returns the list of tags in the text. Any lines of text before
%% the first tag are ignored. `Env' is an environment created by {@link
-%% edoc_lib:get_doc_env/4}.
+%% edoc_lib:get_doc_env/3}.
%%
%% See {@link source/4} for a description of the `def' option.
@@ -353,8 +351,6 @@ preprocess_forms_2(F, Fs) ->
[F | preprocess_forms_1(Fs)];
{function, _} ->
[F | preprocess_forms_1(Fs)];
- {rule, _} ->
- [F | preprocess_forms_1(Fs)];
{attribute, {module, _}} ->
[F | preprocess_forms_1(Fs)];
text ->
@@ -392,15 +388,6 @@ collect([F | Fs], Cs, Ss, Ts, As, Header, Mod) ->
export = Export,
data = {comment_text(Cs),Ss,Ts}} | As],
Header, Mod);
- {rule, Name} ->
- L = erl_syntax:get_pos(F),
- Export = ordsets:is_element(Name, Mod#module.exports),
- Args = parameters(erl_syntax:rule_clauses(F)),
- collect(Fs, [], [], [],
- [#entry{name = Name, args = Args, line = L,
- export = Export,
- data = {comment_text(Cs),Ss,Ts}} | As],
- Header, Mod);
{attribute, {module, _}} when Header =:= undefined ->
L = erl_syntax:get_pos(F),
collect(Fs, [], [], [], As,
diff --git a/lib/edoc/src/edoc_layout.erl b/lib/edoc/src/edoc_layout.erl
index 36d067d9bc..62d5eb9a18 100644
--- a/lib/edoc/src/edoc_layout.erl
+++ b/lib/edoc/src/edoc_layout.erl
@@ -27,7 +27,7 @@
-module(edoc_layout).
--export([module/2, package/2, overview/2, type/1]).
+-export([module/2, overview/2, type/1]).
-import(edoc_report, [report/2]).
@@ -535,7 +535,8 @@ t_clause(Name, Type) ->
pp_clause(Pre, Type) ->
Types = ot_utype([Type]),
Atom = lists:duplicate(iolist_size(Pre), $a),
- L1 = erl_pp:attribute({attribute,0,spec,{{list_to_atom(Atom),0},[Types]}}),
+ Attr = {attribute,0,spec,{{list_to_atom(Atom),0},[Types]}},
+ L1 = erl_pp:attribute(erl_parse:new_anno(Attr)),
"-spec " ++ L2 = lists:flatten(L1),
L3 = Pre ++ lists:nthtail(length(Atom), L2),
re:replace(L3, "\n ", "\n", [{return,list},global]).
@@ -555,7 +556,8 @@ format_type(Prefix, _Name, Type, Last, _Opts) ->
pp_type(Prefix, Type) ->
Atom = list_to_atom(lists:duplicate(iolist_size(Prefix), $a)),
- L1 = erl_pp:attribute({attribute,0,type,{Atom,ot_utype(Type),[]}}),
+ Attr = {attribute,0,type,{Atom,ot_utype(Type),[]}},
+ L1 = erl_pp:attribute(erl_parse:new_anno(Attr)),
{L2,N} = case lists:dropwhile(fun(C) -> C =/= $: end, lists:flatten(L1)) of
":: " ++ L3 -> {L3,9}; % compensation for extra "()" and ":"
"::\n" ++ L3 -> {"\n"++L3,6}
@@ -978,9 +980,6 @@ get_text(Name, Es) ->
local_label(R) ->
"#" ++ R.
-xhtml(Title, CSS, Body) ->
- xhtml(Title, CSS, Body, "latin1").
-
xhtml(Title, CSS, Body, Encoding) ->
EncString = case Encoding of
"latin1" -> "ISO-8859-1";
@@ -1010,27 +1009,6 @@ type(E, Ds) ->
xmerl:export_simple_content(t_utype_elem(E) ++ local_defs(Ds, Opts),
?HTML_EXPORT).
-package(E=#xmlElement{name = package, content = Es}, Options) ->
- Opts = init_opts(E, Options),
- Name = get_text(packageName, Es),
- Title = ["Package ", Name],
- Desc = get_content(description, Es),
-% ShortDesc = get_content(briefDescription, Desc),
- FullDesc = get_content(fullDescription, Desc),
- Body = ([?NL, {h1, [Title]}, ?NL]
-% ++ ShortDesc
- ++ copyright(Es)
- ++ deprecated(Es, "package")
- ++ version(Es)
- ++ since(Es)
- ++ authors(Es)
- ++ references(Es)
- ++ sees(Es)
- ++ todos(Es)
- ++ FullDesc),
- XML = xhtml(Title, stylesheet(Opts), Body),
- xmerl:export_simple(XML, ?HTML_EXPORT, []).
-
overview(E=#xmlElement{name = overview, content = Es}, Options) ->
Opts = init_opts(E, Options),
Title = [get_text(title, Es)],
@@ -1109,8 +1087,8 @@ ot_var(E) ->
{var,0,list_to_atom(get_attrval(name, E))}.
ot_atom(E) ->
- {ok, [Atom], _} = erl_scan:string(get_attrval(value, E), 0),
- Atom.
+ {ok, [{atom,A,Name}], _} = erl_scan:string(get_attrval(value, E), 0),
+ {atom,erl_anno:line(A),Name}.
ot_integer(E) ->
{integer,0,list_to_integer(get_attrval(value, E))}.
diff --git a/lib/edoc/src/edoc_lib.erl b/lib/edoc/src/edoc_lib.erl
index c46338a2e1..dcc239f6b4 100644
--- a/lib/edoc/src/edoc_lib.erl
+++ b/lib/edoc/src/edoc_lib.erl
@@ -29,9 +29,9 @@
get_first_sentence/1, is_space/1, strip_space/1, parse_expr/2,
parse_contact/2, escape_uri/1, join_uri/2, is_relative_uri/1,
is_name/1, to_label/1, find_doc_dirs/0, find_sources/2,
- find_sources/3, find_file/3, try_subdir/2, unique/1,
- write_file/3, write_file/4, write_file/5, write_info_file/4,
- read_info_file/1, get_doc_env/1, get_doc_env/4, copy_file/2,
+ find_file/2, try_subdir/2, unique/1,
+ write_file/3, write_file/4, write_info_file/3,
+ read_info_file/1, get_doc_env/1, get_doc_env/3, copy_file/2,
uri_get/1, run_doclet/2, run_layout/2,
simplify_path/1, timestr/1, datestr/1, read_encoding/2]).
@@ -266,13 +266,6 @@ is_name_1([$_ | Cs]) ->
is_name_1([]) -> true;
is_name_1(_) -> false.
-to_atom(A) when is_atom(A) -> A;
-to_atom(S) when is_list(S) -> list_to_atom(S).
-
-to_list(A) when is_atom(A) -> atom_to_list(A);
-to_list(S) when is_list(S) -> S.
-
-
%% @private
unique([X | Xs]) -> [X | unique(Xs, X)];
unique([]) -> [].
@@ -674,7 +667,7 @@ simplify_path(P) ->
try_subdir(Dir, Subdir) ->
D = filename:join(Dir, Subdir),
case filelib:is_dir(D) of
- true -> D;
+ true -> D;
false -> Dir
end.
@@ -686,19 +679,10 @@ try_subdir(Dir, Subdir) ->
%% @private
write_file(Text, Dir, Name) ->
- write_file(Text, Dir, Name, '').
-
-%% @spec (Text::deep_string(), Dir::edoc:filename(),
-%% Name::edoc:filename(), Package::atom()|string()) -> ok
-%% @doc Like {@link write_file/3}, but adds path components to the target
-%% directory corresponding to the specified package.
-%% @private
+ write_file(Text, Dir, Name, [{encoding,latin1}]).
-write_file(Text, Dir, Name, Package) ->
- write_file(Text, Dir, Name, Package, [{encoding,latin1}]).
-
-write_file(Text, Dir, Name, Package, Options) ->
- File = filename:join([Dir, to_list(Package), Name]),
+write_file(Text, Dir, Name, Options) ->
+ File = filename:join([Dir, Name]),
ok = filelib:ensure_dir(File),
case file:open(File, [write] ++ Options) of
{ok, FD} ->
@@ -711,15 +695,14 @@ write_file(Text, Dir, Name, Package, Options) ->
end.
%% @private
-write_info_file(App, Packages, Modules, Dir) ->
- Ts = [{packages, Packages},
- {modules, Modules}],
+write_info_file(App, Modules, Dir) ->
+ Ts = [{modules, Modules}],
Ts1 = if App =:= ?NO_APP -> Ts;
true -> [{application, App} | Ts]
end,
S0 = [io_lib:fwrite("~p.\n", [T]) || T <- Ts1],
S = ["%% encoding: UTF-8\n" | S0],
- write_file(S, Dir, ?INFO_FILE, '', [{encoding,unicode}]).
+ write_file(S, Dir, ?INFO_FILE, [{encoding,unicode}]).
%% @spec (Name::edoc:filename()) -> {ok, string()} | {error, Reason}
%%
@@ -744,9 +727,8 @@ read_file(File) ->
info_file_data(Ts) ->
App = proplists:get_value(application, Ts, ?NO_APP),
- Ps = proplists:append_values(packages, Ts),
Ms = proplists:append_values(modules, Ts),
- {App, Ps, Ms}.
+ {App, Ms}.
%% Local file access - don't complain if file does not exist.
@@ -761,10 +743,10 @@ read_info_file(Dir) ->
{error, R} ->
R1 = file:format_error(R),
warning("could not read '~ts': ~ts.", [File, R1]),
- {?NO_APP, [], []}
- end;
+ {?NO_APP, []}
+ end;
false ->
- {?NO_APP, [], []}
+ {?NO_APP, []}
end.
%% URI access
@@ -776,7 +758,7 @@ uri_get_info_file(Base) ->
parse_info_file(Text, URI);
{error, Msg} ->
warning("could not read '~ts': ~ts.", [URI, Msg]),
- {?NO_APP, [], []}
+ {?NO_APP, []}
end.
parse_info_file(Text, Name) ->
@@ -785,10 +767,10 @@ parse_info_file(Text, Name) ->
info_file_data(Vs);
{error, eof} ->
warning("unexpected end of file in '~ts'.", [Name]),
- {?NO_APP, [], []};
+ {?NO_APP, []};
{error, {_Line,Module,R}} ->
warning("~ts: ~ts.", [Module:format_error(R), Name]),
- {?NO_APP, [], []}
+ {?NO_APP, []}
end.
parse_terms(Text) ->
@@ -815,82 +797,67 @@ parse_terms_1([], _As, _Vs) ->
%% ---------------------------------------------------------------------
-%% Source files and packages
+%% Source files
+%% @doc See {@link edoc:run/2} for a description of the options
+%%   `subpackages' and `source_suffix'.
%% @private
-find_sources(Path, Opts) ->
- find_sources(Path, "", Opts).
-%% @doc See {@link edoc:run/3} for a description of the options
-%% `subpackages', `source_suffix' and `exclude_packages'.
-%% @private
+%% NEW-OPTIONS: subpackages, source_suffix
+%% DEFER-OPTIONS: edoc:run/2
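+
+%% Rough usage sketch (paths and options are illustrative); per the code
+%% below, the result is a flat list of {FileName, Dir} tuples:
+%%
+%%   edoc_lib:find_sources(["src"], [{subpackages, true}, {source_suffix, ".erl"}])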
-%% NEW-OPTIONS: subpackages, source_suffix, exclude_packages
-%% DEFER-OPTIONS: edoc:run/3
-
-find_sources(Path, Pkg, Opts) ->
+find_sources(Path, Opts) ->
Rec = proplists:get_bool(subpackages, Opts),
Ext = proplists:get_value(source_suffix, Opts, ?DEFAULT_SOURCE_SUFFIX),
- find_sources(Path, Pkg, Rec, Ext, Opts).
+ find_sources(Path, Rec, Ext, Opts).
-find_sources(Path, Pkg, Rec, Ext, Opts) ->
- Skip = proplists:get_value(exclude_packages, Opts, []),
- lists:flatten(find_sources_1(Path, to_atom(Pkg), Rec, Ext, Skip)).
+find_sources(Path, Rec, Ext, _Opts) ->
+ lists:flatten(find_sources_1(Path, Rec, Ext)).
-find_sources_1([P | Ps], Pkg, Rec, Ext, Skip) ->
- Dir = filename:join(P, atom_to_list(Pkg)),
- Fs1 = find_sources_1(Ps, Pkg, Rec, Ext, Skip),
+find_sources_1([P | Ps], Rec, Ext) ->
+ Dir = P,
+ Fs1 = find_sources_1(Ps, Rec, Ext),
case filelib:is_dir(Dir) of
true ->
- [find_sources_2(Dir, Pkg, Rec, Ext, Skip) | Fs1];
+ [find_sources_2(Dir, Rec, Ext) | Fs1];
false ->
Fs1
end;
-find_sources_1([], _Pkg, _Rec, _Ext, _Skip) ->
+find_sources_1([], _Rec, _Ext) ->
[].
-find_sources_2(Dir, Pkg, Rec, Ext, Skip) ->
- case lists:member(Pkg, Skip) of
- false ->
- Es = list_dir(Dir, false), % just warn if listing fails
- Es1 = [{Pkg, E, Dir} || E <- Es, is_source_file(E, Ext)],
- case Rec of
+find_sources_2(Dir, Rec, Ext) ->
+ Es = list_dir(Dir, false), % just warn if listing fails
+ Es1 = [{E, Dir} || E <- Es, is_source_file(E, Ext)],
+ case Rec of
true ->
- [find_sources_3(Es, Dir, Pkg, Rec, Ext, Skip) | Es1];
+ [find_sources_3(Es, Dir, Rec, Ext) | Es1];
false ->
- Es1
- end;
- true ->
- []
- end.
+ Es1
+ end.
-find_sources_3(Es, Dir, Pkg, Rec, Ext, Skip) ->
+find_sources_3(Es, Dir, Rec, Ext) ->
[find_sources_2(filename:join(Dir, E),
- to_atom(join(Pkg, E)), Rec, Ext, Skip)
- || E <- Es, is_package_dir(E, Dir)].
-
-join('', E) -> E;
-join(Pkg, E) -> filename:join(Pkg, E).
+ Rec, Ext)
+ || E <- Es, is_source_dir(E, Dir)].
is_source_file(Name, Ext) ->
(filename:extension(Name) == Ext)
andalso is_name(filename:rootname(Name, Ext)).
-is_package_dir(Name, Dir) ->
- is_name(filename:rootname(filename:basename(Name)))
- andalso filelib:is_dir(filename:join(Dir, Name)).
+is_source_dir(Name, Dir) ->
+ filelib:is_dir(filename:join(Dir, Name)).
%% @private
-find_file([P | Ps], []=Pkg, Name) ->
- Pkg = [],
+find_file([P | Ps], Name) ->
File = filename:join(P, Name),
case filelib:is_file(File) of
true ->
- File;
+ File;
false ->
- find_file(Ps, Pkg, Name)
- end;
-find_file([], [], _Name) ->
+ find_file(Ps, Name)
+ end;
+find_file([], _Name) ->
"".
%% @private
@@ -909,7 +876,7 @@ find_doc_dirs([P0 | Ps]) ->
File = filename:join(Dir, ?INFO_FILE),
case filelib:is_file(File) of
true ->
- [Dir | find_doc_dirs(Ps)];
+ [Dir | find_doc_dirs(Ps)];
false ->
find_doc_dirs(Ps)
end;
@@ -921,24 +888,23 @@ find_doc_dirs([]) ->
%% implies that we use the default app-path.
%% NEW-OPTIONS: doc_path
-%% DEFER-OPTIONS: get_doc_env/4
+%% DEFER-OPTIONS: get_doc_env/3
-get_doc_links(App, Packages, Modules, Opts) ->
+get_doc_links(App, Modules, Opts) ->
Path = proplists:append_values(doc_path, Opts) ++ find_doc_dirs(),
Ds = [{P, uri_get_info_file(P)} || P <- Path],
- Ds1 = [{"", {App, Packages, Modules}} | Ds],
+ Ds1 = [{"", {App, Modules}} | Ds],
D = dict:new(),
- make_links(Ds1, D, D, D).
+ make_links(Ds1, D, D).
-make_links([{Dir, {App, Ps, Ms}} | Ds], A, P, M) ->
+make_links([{Dir, {App, Ms}} | Ds], A, M) ->
A1 = if App == ?NO_APP -> A;
true -> add_new(App, Dir, A)
end,
F = fun (K, D) -> add_new(K, Dir, D) end,
- P1 = lists:foldl(F, P, Ps),
M1 = lists:foldl(F, M, Ms),
- make_links(Ds, A1, P1, M1);
-make_links([], A, P, M) ->
+ make_links(Ds, A1, M1);
+make_links([], A, M) ->
F = fun (D) ->
fun (K) ->
case dict:find(K, D) of
@@ -947,7 +913,7 @@ make_links([], A, P, M) ->
end
end
end,
- {F(A), F(P), F(M)}.
+ {F(A), F(M)}.
add_new(K, V, D) ->
case dict:is_key(K, D) of
@@ -958,15 +924,14 @@ add_new(K, V, D) ->
end.
%% @spec (Options::proplist()) -> edoc_env()
-%% @equiv get_doc_env([], [], [], Opts)
+%% @equiv get_doc_env([], [], Opts)
%% @private
get_doc_env(Opts) ->
- get_doc_env([], [], [], Opts).
+ get_doc_env([], [], Opts).
-%% @spec (App, Packages, Modules, Options::proplist()) -> edoc_env()
+%% @spec (App, Modules, Options::proplist()) -> edoc_env()
%% App = [] | atom()
-%% Packages = [atom()]
%% Modules = [atom()]
%% proplist() = [term()]
%%
@@ -975,7 +940,7 @@ get_doc_env(Opts) ->
%% generating references. The data representation is not documented.
%%
%% @doc Creates an environment data structure used by parts of EDoc for
-%% generating references, etc. See {@link edoc:run/3} for a description
+%% generating references, etc. See {@link edoc:run/2} for a description
%% of the options `file_suffix', `app_default' and `doc_path'.
%%
%% @see edoc_extract:source/4
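+%%
+%% Rough usage sketch (application and module names are illustrative):
+%%
+%%   Env = edoc_lib:get_doc_env(myapp, [mymod1, mymod2], Opts)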
@@ -983,19 +948,17 @@ get_doc_env(Opts) ->
%% NEW-OPTIONS: file_suffix, app_default
%% INHERIT-OPTIONS: get_doc_links/4
-%% DEFER-OPTIONS: edoc:run/3
+%% DEFER-OPTIONS: edoc:run/2
-get_doc_env(App, Packages, Modules, Opts) ->
+get_doc_env(App, Modules, Opts) ->
Suffix = proplists:get_value(file_suffix, Opts,
?DEFAULT_FILE_SUFFIX),
AppDefault = proplists:get_value(app_default, Opts, ?APP_DEFAULT),
Includes = proplists:append_values(includes, Opts),
- {A, P, M} = get_doc_links(App, Packages, Modules, Opts),
+ {A, M} = get_doc_links(App, Modules, Opts),
#env{file_suffix = Suffix,
- package_summary = ?PACKAGE_SUMMARY ++ Suffix,
apps = A,
- packages = P,
modules = M,
app_default = AppDefault,
includes = Includes
@@ -1004,10 +967,10 @@ get_doc_env(App, Packages, Modules, Opts) ->
%% ---------------------------------------------------------------------
%% Plug-in modules
-%% @doc See {@link edoc:run/3} for a description of the `doclet' option.
+%% @doc See {@link edoc:run/2} for a description of the `doclet' option.
%% NEW-OPTIONS: doclet
-%% DEFER-OPTIONS: edoc:run/3
+%% DEFER-OPTIONS: edoc:run/2
%% @private
run_doclet(Fun, Opts) ->
@@ -1049,7 +1012,7 @@ get_plugin(Key, Default, Opts) ->
%% ---------------------------------------------------------------------
%% Error handling
--type line() :: erl_scan:line().
+-type line() :: erl_anno:line().
-type err() :: 'eof'
| {'missing', char()}
| {line(), atom(), string()}
diff --git a/lib/edoc/src/edoc_macros.erl b/lib/edoc/src/edoc_macros.erl
index 8efbfd00c7..e1a54d5090 100644
--- a/lib/edoc/src/edoc_macros.erl
+++ b/lib/edoc/src/edoc_macros.erl
@@ -40,10 +40,6 @@ std_macros(Env) ->
true -> [{module, atom_to_list(Env#env.module)}]
end
++
- if Env#env.package =:= [] -> [];
- true -> [{package, atom_to_list(Env#env.package)}]
- end
- ++
[{date, fun date_macro/3},
{docRoot, Env#env.root},
{link, fun link_macro/3},
@@ -315,7 +311,7 @@ macro_content([C | Cs], As, L, N) ->
macro_content([], _As, _L, _N) ->
throw('end').
--type line() :: erl_scan:line().
+-type line() :: erl_anno:line().
-type err() :: 'unterminated_macro'
| 'macro_name'
| {'macro_name', string()}
diff --git a/lib/edoc/src/edoc_parser.yrl b/lib/edoc/src/edoc_parser.yrl
index c6f8a04775..835e7ccaa6 100644
--- a/lib/edoc/src/edoc_parser.yrl
+++ b/lib/edoc/src/edoc_parser.yrl
@@ -28,7 +28,7 @@
Nonterminals
start spec func_type utype_list utype_tuple utypes utype ptypes ptype
nutype function_name where_defs defs defs2 def typedef etype
-throws qname ref aref mref lref pref var_list vars fields field
+throws qname ref aref mref lref var_list vars fields field
utype_map utype_map_fields utype_map_field
futype_list bin_base_type bin_unit_type.
@@ -207,14 +207,11 @@ typedef -> atom var_list '=' utype where_defs:
ref -> aref: '$1'.
ref -> mref: '$1'.
ref -> lref: '$1'.
-ref -> pref: '$1'.
aref -> '//' atom:
edoc_refs:app(tok_val('$2')).
aref -> '//' atom '/' mref:
edoc_refs:app(tok_val('$2'), '$4').
-aref -> '//' atom '/' pref:
- edoc_refs:app(tok_val('$2'), '$4').
mref -> qname ':' atom '/' integer:
edoc_refs:function(qname('$1'), tok_val('$3'), tok_val('$5')).
@@ -223,9 +220,6 @@ mref -> qname ':' atom '(' ')':
mref -> qname:
edoc_refs:module(qname('$1')).
-pref -> qname '.' '*':
- edoc_refs:package(qname('$1')).
-
lref -> atom '/' integer:
edoc_refs:function(tok_val('$1'), tok_val('$3')).
lref -> atom '(' ')':
@@ -344,7 +338,7 @@ build_def(S, P, As, T) ->
args = lists:reverse(As)},
type = T};
false ->
- return_error(element(2, P), "variable expected after '('")
+ return_error(tok_line(P), "variable expected after '('")
end.
all_vars([#t_var{} | As]) ->
@@ -399,7 +393,7 @@ parse_typedef_1(S, L) ->
%% @doc Parses a <a
%% href="overview-summary.html#References">reference</a> to a module,
-%% package, function, type, or application
+%% function, type, or application
parse_ref(S, L) ->
case edoc_scanner:string(S, L) of
@@ -458,7 +452,7 @@ parse_throws(S, L) ->
%% ---------------------------------------------------------------------
--spec throw_error(term(), erl_scan:line()) -> no_return().
+-spec throw_error(term(), erl_anno:line()) -> no_return().
throw_error({parse_spec, E}, L) ->
throw_error({"specification", E}, L);
diff --git a/lib/edoc/src/edoc_refs.erl b/lib/edoc/src/edoc_refs.erl
index ea439490ed..b9a9391053 100644
--- a/lib/edoc/src/edoc_refs.erl
+++ b/lib/edoc/src/edoc_refs.erl
@@ -27,10 +27,9 @@
-module(edoc_refs).
--export([app/1, app/2, package/1, module/1, module/2, module/3,
+-export([app/1, app/2, module/1, module/2, module/3,
function/2, function/3, function/4, type/1, type/2, type/3,
- to_string/1, to_label/1, get_uri/2, is_top/2,
- relative_module_path/2, relative_package_path/2]).
+ to_string/1, to_label/1, get_uri/2, is_top/2]).
-import(edoc_lib, [join_uri/2, escape_uri/1]).
@@ -56,9 +55,6 @@ module(M, Ref) ->
module(App, M, Ref) ->
app(App, module(M, Ref)).
-package(P) ->
- {package, P}.
-
function(F, A) ->
{function, F, A}.
@@ -88,8 +84,6 @@ to_string({module, M}) ->
atom_to_list(M) ;
to_string({module, M, Ref}) ->
atom_to_list(M) ++ ":" ++ to_string(Ref);
-to_string({package, P}) ->
- atom_to_list(P) ++ ".*";
to_string({function, F, A}) ->
atom_to_list(F) ++ "/" ++ integer_to_list(A);
to_string({type, T}) ->
@@ -111,24 +105,19 @@ get_uri({module, M, Ref}, Env) ->
module_ref(M, Env) ++ "#" ++ to_label(Ref);
get_uri({module, M}, Env) ->
module_ref(M, Env);
-get_uri({package, P}, Env) ->
- package_ref(P, Env);
get_uri(Ref, _Env) ->
"#" ++ to_label(Ref).
abs_uri({module, M}, Env) ->
module_absref(M, Env);
abs_uri({module, M, Ref}, Env) ->
- module_absref(M, Env) ++ "#" ++ to_label(Ref);
-abs_uri({package, P}, Env) ->
- package_absref(P, Env).
+ module_absref(M, Env) ++ "#" ++ to_label(Ref).
module_ref(M, Env) ->
case (Env#env.modules)(M) of
"" ->
File = atom_to_list(M) ++ Env#env.file_suffix,
- Path = relative_module_path(M, Env#env.package),
- join_uri(Path, escape_uri(File));
+ escape_uri(File);
Base ->
join_uri(Base, module_absref(M, Env))
end.
@@ -136,19 +125,6 @@ module_ref(M, Env) ->
module_absref(M, Env) ->
escape_uri(atom_to_list(M)) ++ escape_uri(Env#env.file_suffix).
-package_ref(P, Env) ->
- case (Env#env.packages)(P) of
- "" ->
- join_uri(relative_package_path(P, Env#env.package),
- escape_uri(Env#env.package_summary));
- Base ->
- join_uri(Base, package_absref(P, Env))
- end.
-
-package_absref(P, Env) ->
- join_uri(escape_uri(atom_to_list(P)),
- escape_uri(Env#env.package_summary)).
-
app_ref(A, Env) ->
case (Env#env.apps)(A) of
"" ->
@@ -166,43 +142,3 @@ is_top({app, _App}, _Env) ->
is_top(_Ref, _Env) ->
false.
-%% Each segment of a path must be separately escaped before joining.
-
-join_segments([S]) ->
- escape_uri(S);
-join_segments([S | Ss]) ->
- join_uri(escape_uri(S), join_segments(Ss)).
-
-%% 'From' is always the "current package" here:
-
-%% The empty string is returned if the To module has only one segment,
-%% implying a local reference.
-
-relative_module_path(_To, _From) ->
- "".
-
-relative_package_path(To, From) ->
- relative_path([atom_to_list(To)], [atom_to_list(From)]).
-
-%% This takes two lists of path segments (From, To). Note that an empty
-%% string will be returned if the paths are the same. Empty leading
-%% segments are stripped from both paths.
-
-relative_path(Ts, ["" | Fs]) ->
- relative_path(Ts, Fs);
-relative_path(["" | Ts], Fs) ->
- relative_path(Ts, Fs);
-relative_path(Ts, Fs) ->
- relative_path_1(Ts, Fs).
-
-relative_path_1([T | Ts], [F | Fs]) when F == T ->
- relative_path_1(Ts, Fs);
-relative_path_1(Ts, Fs) ->
- relative_path_2(Fs, Ts).
-
-relative_path_2([_F | Fs], Ts) ->
- relative_path_2(Fs, [".." | Ts]);
-relative_path_2([], []) ->
- "";
-relative_path_2([], Ts) ->
- join_segments(Ts).
diff --git a/lib/edoc/src/edoc_run.erl b/lib/edoc/src/edoc_run.erl
index b5a1ef713d..9a569d0879 100644
--- a/lib/edoc/src/edoc_run.erl
+++ b/lib/edoc/src/edoc_run.erl
@@ -17,7 +17,7 @@
%% @copyright 2003 Richard Carlsson
%% @author Richard Carlsson <[email protected]>
%% @see edoc
-%% @end
+%% @end
%% =====================================================================
%% @doc Interface for calling EDoc from Erlang startup options.
@@ -38,7 +38,7 @@
-module(edoc_run).
--export([file/1, application/1, packages/1, files/1, toc/1]).
+-export([file/1, application/1, files/1, toc/1]).
-compile({no_auto_import,[error/1]}).
@@ -92,28 +92,6 @@ files(Args) ->
end,
run(F).
-%% @spec packages([string()]) -> none()
-%%
-%% @doc Calls {@link edoc:application/2} with the corresponding
-%% arguments. The strings in the list are parsed as Erlang constant
-%% terms. The list can be either `[Packages]' or `[Packages, Options]'.
-%% In the first case {@link edoc:application/1} is called instead.
-%%
-%% The function call never returns; instead, the emulator is
-%% automatically terminated when the call has completed, signalling
-%% success or failure to the operating system.
-
-packages(Args) ->
- F = fun () ->
- case parse_args(Args) of
- [Packages] -> edoc:packages(Packages);
- [Packages, Opts] -> edoc:packages(Packages, Opts);
- _ ->
- invalid_args("edoc_run:packages/1", Args)
- end
- end,
- run(F).
-
%% @hidden Not official yet
toc(Args) ->
F = fun () ->
@@ -131,8 +109,8 @@ toc(Args) ->
%%
%% @deprecated This is part of the old interface to EDoc and is mainly
%% kept for backwards compatibility. The preferred way of generating
-%% documentation is through one of the functions {@link application/1},
-%% {@link packages/1} and {@link files/1}.
+%% documentation is through one of the functions {@link application/1}
+%% and {@link files/1}.
%%
%% @doc Calls {@link edoc:file/2} with the corresponding arguments. The
%% strings in the list are parsed as Erlang constant terms. The list can
diff --git a/lib/edoc/src/edoc_specs.erl b/lib/edoc/src/edoc_specs.erl
index 3bf81c6503..59f6cb8ddf 100644
--- a/lib/edoc/src/edoc_specs.erl
+++ b/lib/edoc/src/edoc_specs.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -58,7 +58,7 @@ type(Form, TypeDocs) ->
end,
{#t_name{name = N}, T, As, Doc0}
end,
- #tag{name = type, line = element(2, Type),
+ #tag{name = type, line = get_line(element(2, Type)),
origin = code,
data = {#t_typedef{name = TypeName,
args = d2e(Args),
@@ -71,7 +71,7 @@ type(Form, TypeDocs) ->
spec(Form, Clause) ->
{Name, _Arity, TypeSpecs} = get_spec(Form),
TypeSpec = lists:nth(Clause, TypeSpecs),
- #tag{name = spec, line = element(2, TypeSpec),
+ #tag{name = spec, line = get_line(element(2, TypeSpec)),
origin = code,
data = aspec(d2e(TypeSpec), Name)}.
@@ -83,7 +83,7 @@ dummy_spec(Form) ->
{#t_name{name = Name}, Arity, TypeSpecs} = get_spec(Form),
As = string:join(lists:duplicate(Arity, "_X"), ","),
S = lists:flatten(io_lib:format("~p(~s) -> true\n", [Name, As])),
- #tag{name = spec, line = element(2, hd(TypeSpecs)),
+ #tag{name = spec, line = get_line(element(2, hd(TypeSpecs))),
origin = code, data = S}.
-spec docs(Forms::[syntaxTree()],
@@ -140,7 +140,7 @@ find_type_docs([F | Fs], Cs, Fun) ->
%% Postcomments before the dot after the typespec are ignored.
C2 = [C1 | [C ||
C <- erl_syntax:get_postcomments(F),
- get_line(erl_syntax:get_pos(C)) >= LastTypeLine]],
+ erl_syntax:get_pos(C) >= LastTypeLine]],
C3 = collect_comments(Fs, LastTypeLine),
#tag{data = Doc0} = Fun(lists:reverse(C2 ++ C3), LastTypeLine),
case strip(Doc0) of % Strip away "f(). \n"
@@ -157,7 +157,7 @@ find_type_docs([F | Fs], Cs, Fun) ->
collect_comments([], _Line) ->
[];
collect_comments([F | Fs], Line) ->
- L1 = get_line(erl_syntax:get_pos(F)),
+ L1 = erl_syntax:get_pos(F),
if
L1 =:= Line + 1;
L1 =:= Line -> % a separate postcomment
@@ -190,29 +190,26 @@ get_name_and_last_line(F) ->
{Name, Data} = erl_syntax_lib:analyze_wild_attribute(F),
type = edoc_specs:tag(Name),
Attr = {attribute, erl_syntax:get_pos(F), Name, Data},
- Ref = make_ref(),
- Fun = fun(L) -> {Ref, get_line(L)} end,
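+    %% Find the highest line number among all annotations in the attribute.
+    %% erl_parse:map_anno/2 only maps, so the process dictionary is used as
+    %% an accumulator for the maximum.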
+ Fun = fun(A) ->
+ Line = get_line(A),
+ case get('$max_line') of
+ Max when Max < Line ->
+ _ = put('$max_line', Line);
+ _ ->
+ ok
+ end
+ end,
+ undefined = put('$max_line', 0),
+ _ = erl_parse:map_anno(Fun, Attr),
+ Line = erase('$max_line'),
TypeName = case Data of
{N, _T, As} when is_atom(N) -> % skip records
{N, length(As)}
end,
- Line = gll(erl_lint:modify_line(Attr, Fun), Ref),
{TypeName, Line}.
-gll({Ref, Line}, Ref) ->
- Line;
-gll([], _Ref) ->
- 0;
-gll(List, Ref) when is_list(List) ->
- lists:max([gll(E, Ref) || E <- List]);
-gll(Tuple, Ref) when is_tuple(Tuple) ->
- gll(tuple_to_list(Tuple), Ref);
-gll(_, _) ->
- 0.
-
-get_line(Pos) ->
- {line, Line} = erl_scan:attributes_info(Pos, line),
- Line.
+get_line(Anno) ->
+ erl_anno:line(Anno).
%% Collect all Erlang types. Types in comments (@type) shadow Erlang
%% types (-spec/-opaque).
@@ -348,7 +345,7 @@ d2e({type,_,constraint,[Sub,Ts0]}) ->
Ts = [ST,T] = d2e([ST0,T0]),
#t_def{name = ST, type = typevar_anno(T, Ts)};
_ ->
- throw_error(element(2, Sub), "cannot handle guard", [])
+ throw_error(get_line(element(2, Sub)), "cannot handle guard", [])
end;
d2e({type,_,union,Ts0}) ->
Ts = d2e(Ts0),
diff --git a/lib/edoc/src/edoc_tags.erl b/lib/edoc/src/edoc_tags.erl
index 82a1b72d84..9e2e41e902 100644
--- a/lib/edoc/src/edoc_tags.erl
+++ b/lib/edoc/src/edoc_tags.erl
@@ -42,7 +42,7 @@
%% Name = atom()
%% Parser = text | xml | (Text,Line,Where) -> term()
%% Flags = [Flag]
-%% Flag = module | function | package | overview | single
+%% Flag = module | function | overview | single
%%
%% Note that the pseudo-tag '@clear' is not listed here.
%% (Cf. the function 'filter_tags'.)
@@ -57,11 +57,11 @@
%% - @category (useless; superseded by keywords or free text search)
tags() ->
- All = [module,footer,function,package,overview],
- [{author, fun parse_contact/4, [module,package,overview]},
- {copyright, text, [module,package,overview,single]},
- {deprecated, xml, [module,function,package,single]},
- {doc, xml, [module,function,package,overview,single]},
+ All = [module,footer,function,overview],
+ [{author, fun parse_contact/4, [module,overview]},
+ {copyright, text, [module,overview,single]},
+ {deprecated, xml, [module,function,single]},
+ {doc, xml, [module,function,overview,single]},
{docfile, fun parse_file/4, All},
{'end', text, All},
{equiv, fun parse_expr/4, [function,single]},
@@ -69,17 +69,17 @@ tags() ->
{hidden, text, [module,function,single]},
{param, fun parse_param/4, [function]},
{private, text, [module,function,single]},
- {reference, xml, [module,footer,package,overview]},
+ {reference, xml, [module,footer,overview]},
{returns, xml, [function,single]},
- {see, fun parse_see/4, [module,function,package,overview]},
- {since, text, [module,function,package,overview,single]},
+ {see, fun parse_see/4, [module,function,overview]},
+ {since, text, [module,function,overview,single]},
{spec, fun parse_spec/4, [function,single]},
{throws, fun parse_throws/4, [function,single]},
{title, text, [overview,single]},
{'TODO', xml, All},
{todo, xml, All},
{type, fun parse_typedef/4, [module,footer,function]},
- {version, text, [module,package,overview,single]}].
+ {version, text, [module,overview,single]}].
aliases('TODO') -> todo;
aliases(return) -> returns;
@@ -342,7 +342,7 @@ parse_typedef(Data, Line, _Env, Where) ->
Def
end.
--type line() :: erl_scan:line().
+-type line() :: erl_anno:line().
-spec parse_file(_, line(), _, _) -> no_return().
@@ -369,7 +369,7 @@ parse_header(Data, Line, Env, Where) when is_list(Where) ->
{string, _, File} ->
Dir = filename:dirname(Where),
Path = Env#env.includes ++ [Dir],
- case edoc_lib:find_file(Path, "", File) of
+ case edoc_lib:find_file(Path, File) of
"" ->
throw_error(Line, {file_not_found, File});
File1 ->
diff --git a/lib/edoc/src/otpsgml_layout.erl b/lib/edoc/src/otpsgml_layout.erl
index 2c4cd919bb..052c75b9d4 100644
--- a/lib/edoc/src/otpsgml_layout.erl
+++ b/lib/edoc/src/otpsgml_layout.erl
@@ -28,7 +28,7 @@
-module(otpsgml_layout).
--export([module/2, package/2, overview/2,type/1]).
+-export([module/2, overview/2,type/1]).
-import(edoc_report, [report/2]).
@@ -811,27 +811,6 @@ xml(Title, CSS, Body) ->
xmerl:export_simple_content(t_utype_elem(E) ++ local_defs(Ds),
?SGML_EXPORT).
-
-package(E=#xmlElement{name = package, content = Es}, Options) ->
- Opts = init_opts(E, Options),
- Name = get_text(packageName, Es),
- Title = io_lib:fwrite("Package ~s", [Name]),
- Desc = get_content(description, Es),
-% ShortDesc = get_content(briefDescription, Desc),
- FullDesc = get_content(fullDescription, Desc),
- Body = ([?NL, {h1, [Title]}, ?NL]
-% ++ ShortDesc
- ++ copyright(Es)
- ++ deprecated(Es, "package")
- ++ version(Es)
- ++ since(Es)
- ++ authors(Es)
- ++ references(Es)
- ++ sees(Es)
- ++ FullDesc),
- XML = xml(Title, stylesheet(Opts), Body),
- xmerl:export_simple([XML], ?SGML_EXPORT, []).
-
overview(E=#xmlElement{name = overview, content = Es}, Options) ->
Opts = init_opts(E, Options),
Title = get_text(title, Es),
@@ -843,6 +822,7 @@ overview(E=#xmlElement{name = overview, content = Es}, Options) ->
++ copyright(Es)
++ version(Es)
++ since(Es)
+ ++ deprecated(Es, "application")
++ authors(Es)
++ references(Es)
++ sees(Es)
diff --git a/lib/edoc/test/edoc_SUITE.erl b/lib/edoc/test/edoc_SUITE.erl
index c63660c8c0..6b23054ce3 100644
--- a/lib/edoc/test/edoc_SUITE.erl
+++ b/lib/edoc/test/edoc_SUITE.erl
@@ -22,12 +22,12 @@
init_per_group/2,end_per_group/2]).
%% Test cases
--export([app/1,appup/1,build_std/1,build_map_module/1,otp_12008/1]).
+-export([app/1,appup/1,build_std/1,build_map_module/1,otp_12008/1, build_app/1]).
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [app,appup,build_std,build_map_module,otp_12008].
+ [app,appup,build_std,build_map_module,otp_12008, build_app].
groups() ->
[].
@@ -95,3 +95,20 @@ otp_12008(Config) when is_list(Config) ->
ok = edoc:files([Un2], Opts2),
{'EXIT', error} = (catch edoc:files([Un3], Opts2)),
ok.
+
+build_app(suite) -> [];
+build_app(doc) -> ["Build a local app with nested source directories"];
+build_app(Config) ->
+ DataDir = ?config(data_dir, Config),
+ PrivDir = ?config(priv_dir, Config),
+ OutDir = filename:join(PrivDir, "myapp"),
+ Src = filename:join(DataDir, "myapp"),
+
+ ok = edoc:application(myapp, Src, [{dir, OutDir}, {subpackages, false}]),
+ true = filelib:is_regular(filename:join(OutDir, "a.html")),
+ false = filelib:is_regular(filename:join(OutDir, "b.html")),
+
+ ok = edoc:application(myapp, Src, [{dir, OutDir}]),
+ true = filelib:is_regular(filename:join(OutDir, "a.html")),
+ true = filelib:is_regular(filename:join(OutDir, "b.html")),
+ ok.
diff --git a/lib/common_test/priv/bin/.gitignore b/lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy
index e69de29bb2..e69de29bb2 100644
--- a/lib/common_test/priv/bin/.gitignore
+++ b/lib/edoc/test/edoc_SUITE_data/myapp/doc/.dummy
diff --git a/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl b/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl
new file mode 100644
index 0000000000..1b5b704551
--- /dev/null
+++ b/lib/edoc/test/edoc_SUITE_data/myapp/src/a.erl
@@ -0,0 +1 @@
+-module(a).
diff --git a/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl b/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl
new file mode 100644
index 0000000000..6d6f15dfe5
--- /dev/null
+++ b/lib/edoc/test/edoc_SUITE_data/myapp/src/src_1/b.erl
@@ -0,0 +1 @@
+-module(b).
diff --git a/lib/edoc/vsn.mk b/lib/edoc/vsn.mk
index 24cfbf16d5..49a73331c6 100644
--- a/lib/edoc/vsn.mk
+++ b/lib/edoc/vsn.mk
@@ -1 +1 @@
-EDOC_VSN = 0.7.16
+EDOC_VSN = 0.7.17
diff --git a/lib/eldap/doc/src/eldap.xml b/lib/eldap/doc/src/eldap.xml
index 718a8afeec..253ba7c2ff 100644
--- a/lib/eldap/doc/src/eldap.xml
+++ b/lib/eldap/doc/src/eldap.xml
@@ -103,23 +103,27 @@ filter() See present/1, substrings/2,
<type>
<v>Handle = handle()</v>
<v>Options = ssl:ssl_options()</v>
- <v>Timeout = inifinity | positive_integer()</v>
+ <v>Timeout = infinity | positive_integer()</v>
</type>
<desc>
<p>Upgrade the connection associated with <c>Handle</c> to a tls connection if possible.</p>
- <p>The upgrade is done in two phases: first the server is asked for permission to upgrade. Second, if the request is acknowledged, the upgrade is performed.</p>
- <p>Error responese from phase one will not affect the current encryption state of the connection. Those responses are:</p>
+ <p>The upgrade is done in two phases: first the server is asked for permission to upgrade. Second, if the request is acknowledged, the upgrade to tls is performed.</p>
+ <p>Error responses from phase one will not affect the current encryption state of the connection. Those responses are:</p>
<taglist>
<tag><c>tls_already_started</c></tag>
<item>The connection is already encrypted. The connection is not affected.</item>
<tag><c>{response,ResponseFromServer}</c></tag>
        <item>The upgrade was refused by the LDAP server. The <c>ResponseFromServer</c> is an atom delivered by the LDAP server as explained in section 2.3 of RFC 2830. The connection is not affected, so it is still un-encrypted.</item>
</taglist>
- <p>Errors in the seconde phase will however end the connection:</p>
+ <p>Errors in the second phase will however end the connection:</p>
<taglist>
<tag><c>Error</c></tag>
<item>Any error responded from ssl:connect/3</item>
</taglist>
+ <p>The <c>Timeout</c> parameter is for the actual tls upgrade (phase 2) while the timeout in
+ <seealso marker="#open/2">eldap:open/2</seealso> is used for the initial negotiation about
+ upgrade (phase 1).
+ </p>
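+      <p>A minimal usage sketch (host name, certificate path and timeout
+      values are only illustrative):</p>
+      <pre>
+ {ok, Handle} = eldap:open(["ldap.example.com"], [{port, 389}, {timeout, 5000}]),
+ ok = eldap:start_tls(Handle, [{cacertfile, "cacerts.pem"}], Timeout)
+      </pre>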
</desc>
</func>
<func>
@@ -264,9 +268,9 @@ filter() See present/1, substrings/2,
</type>
<desc>
<p> Modify the DN of an entry. <c>DeleteOldRDN</c> indicates
- whether the current RDN should be removed after operation.
- <c>NewSupDN</c> should be "" if the RDN should not be moved or the new parent which
- the RDN will be moved to.</p>
+          whether the current RDN should be removed from the attribute list after the operation.
+ <c>NewSupDN</c> is the new parent that the RDN shall be moved to. If the old parent should
+ remain as parent, <c>NewSupDN</c> shall be "".</p>
<pre>
modify_dn(Handle, "cn=Bill Valentine, ou=people, o=Example Org, dc=example, dc=com ",
"cn=Bill Jr Valentine", true, "")
@@ -293,6 +297,10 @@ filter() See present/1, substrings/2,
Filter = eldap:substrings("cn", [{any,"V"}]),
search(Handle, [{base, "dc=example, dc=com"}, {filter, Filter}, {attributes, ["cn"]}]),
</pre>
+      <p>The <c>timeout</c> option in the <c>SearchOptions</c> is for the LDAP server, while
+ the timeout in <seealso marker="#open/2">eldap:open/2</seealso> is used for each
+ individual request in the search operation.
+ </p>
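+      <p>For illustration only, such a search-specific timeout could be supplied
+      together with the other search options (names are placeholders):</p>
+      <pre>
+  search(Handle, [{base, "dc=example, dc=com"}, {filter, Filter}, {timeout, Timeout}]),
+      </pre>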
</desc>
</func>
@@ -395,7 +403,11 @@ filter() See present/1, substrings/2,
<v>OptionalAttrs = [Attr]</v>
<v>Attr = {matchingRule,string()} | {type,string()} | {dnAttributes,boolean()}</v>
</type>
- <desc> <p>Creates an extensible match filter. For example, <c>eldap:extensibleMatch("Bar",[{type,"sn"},{matchingRule,"caseExactMatch"}]))</c> creates a filter which performs a <c>caseExactMatch</c> on the attribute <c>sn</c> and matches with the value <c>"Bar"</c>. The default value of <c>dnAttributes</c> is <c>false</c>.</p> </desc>
+ <desc> <p>Creates an extensible match filter. For example, </p>
+ <code>
+ eldap:extensibleMatch("Bar", [{type,"sn"}, {matchingRule,"caseExactMatch"}]))
+ </code>
+ <p>creates a filter which performs a <c>caseExactMatch</c> on the attribute <c>sn</c> and matches with the value <c>"Bar"</c>. The default value of <c>dnAttributes</c> is <c>false</c>.</p> </desc>
</func>
<func>
<name>'and'([Filter]) -> filter()</name>
diff --git a/lib/eldap/doc/src/notes.xml b/lib/eldap/doc/src/notes.xml
index f92d100757..e76101c30e 100644
--- a/lib/eldap/doc/src/notes.xml
+++ b/lib/eldap/doc/src/notes.xml
@@ -30,6 +30,71 @@
</header>
<p>This document describes the changes made to the Eldap application.</p>
+<section><title>Eldap 1.1.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Corrects that <c>eldap:close/1</c> returned a tuple
+ instead of the specified atom <c>ok</c>.</p>
+ <p>
+ Own Id: OTP-12349</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Clarification in the reference manual for
+ <c>eldap:modify_dn/5</c>, <c>eldap:search/2</c> and
+ <c>eldap:start_tls/3</c>.</p>
+ <p>
+ Own Id: OTP-12354</p>
+ </item>
+ <item>
+ <p>
+ The eldap test suites are extended and re-organized.</p>
+ <p>
+ Own Id: OTP-12355</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Eldap 1.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fixed that eldap:open did not use the Timeout parameter
+ when calling ssl:connect. (Thanks Wiesław Bieniek for
+ reporting)</p>
+ <p>
+ Own Id: OTP-12311</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Added the LDAP filter <c>extensibleMatch</c>.</p>
+ <p>
+ Own Id: OTP-12174</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Eldap 1.0.4</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/eldap/src/eldap.erl b/lib/eldap/src/eldap.erl
index c636e0e0cd..ae47c815c9 100644
--- a/lib/eldap/src/eldap.erl
+++ b/lib/eldap/src/eldap.erl
@@ -125,7 +125,8 @@ getopts(Handle, OptNames) when is_pid(Handle), is_list(OptNames) ->
%%% --------------------------------------------------------------------
close(Handle) when is_pid(Handle) ->
- send(Handle, close).
+ send(Handle, close),
+ ok.
%%% --------------------------------------------------------------------
%%% Set who we should link ourselves to
@@ -412,7 +413,7 @@ parse_args([{port, Port}|T], Cpid, Data) when is_integer(Port) ->
parse_args([{timeout, Timeout}|T], Cpid, Data) when is_integer(Timeout),Timeout>0 ->
parse_args(T, Cpid, Data#eldap{timeout = Timeout});
parse_args([{anon_auth, true}|T], Cpid, Data) ->
- parse_args(T, Cpid, Data#eldap{anon_auth = false});
+ parse_args(T, Cpid, Data#eldap{anon_auth = true});
parse_args([{anon_auth, _}|T], Cpid, Data) ->
parse_args(T, Cpid, Data);
parse_args([{ssl, true}|T], Cpid, Data) ->
diff --git a/lib/eldap/test/Makefile b/lib/eldap/test/Makefile
index 24e71cebaa..28a7a107e1 100644
--- a/lib/eldap/test/Makefile
+++ b/lib/eldap/test/Makefile
@@ -28,8 +28,9 @@ INCLUDES= -I. -I ../include
# ----------------------------------------------------
MODULES= \
- eldap_connections_SUITE \
- eldap_basic_SUITE
+ eldap_basic_SUITE \
+ make_certs
+
ERL_FILES= $(MODULES:%=%.erl)
diff --git a/lib/eldap/test/eldap_basic_SUITE.erl b/lib/eldap/test/eldap_basic_SUITE.erl
index 7f2be54b71..8d754e934c 100644
--- a/lib/eldap/test/eldap_basic_SUITE.erl
+++ b/lib/eldap/test/eldap_basic_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2012-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2012-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -24,323 +24,923 @@
%%-include_lib("common_test/include/ct.hrl").
-include_lib("test_server/include/test_server.hrl").
-include_lib("eldap/include/eldap.hrl").
+-include_lib("eldap/ebin/ELDAPv3.hrl").
+
-define(TIMEOUT, 120000). % 2 min
+all() ->
+ [app,
+ appup,
+ {group, encode_decode},
+ {group, return_values},
+ {group, v4_connections},
+ {group, v6_connections},
+ {group, plain_api},
+ {group, ssl_api},
+ {group, start_tls_api}
+ ].
+
+groups() ->
+ [{encode_decode, [], [encode,
+ decode
+ ]},
+ {plain_api, [], [{group,api}]},
+ {ssl_api, [], [{group,api}, start_tls_on_ssl_should_fail]},
+ {start_tls_api, [], [{group,api}, start_tls_twice_should_fail]},
+
+ {api, [], [{group,api_not_bound},
+ {group,api_bound}]},
+
+ {api_not_bound, [], [elementary_search, search_non_existant,
+ add_when_not_bound,
+ bind]},
+ {api_bound, [], [add_when_bound,
+ add_already_exists,
+ more_add,
+ search_filter_equalityMatch,
+ search_filter_substring_any,
+ search_filter_initial,
+ search_filter_final,
+ search_filter_and,
+ search_filter_or,
+ search_filter_and_not,
+ search_two_hits,
+ modify,
+ delete,
+ modify_dn_delete_old,
+ modify_dn_keep_old]},
+ {v4_connections, [], connection_tests()},
+ {v6_connections, [], connection_tests()},
+ {return_values, [], [open_ret_val_success,
+ open_ret_val_error,
+ close_ret_val]}
+ ].
+
+connection_tests() ->
+ [tcp_connection,
+ tcp_connection_option,
+ ssl_connection,
+ client_side_start_tls_timeout,
+ client_side_bind_timeout,
+ client_side_add_timeout,
+ client_side_search_timeout
+ ].
+
+
+
init_per_suite(Config) ->
- StartSsl = try ssl:start()
- catch
- Error:Reason ->
- {skip, lists:flatten(io_lib:format("eldap init_per_suite failed to start ssl Error=~p Reason=~p", [Error, Reason]))}
- end,
- case StartSsl of
- ok ->
- chk_config(ldap_server, {"localhost",9876},
- chk_config(ldaps_server, {"localhost",9877},
- Config));
- _ ->
- StartSsl
- end.
+ SSL_available = init_ssl_certs_et_al(Config),
+ LDAP_server = find_first_server(false, [{config,eldap_server}, {config,ldap_server}, {"localhost",9876}]),
+ LDAPS_server =
+ case SSL_available of
+ true ->
+ find_first_server(true, [{config,ldaps_server}, {"localhost",9877}]);
+ false ->
+ undefined
+ end,
+ [{ssl_available, SSL_available},
+ {ldap_server, LDAP_server},
+ {ldaps_server, LDAPS_server} | Config].
end_per_suite(_Config) ->
- ok.
-
-init_per_testcase(_TestCase, Config0) ->
- {EldapHost,Port} = proplists:get_value(ldap_server,Config0),
- try
- {ok, Handle} = eldap:open([EldapHost], [{port,Port}]),
- ok = eldap:simple_bind(Handle, "cn=Manager,dc=ericsson,dc=se", "hejsan"),
- {ok, MyHost} = inet:gethostname(),
- Path = "dc="++MyHost++",dc=ericsson,dc=se",
- eldap:add(Handle,"dc=ericsson,dc=se",
- [{"objectclass", ["dcObject", "organization"]},
- {"dc", ["ericsson"]}, {"o", ["Testing"]}]),
- eldap:add(Handle,Path,
- [{"objectclass", ["dcObject", "organization"]},
- {"dc", [MyHost]}, {"o", ["Test machine"]}]),
- [{eldap_path,Path}|Config0]
- catch error:{badmatch,Error} ->
- io:format("Eldap init error ~p~n ~p~n",[Error, erlang:get_stacktrace()]),
- {skip, lists:flatten(io_lib:format("Ldap init failed with host ~p:~p. Error=~p", [EldapHost,Port,Error]))}
+ ssl:stop().
+
+
+init_per_group(return_values, Config) ->
+ case ?config(ldap_server,Config) of
+ undefined ->
+            {skip, "LDAP server not available"};
+ {Host,Port} ->
+ ct:comment("ldap://~s:~p",[Host,Port]),
+ Config
+ end;
+init_per_group(plain_api, Config0) ->
+ case ?config(ldap_server,Config0) of
+ undefined ->
+            {skip, "LDAP server not available"};
+ Server = {Host,Port} ->
+ ct:comment("ldap://~s:~p",[Host,Port]),
+ initialize_db([{server,Server}, {ssl_flag,false}, {start_tls,false} | Config0])
+ end;
+init_per_group(ssl_api, Config0) ->
+ case ?config(ldaps_server,Config0) of
+ undefined ->
+            {skip, "LDAPS server not available"};
+ Server = {Host,Port} ->
+ ct:comment("ldaps://~s:~p",[Host,Port]),
+ initialize_db([{server,Server}, {ssl_flag,true}, {start_tls,false} | Config0])
+ end;
+init_per_group(start_tls_api, Config0) ->
+ case {?config(ldap_server,Config0), ?config(ssl_available,Config0)} of
+ {undefined,true} ->
+            {skip, "LDAP server not available"};
+ {_,false} ->
+            {skip, "TLS not available"};
+ {Server={Host,Port}, true} ->
+ ct:comment("ldap://~s:~p + start_tls",[Host,Port]),
+ Config = [{server,Server}, {ssl_flag,false} | Config0],
+ case supported_extension("1.3.6.1.4.1.1466.20037", Config) of
+ true -> initialize_db([{start_tls,true} | Config]);
+ false -> {skip, "start_tls not supported according to the server"}
+ end
+ end;
+init_per_group(v4_connections, Config) ->
+ [{tcp_listen_opts, [{reuseaddr, true}]},
+ {listen_host, "localhost"},
+ {tcp_connect_opts, []}
+ | Config];
+init_per_group(v6_connections, Config) ->
+ {ok, Hostname} = inet:gethostname(),
+ case lists:member(list_to_atom(Hostname), ct:get_config(ipv6_hosts,[])) of
+ true ->
+ [{tcp_listen_opts, [inet6,{reuseaddr, true}]},
+ {listen_host, "::"},
+ {tcp_connect_opts, [{tcpopts,[inet6]}]}
+ | Config];
+ false ->
+ {skip, io_lib:format("~p is not an ipv6_host",[Hostname])}
+ end;
+init_per_group(_, Config) ->
+ Config.
+
+end_per_group(plain_api, Config) -> clear_db(Config);
+end_per_group(ssl_api, Config) -> clear_db(Config);
+end_per_group(start_tls_api, Config) -> clear_db(Config);
+end_per_group(_Group, Config) -> Config.
+
+
+init_per_testcase(ssl_connection, Config) ->
+ case ?config(ssl_available,Config) of
+ true ->
+ SSL_Port = 9999,
+ CertFile = filename:join(?config(data_dir,Config), "certs/server/cert.pem"),
+ KeyFile = filename:join(?config(data_dir,Config), "certs/server/key.pem"),
+
+ Parent = self(),
+ Listener = spawn_link(
+ fun() ->
+ case ssl:listen(SSL_Port, [{certfile, CertFile},
+ {keyfile, KeyFile}
+ | ?config(tcp_listen_opts,Config)
+ ]) of
+ {ok,SSL_LSock} ->
+ Parent ! {ok,self()},
+ (fun L() ->
+ ct:log("ssl server waiting for connections...",[]),
+ {ok, S} = ssl:transport_accept(SSL_LSock),
+ ct:log("ssl:transport_accept/1 ok",[]),
+ ok = ssl:ssl_accept(S),
+ ct:log("ssl:ssl_accept/1 ok",[]),
+ L()
+ end)();
+ Other ->
+ Parent ! {not_ok,Other,self()}
+ end
+ end),
+ receive
+ {ok,Listener} ->
+ ct:log("SSL listening to port ~p (process ~p)",[SSL_Port, Listener]),
+ [{ssl_listener,Listener},
+ {ssl_listen_port,SSL_Port},
+ {ssl_connect_opts,[]}
+ | Config];
+            {not_ok,SSL_Other,Listener} ->
+ ct:log("ssl:listen on port ~p failed: ~p",[SSL_Port,SSL_Other]),
+ {fail, "ssl:listen/2 failed"}
+ after 5000 ->
+ {fail, "Waiting for ssl:listen timeout"}
+ end;
+ false ->
+ {skip, "ssl not available"}
+ end;
+
+init_per_testcase(TC, Config) ->
+ case lists:member(TC,connection_tests()) of
+ true ->
+ case gen_tcp:listen(0, proplists:get_value(tcp_listen_opts,Config)) of
+ {ok,LSock} ->
+ {ok,{_,Port}} = inet:sockname(LSock),
+ [{listen_socket,LSock},
+ {listen_port,Port}
+ | Config];
+ Other ->
+ {fail, Other}
+ end;
+
+ false ->
+ case proplists:get_value(name,?config(tc_group_properties, Config)) of
+ api_not_bound ->
+ {ok,H} = open(Config),
+ [{handle,H} | Config];
+ api_bound ->
+ {ok,H} = open(Config),
+ ok = eldap:simple_bind(H,
+ "cn=Manager,dc=ericsson,dc=se",
+ "hejsan"),
+ [{handle,H} | Config];
+ _Name ->
+ Config
+ end
end.
-end_per_testcase(_TestCase, Config) ->
- {EHost, Port} = proplists:get_value(ldap_server, Config),
- Path = proplists:get_value(eldap_path, Config),
- {ok, H} = eldap:open([EHost], [{port, Port}]),
- ok = eldap:simple_bind(H, "cn=Manager,dc=ericsson,dc=se", "hejsan"),
- case eldap:search(H, [{base, Path},
- {filter, eldap:present("objectclass")},
- {scope, eldap:wholeSubtree()}])
- of
- {ok, {eldap_search_result, Entries, _}} ->
- [ok = eldap:delete(H, Entry) || {eldap_entry, Entry, _} <- Entries];
- _ -> ignore
- end,
-
- ok.
+end_per_testcase(_, Config) ->
+ catch gen_tcp:close( proplists:get_value(listen_socket, Config) ),
+ catch eldap:close( proplists:get_value(handle,Config) ).
-%% suite() ->
-all() ->
- [app,
- appup,
- api,
- ssl_api,
- start_tls,
- tls_operations,
- start_tls_twice,
- start_tls_on_ssl
- ].
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%%
+%%% Test cases
+%%%
-app(doc) -> "Test that the eldap app file is ok";
-app(suite) -> [];
+%%%----------------------------------------------------------------
+%%% Test that the eldap app file is ok
app(Config) when is_list(Config) ->
ok = test_server:app_test(eldap).
-%% Test that the eldap appup file is ok
+%%%----------------------------------------------------------------
+%%% Test that the eldap appup file is ok
appup(Config) when is_list(Config) ->
ok = test_server:appup_test(eldap).
-api(doc) -> "Basic test that all api functions works as expected";
-api(suite) -> [];
-api(Config) ->
- {Host,Port} = proplists:get_value(ldap_server, Config),
- {ok, H} = eldap:open([Host], [{port,Port}
- ,{log,fun(Lvl,Fmt,Args)-> io:format("~p: ~s",[Lvl,io_lib:format(Fmt,Args)]) end}
- ]),
- %% {ok, H} = eldap:open([Host], [{port,Port+1}, {ssl, true}]),
- do_api_checks(H, Config),
- eldap:close(H),
- ok.
+%%%----------------------------------------------------------------
+open_ret_val_success(Config) ->
+ {Host,Port} = ?config(ldap_server,Config),
+ {ok,H} = eldap:open([Host], [{port,Port}]),
+ catch eldap:close(H).
+
+%%%----------------------------------------------------------------
+open_ret_val_error(_Config) ->
+ {error,_} = eldap:open(["nohost.example.com"], [{port,65535}]).
+
+%%%----------------------------------------------------------------
+close_ret_val(Config) ->
+ {Host,Port} = ?config(ldap_server,Config),
+ {ok,H} = eldap:open([Host], [{port,Port}]),
+ ok = eldap:close(H).
+
+%%%----------------------------------------------------------------
+tcp_connection(Config) ->
+ Host = proplists:get_value(listen_host, Config),
+ Port = proplists:get_value(listen_port, Config),
+ Opts = proplists:get_value(tcp_connect_opts, Config),
+ case eldap:open([Host], [{port,Port}|Opts]) of
+ {ok,_H} ->
+ Sl = proplists:get_value(listen_socket, Config),
+ case gen_tcp:accept(Sl,1000) of
+ {ok,_S} -> ok;
+ {error,timeout} -> ct:fail("server side accept timeout",[]);
+            Other -> ct:fail("gen_tcp:accept failed: ~p",[Other])
+ end;
+ Other -> ct:fail("eldap:open failed: ~p",[Other])
+ end.
+%%%----------------------------------------------------------------
+ssl_connection(Config) ->
+ Host = proplists:get_value(listen_host, Config),
+ Port = proplists:get_value(ssl_listen_port, Config),
+ Opts = proplists:get_value(tcp_connect_opts, Config),
+ SSLOpts = proplists:get_value(ssl_connect_opts, Config),
+ case eldap:open([Host], [{port,Port},
+ {ssl,true},
+ {timeout,5000},
+ {sslopts,SSLOpts}|Opts]) of
+ {ok,_H} -> ok;
+ Other -> ct:fail("eldap:open failed: ~p",[Other])
+ end.
-ssl_api(doc) -> "Basic test that all api functions works as expected";
-ssl_api(suite) -> [];
-ssl_api(Config) ->
- {Host,Port} = proplists:get_value(ldaps_server, Config),
- {ok, H} = eldap:open([Host], [{port,Port}, {ssl,true}]),
- do_api_checks(H, Config),
- eldap:close(H),
- ok.
+%%%----------------------------------------------------------------
+client_side_add_timeout(Config) ->
+ client_timeout(
+ fun(H) ->
+ eldap:add(H, "cn=Foo Bar,dc=host,dc=ericsson,dc=se",
+ [{"objectclass", ["person"]},
+ {"cn", ["Foo Bar"]},
+ {"sn", ["Bar"]},
+ {"telephoneNumber", ["555-1232", "555-5432"]}])
+ end, Config).
+
+%%%----------------------------------------------------------------
+client_side_bind_timeout(Config) ->
+ client_timeout(
+ fun(H) ->
+ eldap:simple_bind(H, anon, anon)
+ end, Config).
+
+%%%----------------------------------------------------------------
+client_side_search_timeout(Config) ->
+ client_timeout(
+ fun(H) ->
+ eldap:search(H, [{base,"dc=host,dc=ericsson,dc=se"},
+ {filter, eldap:present("objectclass")},
+ {scope, eldap:wholeSubtree()}])
+ end, Config).
+
+%%%----------------------------------------------------------------
+client_side_start_tls_timeout(Config) ->
+ client_timeout(
+ fun(H) ->
+ eldap:start_tls(H, [])
+ end, Config).
+
+%%%----------------------------------------------------------------
+tcp_connection_option(Config) ->
+ Host = proplists:get_value(listen_host, Config),
+ Port = proplists:get_value(listen_port, Config),
+ Opts = proplists:get_value(tcp_connect_opts, Config),
+ Sl = proplists:get_value(listen_socket, Config),
+
+ %% Make an option value to test. The option must be implemented on all
+ %% platforms that we test on. Must check what the default value is
+ %% so we don't happen to choose that particular value.
+ {ok,[{linger,DefaultLinger}]} = inet:getopts(Sl, [linger]),
+ TestLinger = case DefaultLinger of
+ {false,_} -> {true,5};
+ {true,_} -> {false,0}
+ end,
+
+ case catch eldap:open([Host],
+ [{port,Port},{tcpopts,[{linger,TestLinger}]}|Opts]) of
+ {ok,H} ->
+ case gen_tcp:accept(Sl,1000) of
+ {ok,_} ->
+ case eldap:getopts(H, [{tcpopts,[linger]}]) of
+ {ok,[{tcpopts,[{linger,ActualLinger}]}]} ->
+ case ActualLinger of
+ TestLinger ->
+ ok;
+ DefaultLinger ->
+ ct:fail("eldap:getopts: 'linger' didn't change,"
+ " got ~p (=default) expected ~p",
+ [ActualLinger,TestLinger]);
+ _ ->
+ ct:fail("eldap:getopts: bad 'linger', got ~p expected ~p",
+ [ActualLinger,TestLinger])
+ end;
+ Other ->
+ ct:fail("eldap:getopts: bad result ~p",[Other])
+ end;
+ {error,timeout} ->
+ ct:fail("server side accept timeout",[])
+ end;
+
+ Other ->
+ ct:fail("eldap:open failed: ~p",[Other])
+ end.
-start_tls(doc) -> "Test that an existing (tcp) connection can be upgraded to tls";
-start_tls(suite) -> [];
-start_tls(Config) ->
- {Host,Port} = proplists:get_value(ldap_server, Config),
- {ok, H} = eldap:open([Host], [{port,Port}]),
- ok = eldap:start_tls(H, [
- {keyfile, filename:join([proplists:get_value(data_dir,Config),
- "certs/client/key.pem"])}
- ]),
- eldap:close(H).
+%%%----------------------------------------------------------------
+%%% Basic test that all api functions works as expected
+
+%%%----------------------------------------------------------------
+elementary_search(Config) ->
+ {ok, #eldap_search_result{entries=[_]}} =
+ eldap:search(?config(handle,Config),
+ #eldap_search{base = ?config(eldap_path, Config),
+ filter= eldap:present("objectclass"),
+ scope = eldap:wholeSubtree()}).
+
+%%%----------------------------------------------------------------
+search_non_existant(Config) ->
+ {error, noSuchObject} =
+ eldap:search(?config(handle,Config),
+ #eldap_search{base = "cn=Bar," ++ ?config(eldap_path, Config),
+ filter= eldap:present("objectclass"),
+ scope = eldap:wholeSubtree()}).
+
+%%%----------------------------------------------------------------
+add_when_not_bound(Config) ->
+ {error, _} = eldap:add(?config(handle,Config),
+ "cn=Jonas Jonsson," ++ ?config(eldap_path, Config),
+ [{"objectclass", ["person"]},
+ {"cn", ["Jonas Jonsson"]},
+ {"sn", ["Jonsson"]}]).
+
+%%%----------------------------------------------------------------
+bind(Config) ->
+ ok = eldap:simple_bind(?config(handle,Config),
+ "cn=Manager,dc=ericsson,dc=se",
+ "hejsan").
+
+%%%----------------------------------------------------------------
+add_when_bound(Config) ->
+ ok = eldap:add(?config(handle, Config),
+ "cn=Jonas Jonsson," ++ ?config(eldap_path, Config),
+ [{"objectclass", ["person"]},
+ {"cn", ["Jonas Jonsson"]},
+ {"sn", ["Jonsson"]}]).
+
+%%%----------------------------------------------------------------
+add_already_exists(Config) ->
+ {error, entryAlreadyExists} =
+ eldap:add(?config(handle, Config),
+ "cn=Jonas Jonsson," ++ ?config(eldap_path, Config),
+ [{"objectclass", ["person"]},
+ {"cn", ["Jonas Jonsson"]},
+ {"sn", ["Jonsson"]}]).
+
+%%%----------------------------------------------------------------
+more_add(Config) ->
+ H = ?config(handle, Config),
+ BasePath = ?config(eldap_path, Config),
+ ok = eldap:add(H, "cn=Foo Bar," ++ BasePath,
+ [{"objectclass", ["person"]},
+ {"cn", ["Foo Bar"]},
+ {"sn", ["Bar"]},
+ {"telephoneNumber", ["555-1232", "555-5432"]}]),
+ ok = eldap:add(H, "ou=Team," ++ BasePath,
+ [{"objectclass", ["organizationalUnit"]},
+ {"ou", ["Team"]}]).
-tls_operations(doc) -> "Test that an upgraded connection is usable for ldap stuff";
-tls_operations(suite) -> [];
-tls_operations(Config) ->
- {Host,Port} = proplists:get_value(ldap_server, Config),
- {ok, H} = eldap:open([Host], [{port,Port}]),
- ok = eldap:start_tls(H, [
- {keyfile, filename:join([proplists:get_value(data_dir,Config),
- "certs/client/key.pem"])}
- ]),
- do_api_checks(H, Config),
+%%%----------------------------------------------------------------
+search_filter_equalityMatch(Config) ->
+ BasePath = ?config(eldap_path, Config),
+ ExpectedDN = "cn=Jonas Jonsson," ++ BasePath,
+ {ok, #eldap_search_result{entries=[#eldap_entry{object_name=ExpectedDN}]}} =
+ eldap:search(?config(handle, Config),
+ #eldap_search{base = BasePath,
+ filter = eldap:equalityMatch("sn", "Jonsson"),
+ scope=eldap:singleLevel()}).
+
+%%%----------------------------------------------------------------
+search_filter_substring_any(Config) ->
+ BasePath = ?config(eldap_path, Config),
+ ExpectedDN = "cn=Jonas Jonsson," ++ BasePath,
+ {ok, #eldap_search_result{entries=[#eldap_entry{object_name=ExpectedDN}]}} =
+ eldap:search(?config(handle, Config),
+ #eldap_search{base = BasePath,
+ filter = eldap:substrings("sn", [{any, "ss"}]),
+ scope=eldap:singleLevel()}).
+
+%%%----------------------------------------------------------------
+search_filter_initial(Config) ->
+ H = ?config(handle, Config),
+ BasePath = ?config(eldap_path, Config),
+ ExpectedDN = "cn=Foo Bar," ++ BasePath,
+ {ok, #eldap_search_result{entries=[#eldap_entry{object_name=ExpectedDN}]}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:substrings("sn", [{initial, "B"}]),
+ scope=eldap:singleLevel()}).
+
+%%%----------------------------------------------------------------
+search_filter_final(Config) ->
+ H = ?config(handle, Config),
+ BasePath = ?config(eldap_path, Config),
+ ExpectedDN = "cn=Foo Bar," ++ BasePath,
+ {ok, #eldap_search_result{entries=[#eldap_entry{object_name=ExpectedDN}]}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:substrings("sn", [{final, "r"}]),
+ scope=eldap:singleLevel()}).
+
+%%%----------------------------------------------------------------
+search_filter_and(Config) ->
+ H = ?config(handle, Config),
+ BasePath = ?config(eldap_path, Config),
+ ExpectedDN = "cn=Foo Bar," ++ BasePath,
+ {ok, #eldap_search_result{entries=[#eldap_entry{object_name=ExpectedDN}]}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:'and'([eldap:substrings("sn", [{any, "a"}]),
+ eldap:equalityMatch("cn","Foo Bar")]),
+ scope=eldap:singleLevel()}).
+
+%%%----------------------------------------------------------------
+search_filter_or(Config) ->
+ H = ?config(handle, Config),
+ BasePath = ?config(eldap_path, Config),
+ ExpectedDNs = lists:sort(["cn=Foo Bar," ++ BasePath,
+ "ou=Team," ++ BasePath]),
+ {ok, #eldap_search_result{entries=Es}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:'or'([eldap:substrings("sn", [{any, "a"}]),
+ eldap:equalityMatch("ou","Team")]),
+ scope=eldap:singleLevel()}),
+ ExpectedDNs = lists:sort([DN || #eldap_entry{object_name=DN} <- Es]).
+
+%%%----------------------------------------------------------------
+search_filter_and_not(Config) ->
+ H = ?config(handle, Config),
+ BasePath = ?config(eldap_path, Config),
+ {ok, #eldap_search_result{entries=[]}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:'and'([eldap:substrings("sn", [{any, "a"}]),
+ eldap:'not'(
+ eldap:equalityMatch("cn","Foo Bar")
+ )]),
+ scope=eldap:singleLevel()}).
+
+%%%----------------------------------------------------------------
+search_two_hits(Config) ->
+ H = ?config(handle, Config),
+ BasePath = ?config(eldap_path, Config),
+ DN1 = "cn=Santa Claus," ++ BasePath,
+ DN2 = "cn=Jultomten," ++ BasePath,
+ %% Add two objects:
+ ok = eldap:add(H, DN1,
+ [{"objectclass", ["person"]},
+ {"cn", ["Santa Claus"]},
+ {"sn", ["Santa"]},
+ {"description", ["USA"]}]),
+ ok = eldap:add(H, DN2,
+ [{"objectclass", ["person"]},
+ {"cn", ["Jultomten"]},
+ {"sn", ["Tomten"]},
+ {"description", ["Sweden"]}]),
+
+ %% Search for them:
+ {ok, #eldap_search_result{entries=Es}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:present("description"),
+ scope=eldap:singleLevel()}),
+
+ %% And check that they are the expected ones:
+ ExpectedDNs = lists:sort([DN1, DN2]),
+ ExpectedDNs = lists:sort([D || #eldap_entry{object_name=D} <- Es]),
+
+ %% Restore the database:
+ [ok=eldap:delete(H,DN) || DN <- ExpectedDNs].
+
+%%%----------------------------------------------------------------
+modify(Config) ->
+ H = ?config(handle, Config),
+ BasePath = ?config(eldap_path, Config),
+ %% The object to modify
+ DN = "cn=Foo Bar," ++ BasePath,
+
+ %% Save a copy to restore later:
+ {ok,OriginalAttrs} = attributes(H, DN),
+
+ %% Do a change
+ Mod = [eldap:mod_replace("telephoneNumber", ["555-12345"]),
+ eldap:mod_add("description", ["Nice guy"])],
+ ok = eldap:modify(H, DN, Mod),
+
+ %% Check that the object was changed
+ {ok, #eldap_search_result{entries=[#eldap_entry{object_name=DN}]}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:equalityMatch("telephoneNumber", "555-12345"),
+ scope=eldap:singleLevel()}),
+
+ %% Do another type of change
+ ok = eldap:modify(H, DN, [eldap:mod_delete("telephoneNumber", [])]),
+ %% and check that it worked by repeating the test above
+ {ok, #eldap_search_result{entries=[]}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:equalityMatch("telephoneNumber", "555-12345"),
+ scope=eldap:singleLevel()}),
+    %% restore the original version:
+ restore_original_object(H, DN, OriginalAttrs).
+
+%%%----------------------------------------------------------------
+delete(Config) ->
+ H = ?config(handle, Config),
+ BasePath = ?config(eldap_path, Config),
+ %% The element to play with:
+ DN = "cn=Jonas Jonsson," ++ BasePath,
+
+ %% Prove that the element is present before deletion
+ {ok,OriginalAttrs} = attributes(H, DN),
+
+ %% Do what the test has to do:
+ ok = eldap:delete(H, DN),
+ %% check that it really was deleted:
+ {error, noSuchObject} = eldap:delete(H, DN),
+
+ %% And restore the object for subsequent tests
+ restore_original_object(H, DN, OriginalAttrs).
+
+%%%----------------------------------------------------------------
+modify_dn_delete_old(Config) ->
+ H = ?config(handle, Config),
+ BasePath = ?config(eldap_path, Config),
+ OrigCN = "Foo Bar",
+ OriginalRDN = "cn="++OrigCN,
+ DN = OriginalRDN ++ "," ++ BasePath,
+ NewCN = "Niclas Andre",
+ NewRDN = "cn="++NewCN,
+ NewDN = NewRDN ++ "," ++BasePath,
+
+ %% Check that the object to modify_dn of exists:
+ {ok,OriginalAttrs} = attributes(H, DN),
+ CN_orig = lists:sort(proplists:get_value("cn",OriginalAttrs)),
+ {ok, #eldap_search_result{entries=[#eldap_entry{object_name=DN}]}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:substrings("sn", [{any, "a"}]),
+ scope = eldap:singleLevel()}),
+
+ %% Modify and delete the old one:
+ ok = eldap:modify_dn(H, DN, NewRDN, true, ""),
+
+ %% Check that DN was modified and the old one was deleted:
+ {ok,NewAttrs} = attributes(H, NewDN),
+ CN_new = lists:sort(proplists:get_value("cn",NewAttrs)),
+ {ok, #eldap_search_result{entries=[#eldap_entry{object_name=NewDN}]}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:substrings("sn", [{any, "a"}]),
+ scope = eldap:singleLevel()}),
+ %% What we expect:
+ CN_new = lists:sort([NewCN | CN_orig -- [OrigCN]]),
+
+ %% Change back:
+ ok = eldap:modify_dn(H, NewDN, OriginalRDN, true, ""),
+
+ %% Check that DN was modified and the new one was deleted:
+ {ok,SameAsOriginalAttrs} = attributes(H, DN),
+ CN_orig = lists:sort(proplists:get_value("cn",SameAsOriginalAttrs)),
+ {ok, #eldap_search_result{entries=[#eldap_entry{object_name=DN}]}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:substrings("sn", [{any, "a"}]),
+ scope = eldap:singleLevel()}).
+
+%%%----------------------------------------------------------------
+modify_dn_keep_old(Config) ->
+ H = ?config(handle, Config),
+ BasePath = ?config(eldap_path, Config),
+ OriginalRDN = "cn=Foo Bar",
+ DN = OriginalRDN ++ "," ++ BasePath,
+ NewCN = "Niclas Andre",
+ NewRDN = "cn="++NewCN,
+ NewDN = NewRDN ++ "," ++BasePath,
+
+ %% Check that the object to apply modify_dn to exists but the new one does not:
+ {ok,OriginalAttrs} = attributes(H, DN),
+ {ok, #eldap_search_result{entries=[#eldap_entry{object_name=DN}]}} =
+ eldap:search(H,
+ #eldap_search{base = BasePath,
+ filter = eldap:substrings("sn", [{any, "a"}]),
+ scope = eldap:singleLevel()}),
+
+ %% Modify but keep the old "cn" attr:
+ ok = eldap:modify_dn(H, DN, NewRDN, false, ""),
+
+ %% Check that DN was modified and the old CN entry is not deleted:
+ {ok,NewAttrs} = attributes(H, NewDN),
+ CN_orig = proplists:get_value("cn",OriginalAttrs),
+ CN_new = proplists:get_value("cn",NewAttrs),
+ Expected = lists:sort([NewCN|CN_orig]),
+ Expected = lists:sort(CN_new),
+
+ %% Restore db:
+ ok = eldap:delete(H, NewDN),
+ restore_original_object(H, DN, OriginalAttrs).
+
+%%%----------------------------------------------------------------
+%%% Test that a second start_tls on an already upgraded connection fails cleanly
+start_tls_twice_should_fail(Config) ->
+ {ok,H} = open_bind(Config),
+ {error,tls_already_started} = eldap:start_tls(H, []),
eldap:close(H).
-start_tls_twice(doc) -> "Test that start_tls on an already upgraded connection fails";
-start_tls_twice(suite) -> [];
-start_tls_twice(Config) ->
- {Host,Port} = proplists:get_value(ldap_server, Config),
- {ok, H} = eldap:open([Host], [{port,Port}]),
- ok = eldap:start_tls(H, []),
+%%%----------------------------------------------------------------
+%%% Test that start_tls on an ldaps connection fails
+start_tls_on_ssl_should_fail(Config) ->
+ {ok,H} = open_bind(Config),
{error,tls_already_started} = eldap:start_tls(H, []),
- do_api_checks(H, Config),
eldap:close(H).
+%%%----------------------------------------------------------------
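+%% Check that a minimal AddRequest encodes to the expected BER byte
+%% sequence (same reference value as in the removed eldap_misc_SUITE).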
+encode(_Config) ->
+ {ok,Bin} = 'ELDAPv3':encode('AddRequest', #'AddRequest'{entry="hejHopp" ,attributes=[]} ),
+ Expected = <<104,11,4,7,104,101,106,72,111,112,112,48,0>>,
+ case Bin of
+ Expected -> ok;
+ _ -> ct:log("Encoded erroneously to:~n~p~nExpected:~n~p",[Bin,Expected]),
+ {fail, "Bad encode"}
+ end.
+
+%%%----------------------------------------------------------------
+decode(_Config) ->
+ {ok,Res} = 'ELDAPv3':decode('AddRequest', <<104,11,4,7,104,101,106,72,111,112,112,48,0>>),
+ ct:log("Res = ~p", [Res]),
+ Expected = #'AddRequest'{entry = "hejHopp",attributes = []},
+ case Res of
+ Expected -> ok;
+ #'AddRequest'{entry= <<"hejHopp">>, attributes=[]} ->
+ {fail, "decoded to (correct) binary!!"};
+ _ ->
+ {fail, "Bad decode"}
+ end.
+
-start_tls_on_ssl(doc) -> "Test that start_tls on an ldaps connection fails";
-start_tls_on_ssl(suite) -> [];
-start_tls_on_ssl(Config) ->
- {Host,Port} = proplists:get_value(ldaps_server, Config),
- {ok, H} = eldap:open([Host], [{port,Port}, {ssl,true}]),
- {error,tls_already_started} = eldap:start_tls(H, []),
- do_api_checks(H, Config),
- eldap:close(H).
+%%%****************************************************************
+%%% Private
-%%%--------------------------------------------------------------------------------
-chk_config(Key, Default, Config) ->
- case catch ct:get_config(ldap_server, undefined) of
- undefined -> [{Key,Default} | Config ];
- {'EXIT',_} -> [{Key,Default} | Config ];
- Value -> [{Key,Value} | Config]
+attributes(H, DN) ->
+ case eldap:search(H,
+ #eldap_search{base = DN,
+ filter= eldap:present("objectclass"),
+ scope = eldap:wholeSubtree()}) of
+ {ok, #eldap_search_result{entries=[#eldap_entry{object_name=DN,
+ attributes=OriginalAttrs}]}} ->
+ {ok, OriginalAttrs};
+ Other ->
+ Other
end.
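+%% Re-create an object from the attribute list saved with attributes/2;
+%% the delete is best-effort since the object may already be gone.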
+restore_original_object(H, DN, Attrs) ->
+ eldap:delete(H, DN),
+ ok = eldap:add(H, DN, Attrs).
+
+
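+%% Try the candidate servers in order and return the first {Host,Port}
+%% that answers. For plain LDAP a host that also accepts start_tls is
+%% preferred; one that only does plain LDAP is kept as a spare fallback.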
+find_first_server(UseSSL, [{config,Key}|Ss]) ->
+ case ct:get_config(Key) of
+ {Host,Port} ->
+ ct:log("find_first_server config ~p -> ~p",[Key,{Host,Port}]),
+ find_first_server(UseSSL, [{Host,Port}|Ss]);
+ undefined ->
+ ct:log("find_first_server config ~p is undefined",[Key]),
+ find_first_server(UseSSL, Ss)
+ end;
+find_first_server(UseSSL, [{Host,Port}|Ss]) ->
+ case eldap:open([Host],[{port,Port},{ssl,UseSSL}]) of
+ {ok,H} when UseSSL==false, Ss=/=[] ->
+ case eldap:start_tls(H,[]) of
+ ok ->
+ ct:log("find_first_server ~p UseSSL=~p -> ok",[{Host,Port},UseSSL]),
+ eldap:close(H),
+ {Host,Port};
+ Res ->
+ ct:log("find_first_server ~p UseSSL=~p failed with~n~p~nSave as spare host.",[{Host,Port},UseSSL,Res]),
+ eldap:close(H),
+ find_first_server(UseSSL, Ss++[{spare_host,Host,Port}])
+ end;
+ {ok,H} ->
+ ct:log("find_first_server ~p UseSSL=~p -> ok",[{Host,Port},UseSSL]),
+ eldap:close(H),
+ {Host,Port};
+ Res ->
+ ct:log("find_first_server ~p UseSSL=~p failed with~n~p",[{Host,Port},UseSSL,Res]),
+ find_first_server(UseSSL, Ss)
+ end;
+find_first_server(false, [{spare_host,Host,Port}|_]) ->
+ ct:log("find_first_server can't find start_tls host, use the spare non-start_tls host for plain ldap: ~p",[{Host,Port}]),
+ {Host,Port};
+find_first_server(_, []) ->
+ ct:log("find_first_server, nothing left to try",[]),
+ undefined.
+
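+%% Build the test subtree dc=<hostname>,dc=ericsson,dc=se and store its
+%% DN in Config under the key eldap_path.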
+initialize_db(Config) ->
+ case {open_bind(Config), inet:gethostname()} of
+ {{ok,H}, {ok,MyHost}} ->
+ Path = "dc="++MyHost++",dc=ericsson,dc=se",
+ delete_old_contents(H, Path),
+ add_new_contents(H, Path, MyHost),
+ eldap:close(H),
+ [{eldap_path,Path}|Config];
+ Other ->
+ ct:fail("initialize_db failed: ~p",[Other])
+ end.
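+%% Remove the test subtree created by initialize_db/1.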
+clear_db(Config) ->
+ {ok,H} = open_bind(Config),
+ Path = ?config(eldap_path, Config),
+ delete_old_contents(H, Path),
+ eldap:close(H),
+ Config.
-do_api_checks(H, Config) ->
- BasePath = proplists:get_value(eldap_path, Config),
+delete_old_contents(H, Path) ->
+ case eldap:search(H, [{base, Path},
+ {filter, eldap:present("objectclass")},
+ {scope, eldap:wholeSubtree()}])
+ of
+ {ok, #eldap_search_result{entries=Entries}} ->
+ [ok = eldap:delete(H,DN) || #eldap_entry{object_name=DN} <- Entries];
+ _Res ->
+ ignore
+ end.
- All = fun(Where) ->
- eldap:search(H, #eldap_search{base=Where,
- filter=eldap:present("objectclass"),
- scope= eldap:wholeSubtree()})
- end,
- {ok, #eldap_search_result{entries=[_XYZ]}} = All(BasePath),
-%% ct:log("XYZ=~p",[_XYZ]),
- {error, noSuchObject} = All("cn=Bar,"++BasePath),
+add_new_contents(H, Path, MyHost) ->
+ ok(eldap:add(H,"dc=ericsson,dc=se",
+ [{"objectclass", ["dcObject", "organization"]},
+ {"dc", ["ericsson"]},
+ {"o", ["Testing"]}])),
+ ok(eldap:add(H,Path,
+ [{"objectclass", ["dcObject", "organization"]},
+ {"dc", [MyHost]},
+ {"o", ["Test machine"]}])).
- {error, _} = eldap:add(H, "cn=Jonas Jonsson," ++ BasePath,
- [{"objectclass", ["person"]},
- {"cn", ["Jonas Jonsson"]}, {"sn", ["Jonsson"]}]),
- eldap:simple_bind(H, "cn=Manager,dc=ericsson,dc=se", "hejsan"),
- chk_add(H, BasePath),
- {ok,FB} = chk_search(H, BasePath),
- chk_modify(H, FB),
- chk_modify_password(H, FB),
- chk_delete(H, BasePath),
- chk_modify_dn(H, FB).
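+%% Treat entryAlreadyExists as success so that the base entries can be
+%% (re)added without failing on repeated runs.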
+ok({error,entryAlreadyExists}) -> ok;
+ok(X) -> ok=X.
-chk_add(H, BasePath) ->
- ok = eldap:add(H, "cn=Jonas Jonsson," ++ BasePath,
- [{"objectclass", ["person"]},
- {"cn", ["Jonas Jonsson"]}, {"sn", ["Jonsson"]}]),
- {error, entryAlreadyExists} = eldap:add(H, "cn=Jonas Jonsson," ++ BasePath,
- [{"objectclass", ["person"]},
- {"cn", ["Jonas Jonsson"]}, {"sn", ["Jonsson"]}]),
- ok = eldap:add(H, "cn=Foo Bar," ++ BasePath,
- [{"objectclass", ["person"]},
- {"cn", ["Foo Bar"]}, {"sn", ["Bar"]}, {"telephoneNumber", ["555-1232", "555-5432"]}]),
- ok = eldap:add(H, "ou=Team," ++ BasePath,
- [{"objectclass", ["organizationalUnit"]},
- {"ou", ["Team"]}]).
-chk_search(H, BasePath) ->
- Search = fun(Filter) ->
- eldap:search(H, #eldap_search{base=BasePath,
- filter=Filter,
- scope=eldap:singleLevel()})
- end,
- JJSR = {ok, #eldap_search_result{entries=[#eldap_entry{}]}} = Search(eldap:equalityMatch("sn", "Jonsson")),
- JJSR = Search(eldap:substrings("sn", [{any, "ss"}])),
- FBSR = {ok, #eldap_search_result{entries=[#eldap_entry{object_name=FB}]}} =
- Search(eldap:substrings("sn", [{any, "a"}])),
- FBSR = Search(eldap:substrings("sn", [{initial, "B"}])),
- FBSR = Search(eldap:substrings("sn", [{final, "r"}])),
- F_AND = eldap:'and'([eldap:present("objectclass"), eldap:present("ou")]),
- {ok, #eldap_search_result{entries=[#eldap_entry{}]}} = Search(F_AND),
- F_NOT = eldap:'and'([eldap:present("objectclass"), eldap:'not'(eldap:present("ou"))]),
- {ok, #eldap_search_result{entries=[#eldap_entry{}, #eldap_entry{}]}} = Search(F_NOT),
- {ok, #eldap_search_result{entries=[#eldap_entry{}]}} = Search(eldap:extensibleMatch("Bar",[{type,"sn"},{matchingRule,"caseExactMatch"}])),
- {ok, #eldap_search_result{entries=[#eldap_entry{}]}} = Search(eldap:extensibleMatch("Bar",[{type,"sn"},{matchingRule,"2.5.13.5"}])),
- {ok, #eldap_search_result{entries=[#eldap_entry{}]}} = Search(eldap:extensibleMatch("Bar",[{type,"sn"},{matchingRule,"caseIgnoreMatch"}])),
- {ok, #eldap_search_result{entries=[#eldap_entry{}]}} = Search(eldap:extensibleMatch("bar",[{type,"sn"},{matchingRule,"caseIgnoreMatch"}])),
- {ok, #eldap_search_result{entries=[]}} = Search(eldap:extensibleMatch("bar",[{type,"sn"},{matchingRule,"gluffgluff"}])),
- {ok, #eldap_search_result{entries=[]}} = Search(eldap:extensibleMatch("bar",[{type,"sn"},{matchingRule,"caseExactMatch"}])),
- {ok,FB}. %% FIXME
-
-chk_modify(H, FB) ->
- Mod = [eldap:mod_replace("telephoneNumber", ["555-12345"]),
- eldap:mod_add("description", ["Nice guy"])],
- %% io:format("MOD ~p ~p ~n",[FB, Mod]),
- ok = eldap:modify(H, FB, Mod),
- %% DELETE ATTR
- ok = eldap:modify(H, FB, [eldap:mod_delete("telephoneNumber", [])]).
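+%% Upgrade the connection with start_tls only when the current test group
+%% has start_tls enabled in Config.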
+cond_start_tls(H, Config) ->
+ case ?config(start_tls,Config) of
+ true -> start_tls(H,Config);
+ _ -> Config
+ end.
-chk_modify_password(H, FB) ->
- %% Change password, and ensure we can bind with it.
- ok = eldap:simple_bind(H, "cn=Manager,dc=ericsson,dc=se", "hejsan"),
- ok = eldap:modify_password(H, FB, "example"),
- ok = eldap:simple_bind(H, FB, "example"),
- %% Change password to a server generated value.
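+%% Upgrade to TLS using the client key generated by make_certs; a failure
+%% here fails the test case.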
+start_tls(H, Config) ->
+ KeyFile = filename:join([?config(data_dir,Config),
+ "certs/client/key.pem"
+ ]),
+ case eldap:start_tls(H, [{keyfile, KeyFile}]) of
+ ok ->
+ [{start_tls_success,true} | Config];
+ Error ->
+ ct:log("Start_tls on ~p failed: ~p",[?config(url,Config) ,Error]),
+ ct:fail("start_tls failed")
+ end.
+
+
+%%%----------------------------------------------------------------
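+%% Open a connection according to Config (plain or ssl, possibly upgraded
+%% with start_tls) and bind as the directory manager.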
+open_bind(Config) ->
+ {ok,H} = open(Config),
ok = eldap:simple_bind(H, "cn=Manager,dc=ericsson,dc=se", "hejsan"),
- {ok, Passwd} = eldap:modify_password(H, FB, []),
- ok = eldap:simple_bind(H, FB, Passwd),
- %% Change own password to server generated value.
- {ok, NewPasswd} = eldap:modify_password(H, [], [], Passwd),
- ok = eldap:simple_bind(H, FB, NewPasswd),
- %% Change own password to explicit value.
- ok = eldap:modify_password(H, [], "example", NewPasswd),
- ok = eldap:simple_bind(H, FB, "example"),
- %% Restore original binding.
- ok = eldap:simple_bind(H, "cn=Manager,dc=ericsson,dc=se", "hejsan").
-
-chk_delete(H, BasePath) ->
- {error, entryAlreadyExists} = eldap:add(H, "cn=Jonas Jonsson," ++ BasePath,
- [{"objectclass", ["person"]},
- {"cn", ["Jonas Jonsson"]}, {"sn", ["Jonsson"]}]),
- ok = eldap:delete(H, "cn=Jonas Jonsson," ++ BasePath),
- {error, noSuchObject} = eldap:delete(H, "cn=Jonas Jonsson," ++ BasePath).
-
-chk_modify_dn(H, FB) ->
- ok = eldap:modify_dn(H, FB, "cn=Niclas Andre", true, "").
- %%io:format("Res ~p~n ~p~n",[R, All(BasePath)]).
-
-
-%%%----------------
-add(H, Attr, Value, Path0, Attrs, Class) ->
- Path = case Path0 of
- [] -> Attr ++ "=" ++ Value;
- _ -> Attr ++ "=" ++ Value ++ "," ++ Path0
- end,
- case eldap:add(H, Path, [{"objectclass", Class}, {Attr, [Value]}] ++ Attrs)
- of
- ok -> {ok, Path};
- {error, E = entryAlreadyExists} -> {E, Path};
- R = {error, Reason} ->
- io:format("~p:~p: ~s,~s =>~n ~p~n",
- [?MODULE,?LINE, Attr, Value, R]),
- exit({ldap, add, Reason})
+ {ok,H}.
+
+open(Config) ->
+ {Host,Port} = ?config(server,Config),
+ SSLflag = ?config(ssl_flag,Config),
+ {ok,H} = eldap:open([Host], [{port,Port},{ssl,SSLflag}]),
+ cond_start_tls(H, Config),
+ {ok,H}.
+
+%%%----------------------------------------------------------------
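+%% Read the root DSE (base-object search requesting the "+" operational
+%% attributes) and check whether OID is listed in supportedExtension.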
+supported_extension(OID, Config) ->
+ {ok,H} = open_bind(Config),
+ case eldap:search(H, [{scope, eldap:baseObject()},
+ {filter, eldap:present("objectclass")},
+ {deref, eldap:neverDerefAliases()},
+ {attributes, ["+"]}]) of
+ {ok,R=#eldap_search_result{}} ->
+ eldap:close(H),
+ lists:member(OID,
+ [SE || EE <- R#eldap_search_result.entries,
+ {"supportedExtension",SEs} <- EE#eldap_entry.attributes,
+ SE<-SEs]);
+ _ ->
+ eldap:close(H),
+ false
end.
+%%%----------------------------------------------------------------
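+%% Run Fun on a connection opened with {timeout,T} towards the test
+%% listener and require that the {gen_tcp_error,timeout} result does not
+%% arrive earlier than T milliseconds.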
+client_timeout(Fun, Config) ->
+ Host = proplists:get_value(listen_host, Config),
+ Port = proplists:get_value(listen_port, Config),
+ Opts = proplists:get_value(tcp_connect_opts, Config),
+ T = 1000,
+ case eldap:open([Host], [{timeout,T},{port,Port}|Opts]) of
+ {ok,H} ->
+ T0 = erlang:monotonic_time(),
+ {error,{gen_tcp_error,timeout}} = Fun(H),
+ T_op = ms_passed(T0),
+ ct:log("Time = ~p, Timeout spec = ~p",[T_op,T]),
+ if
+ T_op < T ->
+ {fail, "Timeout too early"};
+ true ->
+ ok
+ end;
+
+ Other -> ct:fail("eldap:open failed: ~p",[Other])
+ end.
+%% Helper function: elapsed milliseconds since T0
+ms_passed(T0) ->
+ %% OTP 18
+ erlang:convert_time_unit(erlang:monotonic_time() - T0,
+ native,
+ micro_seconds) / 1000.
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% Develop
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-test() ->
- run().
-
-run() ->
- Cases = all(),
- run(Cases).
-
-run(Case) when is_atom(Case) ->
- run([Case]);
-run(Cases) when is_list(Cases) ->
- Run = fun(Test, Config0) ->
- Config = init_per_testcase(Test, Config0),
- try
- io:format("~nTest ~p ... ",[Test]),
- ?MODULE:Test(Config),
- end_per_testcase(Test, Config),
- io:format("ok~n",[])
- catch _:Reason ->
- io:format("~n FAIL (~p): ~p~n ~p~n",
- [Test, Reason, erlang:get_stacktrace()])
- end
- end,
- process_flag(trap_exit, true),
- Pid = spawn_link(fun() ->
- case init_per_suite([]) of
- {skip, Reason} -> io:format("Skip ~s~n",[Reason]);
- Config ->
- try
- [Run(Test, Config) || Test <- Cases]
- catch _:Err ->
- io:format("Error ~p in ~p~n",[Err, erlang:get_stacktrace()])
- end,
- end_per_suite(Config)
- end
- end),
- receive
- {'EXIT', Pid, normal} -> ok;
- Msg -> io:format("Received ~p (~p)~n",[Msg, Pid])
- after 100 -> ok end,
- process_flag(trap_exit, false),
- ok.
+%%%----------------------------------------------------------------
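+%% Start ssl (unless already running) and generate the test certificates
+%% with make_certs; returns true only if both steps succeed.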
+init_ssl_certs_et_al(Config) ->
+ try ssl:start()
+ of
+ R when R==ok ; R=={error,{already_started,ssl}} ->
+ try make_certs:all("/dev/null",
+ filename:join(?config(data_dir,Config), "certs"))
+ of
+ {ok,_} -> true;
+ Other ->
+ ct:comment("make_certs failed"),
+ ct:log("make_certs failed ~p", [Other]),
+ false
+ catch
+ C:E ->
+ ct:comment("make_certs crashed"),
+ ct:log("make_certs failed ~p:~p", [C,E]),
+ false
+ end;
+ _ ->
+ false
+ catch
+ Error:Reason ->
+ ct:comment("ssl failed to start"),
+ ct:log("init_per_suite failed to start ssl Error=~p Reason=~p", [Error, Reason]),
+ false
+ end.
diff --git a/lib/eldap/test/eldap_basic_SUITE_data/RAND b/lib/eldap/test/eldap_basic_SUITE_data/RAND
new file mode 100644
index 0000000000..70997bd01f
--- /dev/null
+++ b/lib/eldap/test/eldap_basic_SUITE_data/RAND
Binary files differ
diff --git a/lib/eldap/test/eldap_connections_SUITE.erl b/lib/eldap/test/eldap_connections_SUITE.erl
deleted file mode 100644
index c5460fef09..0000000000
--- a/lib/eldap/test/eldap_connections_SUITE.erl
+++ /dev/null
@@ -1,147 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2012-2014. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--module(eldap_connections_SUITE).
-
--compile(export_all).
-
--include_lib("common_test/include/ct.hrl").
-%-include_lib("eldap/include/eldap.hrl").
-
-
-all() ->
- [
- {group, v4},
- {group, v6}
- ].
-
-
-init_per_group(v4, Config) ->
- [{listen_opts, []},
- {listen_host, "localhost"},
- {connect_opts, []}
- | Config];
-init_per_group(v6, Config) ->
- {ok, Hostname} = inet:gethostname(),
- case lists:member(list_to_atom(Hostname), ct:get_config(ipv6_hosts,[])) of
- true ->
- [{listen_opts, [inet6]},
- {listen_host, "::"},
- {connect_opts, [{tcpopts,[inet6]}]}
- | Config];
- false ->
- {skip, io_lib:format("~p is not an ipv6_host",[Hostname])}
- end.
-
-
-end_per_group(_GroupName, Config) ->
- Config.
-
-
-groups() ->
- [{v4, [], [tcp_connection, tcp_connection_option]},
- {v6, [], [tcp_connection, tcp_connection_option]}
- ].
-
-
-init_per_suite(Config) -> Config.
-
-
-end_per_suite(_Config) -> ok.
-
-
-init_per_testcase(_TestCase, Config) ->
- case gen_tcp:listen(0, proplists:get_value(listen_opts,Config)) of
- {ok,LSock} ->
- {ok,{_,Port}} = inet:sockname(LSock),
- [{listen_socket,LSock},
- {listen_port,Port}
- | Config];
- Other ->
- {fail, Other}
- end.
-
-
-end_per_testcase(_TestCase, Config) ->
- catch gen_tcp:close( proplists:get_value(listen_socket, Config) ).
-
-%%%================================================================
-%%%
-%%% Test cases
-%%%
-%%%----------------------------------------------------------------
-tcp_connection(Config) ->
- Host = proplists:get_value(listen_host, Config),
- Port = proplists:get_value(listen_port, Config),
- Opts = proplists:get_value(connect_opts, Config),
- case eldap:open([Host], [{port,Port}|Opts]) of
- {ok,_H} ->
- Sl = proplists:get_value(listen_socket, Config),
- case gen_tcp:accept(Sl,1000) of
- {ok,_S} -> ok;
- {error,timeout} -> ct:fail("server side accept timeout",[])
- end;
- Other -> ct:fail("eldap:open failed: ~p",[Other])
- end.
-
-
-%%%----------------------------------------------------------------
-tcp_connection_option(Config) ->
- Host = proplists:get_value(listen_host, Config),
- Port = proplists:get_value(listen_port, Config),
- Opts = proplists:get_value(connect_opts, Config),
- Sl = proplists:get_value(listen_socket, Config),
-
- %% Make an option value to test. The option must be implemented on all
- %% platforms that we test on. Must check what the default value is
- %% so we don't happen to choose that particular value.
- {ok,[{linger,DefaultLinger}]} = inet:getopts(Sl, [linger]),
- TestLinger = case DefaultLinger of
- {false,_} -> {true,5};
- {true,_} -> {false,0}
- end,
-
- case catch eldap:open([Host],
- [{port,Port},{tcpopts,[{linger,TestLinger}]}|Opts]) of
- {ok,H} ->
- case gen_tcp:accept(Sl,1000) of
- {ok,_} ->
- case eldap:getopts(H, [{tcpopts,[linger]}]) of
- {ok,[{tcpopts,[{linger,ActualLinger}]}]} ->
- case ActualLinger of
- TestLinger ->
- ok;
- DefaultLinger ->
- ct:fail("eldap:getopts: 'linger' didn't change,"
- " got ~p (=default) expected ~p",
- [ActualLinger,TestLinger]);
- _ ->
- ct:fail("eldap:getopts: bad 'linger', got ~p expected ~p",
- [ActualLinger,TestLinger])
- end;
- Other ->
- ct:fail("eldap:getopts: bad result ~p",[Other])
- end;
- {error,timeout} ->
- ct:fail("server side accept timeout",[])
- end;
-
- Other ->
- ct:fail("eldap:open failed: ~p",[Other])
- end.
diff --git a/lib/eldap/test/eldap_misc_SUITE.erl b/lib/eldap/test/eldap_misc_SUITE.erl
deleted file mode 100644
index ca810ee33c..0000000000
--- a/lib/eldap/test/eldap_misc_SUITE.erl
+++ /dev/null
@@ -1,51 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2012-2014. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--module(eldap_misc_SUITE).
-
--compile(export_all). %% Use this only in test suites...
-
--include_lib("common_test/include/ct.hrl").
--include_lib("eldap/include/eldap.hrl").
--include_lib("eldap/ebin/ELDAPv3.hrl").
-
-all() ->
- [
- encode,
- decode
- ].
-
-
-encode(_Config) ->
- {ok,Bin} = 'ELDAPv3':encode('AddRequest', #'AddRequest'{entry="hejHopp" ,attributes=[]} ),
- Expected = <<104,11,4,7,104,101,106,72,111,112,112,48,0>>,
- Expected = Bin.
-
-decode(_Config) ->
- {ok,Res} = 'ELDAPv3':decode('AddRequest', <<104,11,4,7,104,101,106,72,111,112,112,48,0>>),
- ct:log("Res = ~p", [Res]),
- Expected = #'AddRequest'{entry = "hejHopp",attributes = []},
- case Res of
- Expected -> ok;
- #'AddRequest'{entry= <<"hejHopp">>, attributes=[]} ->
- {fail, "decoded to (correct) binary!!"};
- _ ->
- {fail, "Bad decode"}
- end.
-
diff --git a/lib/eldap/test/make_certs.erl b/lib/eldap/test/make_certs.erl
index f963af180d..15a7e118ff 100644
--- a/lib/eldap/test/make_certs.erl
+++ b/lib/eldap/test/make_certs.erl
@@ -1,41 +1,89 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2013. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2007-2012. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(make_certs).
+-compile([export_all]).
--export([all/2]).
+%-export([all/1, all/2, rootCA/2, intermediateCA/3, endusers/3, enduser/3, revoke/3, gencrl/2, verify/3]).
--record(dn, {commonName,
+-record(config, {commonName,
organizationalUnitName = "Erlang OTP",
organizationName = "Ericsson AB",
localityName = "Stockholm",
countryName = "SE",
- emailAddress = "[email protected]"}).
+ emailAddress = "[email protected]",
+ default_bits = 2048,
+ v2_crls = true,
+ ecc_certs = false,
+ issuing_distribution_point = false,
+ crl_port = 8000,
+ openssl_cmd = "openssl"}).
+
+
+default_config() ->
+ #config{}.
+
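+%% Build a #config{} from a proplist of options; an unknown option or a
+%% value of the wrong type fails with a function_clause error.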
+make_config(Args) ->
+ make_config(Args, #config{}).
+
+make_config([], C) ->
+ C;
+make_config([{organizationalUnitName, Name}|T], C) when is_list(Name) ->
+ make_config(T, C#config{organizationalUnitName = Name});
+make_config([{organizationName, Name}|T], C) when is_list(Name) ->
+ make_config(T, C#config{organizationName = Name});
+make_config([{localityName, Name}|T], C) when is_list(Name) ->
+ make_config(T, C#config{localityName = Name});
+make_config([{countryName, Name}|T], C) when is_list(Name) ->
+ make_config(T, C#config{countryName = Name});
+make_config([{emailAddress, Name}|T], C) when is_list(Name) ->
+ make_config(T, C#config{emailAddress = Name});
+make_config([{default_bits, Bits}|T], C) when is_integer(Bits) ->
+ make_config(T, C#config{default_bits = Bits});
+make_config([{v2_crls, Bool}|T], C) when is_boolean(Bool) ->
+ make_config(T, C#config{v2_crls = Bool});
+make_config([{crl_port, Port}|T], C) when is_integer(Port) ->
+ make_config(T, C#config{crl_port = Port});
+make_config([{ecc_certs, Bool}|T], C) when is_boolean(Bool) ->
+ make_config(T, C#config{ecc_certs = Bool});
+make_config([{issuing_distribution_point, Bool}|T], C) when is_boolean(Bool) ->
+ make_config(T, C#config{issuing_distribution_point = Bool});
+make_config([{openssl_cmd, Cmd}|T], C) when is_list(Cmd) ->
+ make_config(T, C#config{openssl_cmd = Cmd}).
+
+
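+%% all/1 takes the two directories as a single argument list; all/2 uses
+%% the default #config{}, and all/3 also accepts a proplist which is
+%% converted with make_config/1.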
+all([DataDir, PrivDir]) ->
+ all(DataDir, PrivDir).
all(DataDir, PrivDir) ->
- OpenSSLCmd = "openssl",
+ all(DataDir, PrivDir, #config{}).
+
+all(DataDir, PrivDir, C) when is_list(C) ->
+ all(DataDir, PrivDir, make_config(C));
+all(DataDir, PrivDir, C = #config{}) ->
+ ok = filelib:ensure_dir(filename:join(PrivDir, "erlangCA")),
create_rnd(DataDir, PrivDir), % For all requests
- rootCA(PrivDir, OpenSSLCmd, "erlangCA"),
- intermediateCA(PrivDir, OpenSSLCmd, "otpCA", "erlangCA"),
- endusers(PrivDir, OpenSSLCmd, "otpCA", ["client", "server"]),
- collect_certs(PrivDir, ["erlangCA", "otpCA"], ["client", "server"]),
- %% Create keycert files
+ rootCA(PrivDir, "erlangCA", C),
+ intermediateCA(PrivDir, "otpCA", "erlangCA", C),
+ endusers(PrivDir, "otpCA", ["client", "server", "revoked"], C),
+ endusers(PrivDir, "erlangCA", ["localhost"], C),
+ %% Create keycert files
SDir = filename:join([PrivDir, "server"]),
SC = filename:join([SDir, "cert.pem"]),
SK = filename:join([SDir, "key.pem"]),
@@ -46,7 +94,14 @@ all(DataDir, PrivDir) ->
CK = filename:join([CDir, "key.pem"]),
CKC = filename:join([CDir, "keycert.pem"]),
append_files([CK, CC], CKC),
- remove_rnd(PrivDir).
+ RDir = filename:join([PrivDir, "revoked"]),
+ RC = filename:join([RDir, "cert.pem"]),
+ RK = filename:join([RDir, "key.pem"]),
+ RKC = filename:join([RDir, "keycert.pem"]),
+ revoke(PrivDir, "otpCA", "revoked", C),
+ append_files([RK, RC], RKC),
+ remove_rnd(PrivDir),
+ {ok, C}.
append_files(FileNames, ResultFileName) ->
{ok, ResultFile} = file:open(ResultFileName, [write]),
@@ -59,117 +114,182 @@ do_append_files([F|Fs], RF) ->
ok = file:write(RF, Data),
do_append_files(Fs, RF).
-rootCA(Root, OpenSSLCmd, Name) ->
- create_ca_dir(Root, Name, ca_cnf(Name)),
- DN = #dn{commonName = Name},
- create_self_signed_cert(Root, OpenSSLCmd, Name, req_cnf(DN)),
- ok.
+rootCA(Root, Name, C) ->
+ create_ca_dir(Root, Name, ca_cnf(C#config{commonName = Name})),
+ create_self_signed_cert(Root, Name, req_cnf(C#config{commonName = Name}), C),
+ file:copy(filename:join([Root, Name, "cert.pem"]), filename:join([Root, Name, "cacerts.pem"])),
+ gencrl(Root, Name, C).
-intermediateCA(Root, OpenSSLCmd, CA, ParentCA) ->
- CA = "otpCA",
- create_ca_dir(Root, CA, ca_cnf(CA)),
+intermediateCA(Root, CA, ParentCA, C) ->
+ create_ca_dir(Root, CA, ca_cnf(C#config{commonName = CA})),
CARoot = filename:join([Root, CA]),
- DN = #dn{commonName = CA},
CnfFile = filename:join([CARoot, "req.cnf"]),
- file:write_file(CnfFile, req_cnf(DN)),
- KeyFile = filename:join([CARoot, "private", "key.pem"]),
- ReqFile = filename:join([CARoot, "req.pem"]),
- create_req(Root, OpenSSLCmd, CnfFile, KeyFile, ReqFile),
+ file:write_file(CnfFile, req_cnf(C#config{commonName = CA})),
+ KeyFile = filename:join([CARoot, "private", "key.pem"]),
+ ReqFile = filename:join([CARoot, "req.pem"]),
+ create_req(Root, CnfFile, KeyFile, ReqFile, C),
CertFile = filename:join([CARoot, "cert.pem"]),
- sign_req(Root, OpenSSLCmd, ParentCA, "ca_cert", ReqFile, CertFile).
-
-endusers(Root, OpenSSLCmd, CA, Users) ->
- lists:foreach(fun(User) -> enduser(Root, OpenSSLCmd, CA, User) end, Users).
-
-enduser(Root, OpenSSLCmd, CA, User) ->
+ sign_req(Root, ParentCA, "ca_cert", ReqFile, CertFile, C),
+ CACertsFile = filename:join(CARoot, "cacerts.pem"),
+ file:copy(filename:join([Root, ParentCA, "cacerts.pem"]), CACertsFile),
+ %% append this CA's cert to the cacerts file
+ {ok, Bin} = file:read_file(CertFile),
+ {ok, FD} = file:open(CACertsFile, [append]),
+ file:write(FD, ["\n", Bin]),
+ file:close(FD),
+ gencrl(Root, CA, C).
+
+endusers(Root, CA, Users, C) ->
+ [enduser(Root, CA, User, C) || User <- Users].
+
+enduser(Root, CA, User, C) ->
UsrRoot = filename:join([Root, User]),
file:make_dir(UsrRoot),
CnfFile = filename:join([UsrRoot, "req.cnf"]),
- DN = #dn{commonName = User},
- file:write_file(CnfFile, req_cnf(DN)),
- KeyFile = filename:join([UsrRoot, "key.pem"]),
- ReqFile = filename:join([UsrRoot, "req.pem"]),
- create_req(Root, OpenSSLCmd, CnfFile, KeyFile, ReqFile),
+ file:write_file(CnfFile, req_cnf(C#config{commonName = User})),
+ KeyFile = filename:join([UsrRoot, "key.pem"]),
+ ReqFile = filename:join([UsrRoot, "req.pem"]),
+ create_req(Root, CnfFile, KeyFile, ReqFile, C),
+ %create_req(Root, CnfFile, KeyFile, ReqFile),
CertFileAllUsage = filename:join([UsrRoot, "cert.pem"]),
- sign_req(Root, OpenSSLCmd, CA, "user_cert", ReqFile, CertFileAllUsage),
+ sign_req(Root, CA, "user_cert", ReqFile, CertFileAllUsage, C),
CertFileDigitalSigOnly = filename:join([UsrRoot, "digital_signature_only_cert.pem"]),
- sign_req(Root, OpenSSLCmd, CA, "user_cert_digital_signature_only", ReqFile, CertFileDigitalSigOnly).
-
-collect_certs(Root, CAs, Users) ->
- Bins = lists:foldr(
- fun(CA, Acc) ->
- File = filename:join([Root, CA, "cert.pem"]),
- {ok, Bin} = file:read_file(File),
- [Bin, "\n" | Acc]
- end, [], CAs),
- lists:foreach(
- fun(User) ->
- File = filename:join([Root, User, "cacerts.pem"]),
- file:write_file(File, Bins)
- end, Users).
+ sign_req(Root, CA, "user_cert_digital_signature_only", ReqFile, CertFileDigitalSigOnly, C),
+ CACertsFile = filename:join(UsrRoot, "cacerts.pem"),
+ file:copy(filename:join([Root, CA, "cacerts.pem"]), CACertsFile),
+ ok.
-create_self_signed_cert(Root, OpenSSLCmd, CAName, Cnf) ->
+revoke(Root, CA, User, C) ->
+ UsrCert = filename:join([Root, User, "cert.pem"]),
+ CACnfFile = filename:join([Root, CA, "ca.cnf"]),
+ Cmd = [C#config.openssl_cmd, " ca"
+ " -revoke ", UsrCert,
+ [" -crl_reason keyCompromise" || C#config.v2_crls ],
+ " -config ", CACnfFile],
+ Env = [{"ROOTDIR", filename:absname(Root)}],
+ cmd(Cmd, Env),
+ gencrl(Root, CA, C).
+
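+%% Regenerate the CA's CRL (crl.pem) with 24 hours validity.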
+gencrl(Root, CA, C) ->
+ CACnfFile = filename:join([Root, CA, "ca.cnf"]),
+ CACRLFile = filename:join([Root, CA, "crl.pem"]),
+ Cmd = [C#config.openssl_cmd, " ca"
+ " -gencrl ",
+ " -crlhours 24",
+ " -out ", CACRLFile,
+ " -config ", CACnfFile],
+ Env = [{"ROOTDIR", filename:absname(Root)}],
+ cmd(Cmd, Env).
+
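+%% Verify a user certificate against the CA chain and CRL with
+%% "openssl verify"; returns 'invalid' if the command exits with an error.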
+verify(Root, CA, User, C) ->
+ CAFile = filename:join([Root, User, "cacerts.pem"]),
+ CACRLFile = filename:join([Root, CA, "crl.pem"]),
+ CertFile = filename:join([Root, User, "cert.pem"]),
+ Cmd = [C#config.openssl_cmd, " verify"
+ " -CAfile ", CAFile,
+ " -CRLfile ", CACRLFile, %% this is undocumented, but seems to work
+ " -crl_check ",
+ CertFile],
+ Env = [{"ROOTDIR", filename:absname(Root)}],
+ try cmd(Cmd, Env) catch
+ exit:{eval_cmd, _, _} ->
+ invalid
+ end.
+
+create_self_signed_cert(Root, CAName, Cnf, C = #config{ecc_certs = true}) ->
CARoot = filename:join([Root, CAName]),
CnfFile = filename:join([CARoot, "req.cnf"]),
file:write_file(CnfFile, Cnf),
- KeyFile = filename:join([CARoot, "private", "key.pem"]),
- CertFile = filename:join([CARoot, "cert.pem"]),
- Cmd = [OpenSSLCmd, " req"
+ KeyFile = filename:join([CARoot, "private", "key.pem"]),
+ CertFile = filename:join([CARoot, "cert.pem"]),
+ Cmd = [C#config.openssl_cmd, " ecparam"
+ " -out ", KeyFile,
+ " -name secp521r1 ",
+ %" -name sect283k1 ",
+ " -genkey "],
+ Env = [{"ROOTDIR", filename:absname(Root)}],
+ cmd(Cmd, Env),
+
+ Cmd2 = [C#config.openssl_cmd, " req"
+ " -new"
+ " -x509"
+ " -config ", CnfFile,
+ " -key ", KeyFile,
+ " -outform PEM ",
+ " -out ", CertFile],
+ cmd(Cmd2, Env);
+create_self_signed_cert(Root, CAName, Cnf, C) ->
+ CARoot = filename:join([Root, CAName]),
+ CnfFile = filename:join([CARoot, "req.cnf"]),
+ file:write_file(CnfFile, Cnf),
+ KeyFile = filename:join([CARoot, "private", "key.pem"]),
+ CertFile = filename:join([CARoot, "cert.pem"]),
+ Cmd = [C#config.openssl_cmd, " req"
" -new"
" -x509"
" -config ", CnfFile,
" -keyout ", KeyFile,
- " -out ", CertFile],
- Env = [{"ROOTDIR", Root}],
- cmd(Cmd, Env),
- fix_key_file(OpenSSLCmd, KeyFile).
-
-% openssl 1.0 generates key files in pkcs8 format by default and we don't handle this format
-fix_key_file(OpenSSLCmd, KeyFile) ->
- KeyFileTmp = KeyFile ++ ".tmp",
- Cmd = [OpenSSLCmd, " rsa",
- " -in ",
- KeyFile,
- " -out ",
- KeyFileTmp],
- cmd(Cmd, []),
- ok = file:rename(KeyFileTmp, KeyFile).
+ " -outform PEM",
+ " -out ", CertFile],
+ Env = [{"ROOTDIR", filename:absname(Root)}],
+ cmd(Cmd, Env).
+
create_ca_dir(Root, CAName, Cnf) ->
CARoot = filename:join([Root, CAName]),
+ ok = filelib:ensure_dir(CARoot),
file:make_dir(CARoot),
create_dirs(CARoot, ["certs", "crl", "newcerts", "private"]),
create_rnd(Root, filename:join([CAName, "private"])),
create_files(CARoot, [{"serial", "01\n"},
+ {"crlnumber", "01"},
{"index.txt", ""},
{"ca.cnf", Cnf}]).
-create_req(Root, OpenSSLCmd, CnfFile, KeyFile, ReqFile) ->
- Cmd = [OpenSSLCmd, " req"
+create_req(Root, CnfFile, KeyFile, ReqFile, C = #config{ecc_certs = true}) ->
+ Cmd = [C#config.openssl_cmd, " ecparam"
+ " -out ", KeyFile,
+ " -name secp521r1 ",
+ %" -name sect283k1 ",
+ " -genkey "],
+ Env = [{"ROOTDIR", filename:absname(Root)}],
+ cmd(Cmd, Env),
+ Cmd2 = [C#config.openssl_cmd, " req"
+ " -new ",
+ " -key ", KeyFile,
+ " -outform PEM ",
+ " -out ", ReqFile,
+ " -config ", CnfFile],
+ cmd(Cmd2, Env);
+ %fix_key_file(KeyFile).
+create_req(Root, CnfFile, KeyFile, ReqFile, C) ->
+ Cmd = [C#config.openssl_cmd, " req"
" -new"
" -config ", CnfFile,
- " -keyout ", KeyFile,
- " -out ", ReqFile],
- Env = [{"ROOTDIR", Root}],
- cmd(Cmd, Env),
- fix_key_file(OpenSSLCmd, KeyFile).
+ " -outform PEM ",
+ " -keyout ", KeyFile,
+ " -out ", ReqFile],
+ Env = [{"ROOTDIR", filename:absname(Root)}],
+ cmd(Cmd, Env).
+ %fix_key_file(KeyFile).
+
-sign_req(Root, OpenSSLCmd, CA, CertType, ReqFile, CertFile) ->
+sign_req(Root, CA, CertType, ReqFile, CertFile, C) ->
CACnfFile = filename:join([Root, CA, "ca.cnf"]),
- Cmd = [OpenSSLCmd, " ca"
+ Cmd = [C#config.openssl_cmd, " ca"
" -batch"
" -notext"
- " -config ", CACnfFile,
+ " -config ", CACnfFile,
" -extensions ", CertType,
- " -in ", ReqFile,
+ " -in ", ReqFile,
" -out ", CertFile],
- Env = [{"ROOTDIR", Root}],
+ Env = [{"ROOTDIR", filename:absname(Root)}],
cmd(Cmd, Env).
-
+
%%
%% Misc
%%
-
+
create_dirs(Root, Dirs) ->
lists:foreach(fun(Dir) ->
file:make_dir(filename:join([Root, Dir])) end,
@@ -192,30 +312,30 @@ remove_rnd(Dir) ->
cmd(Cmd, Env) ->
FCmd = lists:flatten(Cmd),
- Port = open_port({spawn, FCmd}, [stream, eof, exit_status, stderr_to_stdout,
+ Port = open_port({spawn, FCmd}, [stream, eof, exit_status, stderr_to_stdout,
{env, Env}]),
- eval_cmd(Port).
+ eval_cmd(Port, FCmd).
-eval_cmd(Port) ->
- receive
+eval_cmd(Port, Cmd) ->
+ receive
{Port, {data, _}} ->
- eval_cmd(Port);
+ eval_cmd(Port, Cmd);
{Port, eof} ->
ok
end,
receive
{Port, {exit_status, Status}} when Status /= 0 ->
%% io:fwrite("exit status: ~w~n", [Status]),
- exit({eval_cmd, Status})
+ exit({eval_cmd, Cmd, Status})
after 0 ->
ok
end.
%%
-%% Contents of configuration files
+%% Contents of configuration files
%%
-req_cnf(DN) ->
+req_cnf(C) ->
["# Purpose: Configuration for requests (end users and CAs)."
"\n"
"ROOTDIR = $ENV::ROOTDIR\n"
@@ -224,10 +344,10 @@ req_cnf(DN) ->
"[req]\n"
"input_password = secret\n"
"output_password = secret\n"
- "default_bits = 1024\n"
+ "default_bits = ", integer_to_list(C#config.default_bits), "\n"
"RANDFILE = $ROOTDIR/RAND\n"
"encrypt_key = no\n"
- "default_md = sha1\n"
+ "default_md = md5\n"
"#string_mask = pkix\n"
"x509_extensions = ca_ext\n"
"prompt = no\n"
@@ -235,12 +355,12 @@ req_cnf(DN) ->
"\n"
"[name]\n"
- "commonName = ", DN#dn.commonName, "\n"
- "organizationalUnitName = ", DN#dn.organizationalUnitName, "\n"
- "organizationName = ", DN#dn.organizationName, "\n"
- "localityName = ", DN#dn.localityName, "\n"
- "countryName = ", DN#dn.countryName, "\n"
- "emailAddress = ", DN#dn.emailAddress, "\n"
+ "commonName = ", C#config.commonName, "\n"
+ "organizationalUnitName = ", C#config.organizationalUnitName, "\n"
+ "organizationName = ", C#config.organizationName, "\n"
+ "localityName = ", C#config.localityName, "\n"
+ "countryName = ", C#config.countryName, "\n"
+ "emailAddress = ", C#config.emailAddress, "\n"
"\n"
"[ca_ext]\n"
@@ -249,8 +369,7 @@ req_cnf(DN) ->
"subjectKeyIdentifier = hash\n"
"subjectAltName = email:copy\n"].
-
-ca_cnf(CA) ->
+ca_cnf(C) ->
["# Purpose: Configuration for CAs.\n"
"\n"
"ROOTDIR = $ENV::ROOTDIR\n"
@@ -258,21 +377,23 @@ ca_cnf(CA) ->
"\n"
"[ca]\n"
- "dir = $ROOTDIR/", CA, "\n"
+ "dir = $ROOTDIR/", C#config.commonName, "\n"
"certs = $dir/certs\n"
"crl_dir = $dir/crl\n"
"database = $dir/index.txt\n"
"new_certs_dir = $dir/newcerts\n"
"certificate = $dir/cert.pem\n"
"serial = $dir/serial\n"
- "crl = $dir/crl.pem\n"
+ "crl = $dir/crl.pem\n",
+ ["crlnumber = $dir/crlnumber\n" || C#config.v2_crls],
"private_key = $dir/private/key.pem\n"
"RANDFILE = $dir/private/RAND\n"
"\n"
- "x509_extensions = user_cert\n"
+ "x509_extensions = user_cert\n",
+ ["crl_extensions = crl_ext\n" || C#config.v2_crls],
"unique_subject = no\n"
"default_days = 3600\n"
- "default_md = sha1\n"
+ "default_md = md5\n"
"preserve = no\n"
"policy = policy_match\n"
"\n"
@@ -286,6 +407,13 @@ ca_cnf(CA) ->
"emailAddress = supplied\n"
"\n"
+ "[crl_ext]\n"
+ "authorityKeyIdentifier=keyid:always,issuer:always\n",
+ ["issuingDistributionPoint=critical, @idpsec\n" || C#config.issuing_distribution_point],
+
+ "[idpsec]\n"
+ "fullname=URI:http://localhost:8000/",C#config.commonName,"/crl.pem\n"
+
"[user_cert]\n"
"basicConstraints = CA:false\n"
"keyUsage = nonRepudiation, digitalSignature, keyEncipherment\n"
@@ -293,6 +421,12 @@ ca_cnf(CA) ->
"authorityKeyIdentifier = keyid,issuer:always\n"
"subjectAltName = email:copy\n"
"issuerAltName = issuer:copy\n"
+ "crlDistributionPoints=@crl_section\n"
+
+ "[crl_section]\n"
+ %% intentionally invalid
+ "URI.1=http://localhost/",C#config.commonName,"/crl.pem\n"
+ "URI.2=http://localhost:",integer_to_list(C#config.crl_port),"/",C#config.commonName,"/crl.pem\n"
"\n"
"[user_cert_digital_signature_only]\n"
@@ -310,4 +444,7 @@ ca_cnf(CA) ->
"subjectKeyIdentifier = hash\n"
"authorityKeyIdentifier = keyid:always,issuer:always\n"
"subjectAltName = email:copy\n"
- "issuerAltName = issuer:copy\n"].
+ "issuerAltName = issuer:copy\n"
+ "crlDistributionPoints=@crl_section\n"
+ ].
+
diff --git a/lib/eldap/vsn.mk b/lib/eldap/vsn.mk
index 432ba2e742..105a2bcdbb 100644
--- a/lib/eldap/vsn.mk
+++ b/lib/eldap/vsn.mk
@@ -1 +1 @@
-ELDAP_VSN = 1.1
+ELDAP_VSN = 1.2
diff --git a/lib/erl_docgen/priv/bin/specs_gen.escript b/lib/erl_docgen/priv/bin/specs_gen.escript
index 156311565c..e8a8f14e3a 100644
--- a/lib/erl_docgen/priv/bin/specs_gen.escript
+++ b/lib/erl_docgen/priv/bin/specs_gen.escript
@@ -97,7 +97,7 @@ read_file(File, Opts) ->
edoc:read_source(File, Opts).
extract(File, Forms, Opts) ->
- Env = edoc_lib:get_doc_env([], [], [], _Opts=[]),
+ Env = edoc_lib:get_doc_env([], [], _Opts=[]),
{_Module, Doc} = edoc_extract:source(Forms, File, Env, Opts),
Doc.
diff --git a/lib/erl_docgen/priv/bin/xml_from_edoc.escript b/lib/erl_docgen/priv/bin/xml_from_edoc.escript
index 65a580dca2..007546e7ba 100755
--- a/lib/erl_docgen/priv/bin/xml_from_edoc.escript
+++ b/lib/erl_docgen/priv/bin/xml_from_edoc.escript
@@ -117,7 +117,7 @@ users_guide(File, Args) ->
Text = edoc_lib:run_layout(F, Opts),
OutFile = "chapter" ++ Args#args.suffix,
- edoc_lib:write_file(Text, ".", OutFile, '', Encoding);
+ edoc_lib:write_file(Text, ".", OutFile, Encoding);
false ->
io:format("~s: not a regular file\n", [File]),
usage()
diff --git a/lib/erl_docgen/priv/css/otp_doc.css b/lib/erl_docgen/priv/css/otp_doc.css
index c56de378f4..0b531db701 100644
--- a/lib/erl_docgen/priv/css/otp_doc.css
+++ b/lib/erl_docgen/priv/css/otp_doc.css
@@ -66,7 +66,7 @@ a:visited { color: blue; text-decoration: none }
span.bold_code { font-family: Courier, monospace; font-weight: bold }
span.code { font-family: Courier, monospace; font-weight: normal }
-.note, .warning {
+.note, .warning, .do, .dont {
border: solid black 1px;
margin: 1em 3em;
}
@@ -96,10 +96,41 @@ span.code { font-family: Courier, monospace; font-weight: normal }
font-size: 90%;
padding: 5px 10px;
}
+.do .label {
+ background: #30d42a;
+ color: white;
+ font-weight: bold;
+ padding: 5px 10px;
+}
+.do .content {
+ background: #eafeea;
+ color: black;
+ line-height: 120%;
+ font-size: 90%;
+ padding: 5px 10px;
+}
+.dont .label {
+ background: #C00;
+ color: white;
+ font-weight: bold;
+ padding: 5px 10px;
+}
+.dont .content {
+ background: #FFF0F0;
+ color: black;
+ line-height: 120%;
+ font-size: 90%;
+ padding: 5px 10px;
+}
.example {
background-color:#eeeeff;
padding: 0px 10px;
}
+.extrafrontpageinfo {
+ color: #C00;
+ font-weight: bold;
+ font-size: 120%;
+}
pre { font-family: Courier, monospace; font-weight: normal }
diff --git a/lib/erl_docgen/priv/dtd/application.dtd b/lib/erl_docgen/priv/dtd/application.dtd
index 8a1e8832ec..fcadaced72 100644
--- a/lib/erl_docgen/priv/dtd/application.dtd
+++ b/lib/erl_docgen/priv/dtd/application.dtd
@@ -24,6 +24,6 @@
%common.header;
<!ELEMENT application (header,description?,include+) >
-<!ELEMENT description (%block;|quote|br|marker|warning|note)* >
+<!ELEMENT description (%block;|quote|br|marker|warning|note|dont|do)* >
<!ELEMENT include EMPTY >
<!ATTLIST include file CDATA #REQUIRED>
diff --git a/lib/erl_docgen/priv/dtd/book.dtd b/lib/erl_docgen/priv/dtd/book.dtd
index bb89a6d255..ef723a9eed 100644
--- a/lib/erl_docgen/priv/dtd/book.dtd
+++ b/lib/erl_docgen/priv/dtd/book.dtd
@@ -38,7 +38,7 @@
<!ELEMENT pagetext (#PCDATA) >
<!ELEMENT preamble (contents?,preface?) >
-<!ELEMENT preface (title?,(%block;|quote|br|marker|warning|note|table)*) >
+<!ELEMENT preface (title?,(%block;|quote|br|marker|warning|note|dont|do|table)*) >
<!ELEMENT insidecover (#PCDATA|br|theheader|vfill|vspace|tt|bold|
include)* >
@@ -67,7 +67,7 @@
<!ELEMENT onepart (title?,description?,include+) >
<!ATTLIST onepart lift (yes|no) "no" >
-<!ELEMENT description (%block;|quote|br|marker|warning|note)* >
+<!ELEMENT description (%block;|quote|br|marker|warning|note|dont|do)* >
<!ELEMENT include EMPTY >
<!ATTLIST include file CDATA #REQUIRED>
diff --git a/lib/erl_docgen/priv/dtd/chapter.dtd b/lib/erl_docgen/priv/dtd/chapter.dtd
index eb2c96b04f..4beff6cc54 100644
--- a/lib/erl_docgen/priv/dtd/chapter.dtd
+++ b/lib/erl_docgen/priv/dtd/chapter.dtd
@@ -29,8 +29,8 @@
<!-- Structure -->
-<!ELEMENT chapter (header,(%block;|quote|warning|note|br|
+<!ELEMENT chapter (header,(%block;|quote|warning|note|dont|do|br|
image|marker|table)*,section+) >
<!ELEMENT section (marker*,title,
- (%block;|quote|warning|note|br|image|marker|
+ (%block;|quote|warning|note|dont|do|br|image|marker|
table|section)*) >
diff --git a/lib/erl_docgen/priv/dtd/common.dtd b/lib/erl_docgen/priv/dtd/common.dtd
index f999ef8ea4..92d814e0f1 100644
--- a/lib/erl_docgen/priv/dtd/common.dtd
+++ b/lib/erl_docgen/priv/dtd/common.dtd
@@ -34,6 +34,8 @@
<!ELEMENT quote (p)* >
<!ELEMENT warning (%block;|quote|br|marker)* >
<!ELEMENT note (%block;|quote|br|marker)* >
+<!ELEMENT dont (%block;|quote|br|marker)* >
+<!ELEMENT do (%block;|quote|br|marker)* >
<!ELEMENT c (#PCDATA) >
<!ELEMENT em (#PCDATA|c)* >
diff --git a/lib/erl_docgen/priv/dtd/common.refs.dtd b/lib/erl_docgen/priv/dtd/common.refs.dtd
index 93592607df..a08b9e89d4 100644
--- a/lib/erl_docgen/priv/dtd/common.refs.dtd
+++ b/lib/erl_docgen/priv/dtd/common.refs.dtd
@@ -24,7 +24,7 @@
<!ENTITY % common.header SYSTEM "common.header.dtd" >
%common.header;
-<!ELEMENT description (%block;|quote|br|marker|warning|note)* >
+<!ELEMENT description (%block;|quote|br|marker|warning|note|dont|do)* >
<!ELEMENT funcs (func)+ >
<!ELEMENT func (name+,type_desc*,fsummary,type?,desc?) >
<!-- ELEMENT name is defined in each ref dtd -->
@@ -34,12 +34,12 @@
name_i CDATA #IMPLIED>
<!ELEMENT v (#PCDATA) >
<!ELEMENT d (#PCDATA|c|em)* >
-<!ELEMENT desc (%block;|quote|br|marker|warning|note|anno)* >
+<!ELEMENT desc (%block;|quote|br|marker|warning|note|dont|do|anno)* >
<!ELEMENT authors (aname,email)+ >
<!ELEMENT aname (#PCDATA) >
<!ELEMENT email (#PCDATA) >
<!ELEMENT section (marker*,title,(%block;|quote|br|marker|
- warning|note)*) >
+ warning|note|dont|do)*) >
<!ELEMENT datatypes (datatype)+ >
<!ELEMENT datatype (name+,desc?) >
<!ELEMENT type_desc (#PCDATA) >
diff --git a/lib/erl_docgen/priv/dtd/part.dtd b/lib/erl_docgen/priv/dtd/part.dtd
index 3f97199042..79f68c415d 100644
--- a/lib/erl_docgen/priv/dtd/part.dtd
+++ b/lib/erl_docgen/priv/dtd/part.dtd
@@ -24,6 +24,6 @@
%common.header;
<!ELEMENT part (header,description?,include+) >
-<!ELEMENT description (%block;|quote|br|marker|warning|note)* >
+<!ELEMENT description (%block;|quote|br|marker|warning|note|dont|do)* >
<!ELEMENT include EMPTY >
<!ATTLIST include file CDATA #REQUIRED>
diff --git a/lib/erl_docgen/priv/dtd/report.dtd b/lib/erl_docgen/priv/dtd/report.dtd
index 3d07e6e5a7..eb463f8867 100644
--- a/lib/erl_docgen/priv/dtd/report.dtd
+++ b/lib/erl_docgen/priv/dtd/report.dtd
@@ -48,7 +48,7 @@
<!ELEMENT file (#PCDATA) >
<!ELEMENT section (marker*,title,
- (%block;|quote|warning|note|br|image|marker|
+ (%block;|quote|warning|note|dont|do|br|image|marker|
table|section)*) >
<!ELEMENT p (%inline;|index)* >
<!ELEMENT pre (#PCDATA|seealso|url|input)* >
@@ -58,6 +58,8 @@
<!ELEMENT quote (p)* >
<!ELEMENT warning (%block;|quote|br|image|marker|table)* >
<!ELEMENT note (%block;|quote|br|image|marker|table)* >
+<!ELEMENT dont (%block;|quote|br|image|marker|table)* >
+<!ELEMENT do (%block;|quote|br|image|marker|table)* >
<!ELEMENT i (#PCDATA|b|c|em)* >
<!ELEMENT b (#PCDATA|i|c|em)* >
<!ELEMENT c (#PCDATA) >
diff --git a/lib/erl_docgen/priv/xsl/db_html.xsl b/lib/erl_docgen/priv/xsl/db_html.xsl
index ab5f24c406..3529924957 100644
--- a/lib/erl_docgen/priv/xsl/db_html.xsl
+++ b/lib/erl_docgen/priv/xsl/db_html.xsl
@@ -952,6 +952,36 @@
</div>
</xsl:template>
+ <!-- Do -->
+ <xsl:template match="do">
+ <xsl:param name="chapnum"/>
+ <div class="do">
+ <div class="label">Do</div>
+ <div class="content">
+ <p>
+ <xsl:apply-templates>
+ <xsl:with-param name="chapnum" select="$chapnum"/>
+ </xsl:apply-templates>
+ </p>
+ </div>
+ </div>
+ </xsl:template>
+
+ <!-- Dont -->
+ <xsl:template match="dont">
+ <xsl:param name="chapnum"/>
+ <div class="dont">
+ <div class="label">Don't</div>
+ <div class="content">
+ <p>
+ <xsl:apply-templates>
+ <xsl:with-param name="chapnum" select="$chapnum"/>
+ </xsl:apply-templates>
+ </p>
+ </div>
+ </div>
+ </xsl:template>
+
<!-- Paragraph -->
<xsl:template match="p">
<p>
@@ -1102,6 +1132,9 @@
<center><h4>Version <xsl:value-of select="$appver"/></h4></center>
<center><h4><xsl:value-of select="$gendate"/></h4></center>
+ <div class="extrafrontpageinfo">
+ <center><xsl:value-of select="$extra_front_page_info"/></center>
+ </div>
<xsl:apply-templates select="chapter"/>
@@ -1267,6 +1300,9 @@
<center><h4>Version <xsl:value-of select="$appver"/></h4></center>
<center><h4><xsl:value-of select="$gendate"/></h4></center>
+ <div class="extrafrontpageinfo">
+ <center><xsl:value-of select="$extra_front_page_info"/></center>
+ </div>
<xsl:apply-templates select="erlref|cref|comref|fileref|appref"/>
@@ -2089,6 +2125,9 @@
<center><h4>Version <xsl:value-of select="$appver"/></h4></center>
<center><h4><xsl:value-of select="$gendate"/></h4></center>
+ <div class="extrafrontpageinfo">
+ <center><xsl:value-of select="$extra_front_page_info"/></center>
+ </div>
<xsl:apply-templates select="chapter"/>
diff --git a/lib/erl_docgen/priv/xsl/db_man.xsl b/lib/erl_docgen/priv/xsl/db_man.xsl
index 3bcdd11c35..0caaba560f 100644
--- a/lib/erl_docgen/priv/xsl/db_man.xsl
+++ b/lib/erl_docgen/priv/xsl/db_man.xsl
@@ -543,7 +543,29 @@
<xsl:text>&#10;</xsl:text>
</xsl:template>
- <xsl:template match="warning/p | note/p">
+ <!-- Do -->
+ <xsl:template match="do">
+ <xsl:text>&#10;.LP&#10;</xsl:text>
+ <xsl:text>&#10;.RS -4</xsl:text>
+ <xsl:text>&#10;.B&#10;</xsl:text>
+ <xsl:text>Do:</xsl:text>
+ <xsl:text>&#10;.RE</xsl:text>
+ <xsl:apply-templates/>
+ <xsl:text>&#10;</xsl:text>
+ </xsl:template>
+
+ <!-- Dont -->
+ <xsl:template match="dont">
+ <xsl:text>&#10;.LP&#10;</xsl:text>
+ <xsl:text>&#10;.RS -4</xsl:text>
+ <xsl:text>&#10;.B&#10;</xsl:text>
+ <xsl:text>Dont:</xsl:text>
+ <xsl:text>&#10;.RE</xsl:text>
+ <xsl:apply-templates/>
+ <xsl:text>&#10;</xsl:text>
+ </xsl:template>
+
+ <xsl:template match="warning/p | note/p | dont/p | do/p">
<xsl:variable name="content">
<xsl:text>&#10;</xsl:text>
<xsl:apply-templates/>
diff --git a/lib/erl_docgen/priv/xsl/db_pdf.xsl b/lib/erl_docgen/priv/xsl/db_pdf.xsl
index c15b16eb5b..ccf96053aa 100644
--- a/lib/erl_docgen/priv/xsl/db_pdf.xsl
+++ b/lib/erl_docgen/priv/xsl/db_pdf.xsl
@@ -671,6 +671,10 @@
<fo:block xsl:use-attribute-sets="cover.version">
<xsl:value-of select="$gendate"/>
</fo:block>
+ <fo:block xsl:use-attribute-sets="cover.extrainfo">
+ <xsl:value-of select="$extra_front_page_info"/>
+ </fo:block>
+
<!-- Inner cover (copyright notice) -->
<fo:block break-before="page"
@@ -1138,6 +1142,31 @@
</fo:block>
</xsl:template>
+ <!-- Do -->
+ <xsl:template match="do">
+ <xsl:param name="partnum"/>
+ <fo:block xsl:use-attribute-sets="do">
+ <fo:block xsl:use-attribute-sets="note-warning-title">
+ <xsl:text>Do:</xsl:text>
+ </fo:block>
+ <xsl:apply-templates>
+ <xsl:with-param name="partnum" select="$partnum"/>
+ </xsl:apply-templates>
+ </fo:block>
+ </xsl:template>
+
+ <!-- Dont -->
+ <xsl:template match="dont">
+ <xsl:param name="partnum"/>
+ <fo:block xsl:use-attribute-sets="dont">
+ <fo:block xsl:use-attribute-sets="note-warning-title">
+ <xsl:text>Don't:</xsl:text>
+ </fo:block>
+ <xsl:apply-templates>
+ <xsl:with-param name="partnum" select="$partnum"/>
+ </xsl:apply-templates>
+ </fo:block>
+ </xsl:template>
<!-- Paragraph -->
<xsl:template match="p">
diff --git a/lib/erl_docgen/priv/xsl/db_pdf_params.xsl b/lib/erl_docgen/priv/xsl/db_pdf_params.xsl
index 2e3b22acf4..a4814581c2 100644
--- a/lib/erl_docgen/priv/xsl/db_pdf_params.xsl
+++ b/lib/erl_docgen/priv/xsl/db_pdf_params.xsl
@@ -98,6 +98,14 @@
<xsl:attribute name="text-align">end</xsl:attribute>
</xsl:attribute-set>
+ <xsl:attribute-set name="cover.extrainfo">
+ <xsl:attribute name="padding-before">2.5em</xsl:attribute>
+ <xsl:attribute name="font-size">1.33em</xsl:attribute>
+ <xsl:attribute name="font-weight">bold</xsl:attribute>
+ <xsl:attribute name="color">#C00</xsl:attribute>
+ <xsl:attribute name="text-align">end</xsl:attribute>
+ </xsl:attribute-set>
+
<xsl:attribute-set name="cover.inner.copyright">
<xsl:attribute name="border-before-style">solid</xsl:attribute>
<xsl:attribute name="border-before-width">1pt</xsl:attribute>
@@ -289,6 +297,33 @@
<xsl:attribute name="keep-together.within-page">always</xsl:attribute>
</xsl:attribute-set>
+<xsl:attribute-set name="do">
+ <xsl:attribute name="background-color">#d0fed0</xsl:attribute>
+ <xsl:attribute name="space-after">1em</xsl:attribute>
+ <xsl:attribute name="space-before">2em</xsl:attribute>
+ <xsl:attribute name="text-align">justify</xsl:attribute>
+ <xsl:attribute name="padding-before">1em</xsl:attribute>
+ <xsl:attribute name="padding-after">0.3em</xsl:attribute>
+ <xsl:attribute name="padding-left">0.5em</xsl:attribute>
+ <xsl:attribute name="padding-right">0.5em</xsl:attribute>
+ <xsl:attribute name="margin-left">0.5em</xsl:attribute>
+ <xsl:attribute name="margin-right">0.5em</xsl:attribute>
+ <xsl:attribute name="keep-together.within-page">always</xsl:attribute>
+ </xsl:attribute-set>
+
+<xsl:attribute-set name="dont">
+ <xsl:attribute name="background-color">#ffd6d6</xsl:attribute>
+ <xsl:attribute name="space-after">1em</xsl:attribute>
+ <xsl:attribute name="space-before">2em</xsl:attribute>
+ <xsl:attribute name="text-align">justify</xsl:attribute>
+ <xsl:attribute name="padding-before">1em</xsl:attribute>
+ <xsl:attribute name="padding-after">0.3em</xsl:attribute>
+ <xsl:attribute name="padding-left">0.5em</xsl:attribute>
+ <xsl:attribute name="padding-right">0.5em</xsl:attribute>
+ <xsl:attribute name="margin-left">0.5em</xsl:attribute>
+ <xsl:attribute name="margin-right">0.5em</xsl:attribute>
+ <xsl:attribute name="keep-together.within-page">always</xsl:attribute>
+ </xsl:attribute-set>
<xsl:attribute-set name="note-warning-title">
<xsl:attribute name="font-size">1.33em</xsl:attribute>
diff --git a/lib/erl_docgen/src/docgen_otp_specs.erl b/lib/erl_docgen/src/docgen_otp_specs.erl
index e2eee2b3c0..b62e69529b 100644
--- a/lib/erl_docgen/src/docgen_otp_specs.erl
+++ b/lib/erl_docgen/src/docgen_otp_specs.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -202,7 +202,8 @@ t_clause(Name, Type) ->
pp_clause(Pre, Type) ->
Types = ot_utype([Type]),
Atom = lists:duplicate(iolist_size(Pre), $a),
- L1 = erl_pp:attribute({attribute,0,spec,{{list_to_atom(Atom),0},[Types]}}),
+ Attr = {attribute,0,spec,{{list_to_atom(Atom),0},[Types]}},
+ L1 = erl_pp:attribute(erl_parse:new_anno(Attr)),
"-spec " ++ L2 = lists:flatten(L1),
L3 = Pre ++ lists:nthtail(length(Atom), L2),
re:replace(L3, "\n ", "\n", [{return,list},global]).
@@ -222,7 +223,8 @@ format_type(_Name, Type, _Opts) ->
pp_type(Prefix, Type) ->
Atom = list_to_atom(lists:duplicate(iolist_size(Prefix), $a)),
- L1 = erl_pp:attribute({attribute,0,type,{Atom,ot_utype(Type),[]}}),
+ Attr = {attribute,0,type,{Atom,ot_utype(Type),[]}},
+ L1 = erl_pp:attribute(erl_parse:new_anno(Attr)),
{L2,N} = case lists:dropwhile(fun(C) -> C =/= $: end, lists:flatten(L1)) of
":: " ++ L3 -> {L3,9}; % compensation for extra "()" and ":"
"::\n" ++ L3 -> {"\n"++L3,6}
@@ -569,8 +571,8 @@ ot_var(E) ->
{var,0,list_to_atom(get_attrval(name, E))}.
ot_atom(E) ->
- {ok, [Atom], _} = erl_scan:string(get_attrval(value, E), 0),
- Atom.
+ {ok, [{atom,A,Name}], _} = erl_scan:string(get_attrval(value, E), 0),
+ {atom,erl_anno:line(A),Name}.
ot_integer(E) ->
{integer,0,list_to_integer(get_attrval(value, E))}.
diff --git a/lib/erl_docgen/src/erl_docgen.app.src b/lib/erl_docgen/src/erl_docgen.app.src
index e2830b2692..d63d880d89 100644
--- a/lib/erl_docgen/src/erl_docgen.app.src
+++ b/lib/erl_docgen/src/erl_docgen.app.src
@@ -9,6 +9,6 @@
{registered,[]},
{applications, [kernel,stdlib]},
{env, []},
- {runtime_dependencies, ["xmerl-1.3.7","stdlib-2.0","edoc-0.7.13","erts-6.0"]}
+ {runtime_dependencies, ["xmerl-1.3.7","stdlib-2.5","edoc-0.7.13","erts-6.0"]}
]
}.
diff --git a/lib/erl_docgen/vsn.mk b/lib/erl_docgen/vsn.mk
index 8957d6ac40..2abd3d2b7e 100644
--- a/lib/erl_docgen/vsn.mk
+++ b/lib/erl_docgen/vsn.mk
@@ -1 +1 @@
-ERL_DOCGEN_VSN = 0.3.7
+ERL_DOCGEN_VSN = 0.4
diff --git a/lib/erl_interface/doc/src/ei.xml b/lib/erl_interface/doc/src/ei.xml
index 90495eebd6..32e0e0e2d8 100644
--- a/lib/erl_interface/doc/src/ei.xml
+++ b/lib/erl_interface/doc/src/ei.xml
@@ -202,6 +202,9 @@ typedef enum {
<desc>
<p>Encodes a double-precision (64 bit) floating point number in
the binary format.</p>
+ <p>
+ The function returns <c><![CDATA[-1]]></c> if the floating point number is not finite.
+ </p>
</desc>
</func>
<func>
diff --git a/lib/erl_interface/doc/src/erl_eterm.xml b/lib/erl_interface/doc/src/erl_eterm.xml
index 429f77501c..2152192696 100644
--- a/lib/erl_interface/doc/src/erl_eterm.xml
+++ b/lib/erl_interface/doc/src/erl_eterm.xml
@@ -371,9 +371,11 @@ iohead ::= Binary
<p><c><![CDATA[f]]></c> is a value to be converted to an Erlang float.</p>
<p></p>
<p>The function returns an Erlang float object with the value
- specified in <c><![CDATA[f]]></c>.</p>
+ specified in <c><![CDATA[f]]></c> or <c><![CDATA[NULL]]></c> if
+ <c><![CDATA[f]]></c> is not finite.
+ </p>
<p><c><![CDATA[ERL_FLOAT_VALUE(t)]]></c> can be used to retrieve the
- value from an Erlang float.</p>
+ value from an Erlang float.</p>
</desc>
</func>
<func>
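Editor note: correspondingly, legacy code using erl_mk_float() should now check for a NULL return before using the term. A minimal, illustrative check (helper name is made up):

    #include "erl_interface.h"

    /* Returns 0 and writes the term to *out, or -1 if d is not finite. */
    static int mk_finite_float(double d, ETERM **out)
    {
        ETERM *t = erl_mk_float(d);
        if (t == NULL)
            return -1;          /* Inf/NaN is rejected by the patch above */
        *out = t;               /* caller frees with erl_free_term(t) */
        return 0;
    }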
diff --git a/lib/erl_interface/src/decode/decode_big.c b/lib/erl_interface/src/decode/decode_big.c
index 477880b331..016ed2eac2 100644
--- a/lib/erl_interface/src/decode/decode_big.c
+++ b/lib/erl_interface/src/decode/decode_big.c
@@ -150,27 +150,6 @@ int ei_big_comp(erlang_big *x, erlang_big *y)
#define INLINED_FP_CONVERSION 1
#endif
-#ifdef USE_ISINF_ISNAN /* simulate finite() */
-# define isfinite(f) (!isinf(f) && !isnan(f))
-# define HAVE_ISFINITE
-#elif defined(__GNUC__) && defined(HAVE_FINITE)
-/* We use finite in gcc as it emits assembler instead of
- the function call that isfinite emits. The assembler is
- significantly faster. */
-# ifdef isfinite
-# undef isfinite
-# endif
-# define isfinite finite
-# ifndef HAVE_ISFINITE
-# define HAVE_ISFINITE
-# endif
-#elif defined(isfinite) && !defined(HAVE_ISFINITE)
-# define HAVE_ISFINITE
-#elif !defined(HAVE_ISFINITE) && defined(HAVE_FINITE)
-# define isfinite finite
-# define HAVE_ISFINITE
-#endif
-
#ifdef NO_FPE_SIGNALS
# define ERTS_FP_CHECK_INIT() do {} while (0)
# define ERTS_FP_ERROR(f, Action) if (!isfinite(f)) { Action; } else {}
diff --git a/lib/erl_interface/src/encode/encode_double.c b/lib/erl_interface/src/encode/encode_double.c
index 148a49f73a..72a1c60808 100644
--- a/lib/erl_interface/src/encode/encode_double.c
+++ b/lib/erl_interface/src/encode/encode_double.c
@@ -21,12 +21,24 @@
#include "eidef.h"
#include "eiext.h"
#include "putget.h"
+#if defined(HAVE_ISFINITE)
+#include <math.h>
+#endif
int ei_encode_double(char *buf, int *index, double p)
{
char *s = buf + *index;
char *s0 = s;
+ /* Erlang does not handle Inf and NaN, so we return an error rather
+ * than letting the Erlang VM complain about a bad external
+ * term. */
+#if defined(HAVE_ISFINITE)
+ if(!isfinite(p)) {
+ return -1;
+ }
+#endif
+
if (!buf)
s += 9;
else {
diff --git a/lib/erl_interface/src/legacy/erl_eterm.c b/lib/erl_interface/src/legacy/erl_eterm.c
index 636d26b24b..66cca7decf 100644
--- a/lib/erl_interface/src/legacy/erl_eterm.c
+++ b/lib/erl_interface/src/legacy/erl_eterm.c
@@ -26,6 +26,9 @@
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
+#if defined(HAVE_ISFINITE)
+#include <math.h>
+#endif
#include "ei_locking.h"
#include "ei_resolve.h"
@@ -125,6 +128,15 @@ ETERM *erl_mk_float (double d)
{
ETERM *ep;
+#if defined(HAVE_ISFINITE)
+ /* Erlang does not handle Inf and NaN, so we return an error
+ * rather than letting the Erlang VM complain about a bad external
+ * term. */
+ if(!isfinite(d)) {
+ return NULL;
+ }
+#endif
+
ep = erl_alloc_eterm(ERL_FLOAT);
ERL_COUNT(ep) = 1;
ERL_FLOAT_VALUE(ep) = d;
diff --git a/lib/erl_interface/src/misc/eidef.h b/lib/erl_interface/src/misc/eidef.h
index bd3d0bf631..e0dc325b48 100644
--- a/lib/erl_interface/src/misc/eidef.h
+++ b/lib/erl_interface/src/misc/eidef.h
@@ -41,6 +41,27 @@
typedef int socklen_t;
#endif
+#ifdef USE_ISINF_ISNAN /* simulate finite() */
+# define isfinite(f) (!isinf(f) && !isnan(f))
+# define HAVE_ISFINITE
+#elif defined(__GNUC__) && defined(HAVE_FINITE)
+/* We use finite in gcc as it emits assembler instead of
+ the function call that isfinite emits. The assembler is
+ significantly faster. */
+# ifdef isfinite
+# undef isfinite
+# endif
+# define isfinite finite
+# ifndef HAVE_ISFINITE
+# define HAVE_ISFINITE
+# endif
+#elif defined(isfinite) && !defined(HAVE_ISFINITE)
+# define HAVE_ISFINITE
+#elif !defined(HAVE_ISFINITE) && defined(HAVE_FINITE)
+# define isfinite finite
+# define HAVE_ISFINITE
+#endif
+
typedef unsigned char uint8; /* FIXME use configure */
typedef unsigned short uint16;
typedef unsigned int uint32;
diff --git a/lib/erl_interface/test/ei_encode_SUITE.erl b/lib/erl_interface/test/ei_encode_SUITE.erl
index 50dc8b6a3c..86e0d8cd08 100644
--- a/lib/erl_interface/test/ei_encode_SUITE.erl
+++ b/lib/erl_interface/test/ei_encode_SUITE.erl
@@ -174,7 +174,7 @@ test_ei_encode_ulonglong(Config) when is_list(Config) ->
%% ######################################################################## %%
-%% A "character" for us is an 8 bit integer, alwasy positive, i.e.
+%% A "character" for us is an 8 bit integer, always positive, i.e.
%% it is unsigned.
%% FIXME maybe the API should change to use "unsigned char" to be clear?!
diff --git a/lib/et/src/Makefile b/lib/et/src/Makefile
index 377e593712..b6873371ed 100644
--- a/lib/et/src/Makefile
+++ b/lib/et/src/Makefile
@@ -65,7 +65,7 @@ APPUP_TARGET = $(EBIN)/$(APPUP_FILE)
# ----------------------------------------------------
# FLAGS
# ----------------------------------------------------
-ERL_COMPILE_FLAGS += -pa $(ERL_TOP)/lib/et/ebin -I../include
+ERL_COMPILE_FLAGS += -pa $(ERL_TOP)/lib/et/ebin -I../include -Werror
# ----------------------------------------------------
# Special Build Targets
diff --git a/lib/et/src/et_collector.erl b/lib/et/src/et_collector.erl
index e05c67be60..1f60dee8ca 100644
--- a/lib/et/src/et_collector.erl
+++ b/lib/et/src/et_collector.erl
@@ -64,6 +64,8 @@
-export([init/1,terminate/2, code_change/3,
handle_call/3, handle_cast/2, handle_info/2]).
+-compile([{nowarn_deprecated_function,[{erlang,now,0}]}]).
+
-include("et_internal.hrl").
-include("../include/et.hrl").
diff --git a/lib/et/src/et_selector.erl b/lib/et/src/et_selector.erl
index c8e9c907b2..5497096377 100644
--- a/lib/et/src/et_selector.erl
+++ b/lib/et/src/et_selector.erl
@@ -28,6 +28,8 @@
parse_event/2
]).
+-compile([{nowarn_deprecated_function,[{erlang,now,0}]}]).
+
-include("../include/et.hrl").
%%----------------------------------------------------------------------
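Editor note: the -Werror flag added to the et Makefile turns the erlang:now/0 deprecation warning into a compile error, so the modules that still call it suppress exactly that warning with a compile attribute. A sketch of the attribute as it would appear in any module in the same situation (the module name is hypothetical):

    -module(my_module).
    %% erlang:now/0 is deprecated; silence only that warning so the
    %% module still compiles cleanly under -Werror.
    -compile([{nowarn_deprecated_function, [{erlang, now, 0}]}]).

    timestamp() ->
        erlang:now().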
diff --git a/lib/eunit/doc/overview.edoc b/lib/eunit/doc/overview.edoc
index 872a017440..df716cdeea 100644
--- a/lib/eunit/doc/overview.edoc
+++ b/lib/eunit/doc/overview.edoc
@@ -569,6 +569,9 @@ Examples:
```?assertMatch({found, {fred, _}}, lookup(bloggs, Table))'''
```?assertMatch([X|_] when X > 0, binary_to_list(B))'''
</dd>
+<dt>`assertNotMatch(GuardedPattern, Expr)'</dt>
+<dd>The inverse case of assertMatch, for convenience.
+</dd>
<dt>`assertEqual(Expect, Expr)'</dt>
<dd>Evaluates the expressions `Expect' and `Expr' and compares the
results for equality, if testing is enabled. If the values are not
@@ -583,6 +586,9 @@ Examples:
```?assertEqual("b" ++ "a", lists:reverse("ab"))'''
```?assertEqual(foo(X), bar(Y))'''
</dd>
+<dt>`assertNotEqual(Unexpected, Expr)'</dt>
+<dd>The inverse case of assertEqual, for convenience.
+</dd>
<dt>`assertException(ClassPattern, TermPattern, Expr)'</dt>
<dt>`assertError(TermPattern, Expr)'</dt>
<dt>`assertExit(TermPattern, Expr)'</dt>
diff --git a/lib/eunit/include/eunit.hrl b/lib/eunit/include/eunit.hrl
index 9e8d34567a..53d291430d 100644
--- a/lib/eunit/include/eunit.hrl
+++ b/lib/eunit/include/eunit.hrl
@@ -414,7 +414,7 @@
-else.
-define(debugMsg(S),
begin
- io:fwrite(user, <<"~s:~w:~w: ~s\n">>,
+ io:fwrite(user, <<"~ts:~w:~w: ~ts\n">>,
[?FILE, ?LINE, self(), S]),
ok
end).
@@ -423,7 +423,7 @@
-define(debugVal(E),
begin
((fun (__V) ->
- ?debugFmt(<<"~s = ~P">>, [(??E), __V, 15]),
+ ?debugFmt(<<"~ts = ~tP">>, [(??E), __V, 15]),
__V
end)(E))
end).
@@ -433,7 +433,7 @@
{__T0, _} = statistics(wall_clock),
__V = (E),
{__T1, _} = statistics(wall_clock),
- ?debugFmt(<<"~s: ~.3f s">>, [(S), (__T1-__T0)/1000]),
+ ?debugFmt(<<"~ts: ~.3f s">>, [(S), (__T1-__T0)/1000]),
__V
end)())
end).
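Editor note: the switch from ~s/~P to the translation modifiers ~ts/~tP in these debug macros is what lets them print strings containing code points above 255 instead of failing. A minimal illustration of the difference (code point 1071 is Cyrillic Я; output assumes a Unicode-capable shell):

    io:format("~s~n",  [[1071]]),  %% ~s accepts latin-1 only: fails with badarg
    io:format("~ts~n", [[1071]]). %% ~ts handles the full Unicode range: prints Я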
diff --git a/lib/eunit/src/eunit.app.src b/lib/eunit/src/eunit.app.src
index 7a3978e200..b4ff6c9242 100644
--- a/lib/eunit/src/eunit.app.src
+++ b/lib/eunit/src/eunit.app.src
@@ -19,4 +19,4 @@
{registered,[]},
{applications, [kernel,stdlib]},
{env, []},
- {runtime_dependencies, ["stdlib-2.0","kernel-3.0","erts-6.0"]}]}.
+ {runtime_dependencies, ["stdlib-2.5","kernel-3.0","erts-6.0"]}]}.
diff --git a/lib/eunit/src/eunit.erl b/lib/eunit/src/eunit.erl
index 9c589dfa86..fbfd123c43 100644
--- a/lib/eunit/src/eunit.erl
+++ b/lib/eunit/src/eunit.erl
@@ -231,7 +231,7 @@ event_logger(LogFile) ->
event_logger_loop(Reference, FD) ->
receive
{status, _Id, _Info}=Msg ->
- io:fwrite(FD, "~p.\n", [Msg]),
+ io:fwrite(FD, "~tp.\n", [Msg]),
event_logger_loop(Reference, FD);
{stop, Reference, _ReplyTo} ->
%% no need to reply, just exit
diff --git a/lib/eunit/src/eunit_autoexport.erl b/lib/eunit/src/eunit_autoexport.erl
index 36ae3b71d7..7bb78f5ea8 100644
--- a/lib/eunit/src/eunit_autoexport.erl
+++ b/lib/eunit/src/eunit_autoexport.erl
@@ -79,11 +79,12 @@ rewrite([{function,_,test,0,_}=F | Fs], As, Module, _Test) ->
rewrite([F | Fs], As, Module, Test) ->
rewrite(Fs, [F | As], Module, Test);
rewrite([], As, Module, Test) ->
+ L = erl_anno:new(0),
{if Test ->
- [{function,0,test,0,
- [{clause,0,[],[],
- [{call,0,{remote,0,{atom,0,eunit},{atom,0,test}},
- [{atom,0,Module}]}]}]}
+ [{function,L,test,0,
+ [{clause,L,[],[],
+ [{call,L,{remote,L,{atom,L,eunit},{atom,L,test}},
+ [{atom,L,Module}]}]}]}
| As];
true ->
As
@@ -96,4 +97,4 @@ module_decl(Name, M, Fs, Exports) ->
Es = if Test -> [{test,0} | Exports];
true -> Exports
end,
- [M, {attribute,0,export,Es} | lists:reverse(Fs1)].
+ [M, {attribute,erl_anno:new(0),export,Es} | lists:reverse(Fs1)].
diff --git a/lib/eunit/src/eunit_data.erl b/lib/eunit/src/eunit_data.erl
index cbbc6fbc15..8b53a3681d 100644
--- a/lib/eunit/src/eunit_data.erl
+++ b/lib/eunit/src/eunit_data.erl
@@ -391,7 +391,7 @@ parse({with, X, As}=T) when is_list(As) ->
parse({S, T1} = T) when is_list(S) ->
case eunit_lib:is_string(S) of
true ->
- group(#group{tests = T1, desc = list_to_binary(S)});
+ group(#group{tests = T1, desc = unicode:characters_to_binary(S)});
false ->
bad_test(T)
end;
diff --git a/lib/eunit/src/eunit_internal.hrl b/lib/eunit/src/eunit_internal.hrl
index 92694ec39b..8e1e27811f 100644
--- a/lib/eunit/src/eunit_internal.hrl
+++ b/lib/eunit/src/eunit_internal.hrl
@@ -14,8 +14,8 @@
-define(DEFAULT_MODULE_WRAPPER_NAME, eunit_wrapper_).
-ifdef(DEBUG).
--define(debugmsg(S),io:fwrite("\n* ~s: ~s\n", [?MODULE,S])).
--define(debugmsg1(S,As),io:fwrite("\n* ~s: " ++ S ++ "\n", [?MODULE] ++ As)).
+-define(debugmsg(S),io:fwrite("\n* ~ts: ~ts\n", [?MODULE,S])).
+-define(debugmsg1(S,As),io:fwrite("\n* ~ts: " ++ S ++ "\n", [?MODULE] ++ As)).
-else.
-define(debugmsg(S),ok).
-define(debugmsg1(S,As),ok).
diff --git a/lib/eunit/src/eunit_lib.erl b/lib/eunit/src/eunit_lib.erl
index 40bae93298..d8f98cffa5 100644
--- a/lib/eunit/src/eunit_lib.erl
+++ b/lib/eunit/src/eunit_lib.erl
@@ -57,7 +57,7 @@ format_exception({Class,Term,Trace}, Depth)
when is_atom(Class), is_list(Trace) ->
case is_stacktrace(Trace) of
true ->
- io_lib:format("~s**~w:~s",
+ io_lib:format("~ts**~w:~ts",
[format_stacktrace(Trace), Class,
format_term(Term, Depth)]);
false ->
@@ -67,11 +67,11 @@ format_exception(Term, Depth) ->
format_term(Term, Depth).
format_term(Term, Depth) ->
- io_lib:format("~P\n", [Term, Depth]).
+ io_lib:format("~tP\n", [Term, Depth]).
format_exit_term(Term) ->
{Reason, Trace} = analyze_exit_term(Term),
- io_lib:format("~P~s", [Reason, 15, Trace]).
+ io_lib:format("~tP~ts", [Reason, 15, Trace]).
analyze_exit_term({Reason, [_|_]=Trace}=Term) ->
case is_stacktrace(Trace) of
@@ -102,7 +102,7 @@ format_stacktrace(Trace) ->
format_stacktrace(Trace, "in function", "in call from").
format_stacktrace([{M,F,A,L}|Fs], Pre, Pre1) when is_integer(A) ->
- [io_lib:fwrite("~s ~w:~w/~w~s\n",
+ [io_lib:fwrite("~ts ~w:~w/~w~ts\n",
[Pre, M, F, A, format_stacktrace_location(L)])
| format_stacktrace(Fs, Pre1, Pre1)];
format_stacktrace([{M,F,As,L}|Fs], Pre, Pre1) when is_list(As) ->
@@ -110,15 +110,15 @@ format_stacktrace([{M,F,As,L}|Fs], Pre, Pre1) when is_list(As) ->
C = case is_op(M,F,A) of
true when A =:= 1 ->
[A1] = As,
- io_lib:fwrite("~s ~s", [F,format_arg(A1)]);
+ io_lib:fwrite("~ts ~ts", [F,format_arg(A1)]);
true when A =:= 2 ->
[A1, A2] = As,
- io_lib:fwrite("~s ~s ~s",
+ io_lib:fwrite("~ts ~ts ~ts",
[format_arg(A1),F,format_arg(A2)]);
false ->
- io_lib:fwrite("~w(~s)", [F,format_arglist(As)])
+ io_lib:fwrite("~w(~ts)", [F,format_arglist(As)])
end,
- [io_lib:fwrite("~s ~w:~w/~w~s\n called as ~s\n",
+ [io_lib:fwrite("~ts ~w:~w/~w~ts\n called as ~ts\n",
[Pre,M,F,A,format_stacktrace_location(L),C])
| format_stacktrace(Fs,Pre1,Pre1)];
format_stacktrace([{M,F,As}|Fs], Pre, Pre1) ->
@@ -130,18 +130,18 @@ format_stacktrace_location(Location) ->
File = proplists:get_value(file, Location),
Line = proplists:get_value(line, Location),
if File =/= undefined, Line =/= undefined ->
- io_lib:format(" (~s, line ~w)", [File, Line]);
+ io_lib:format(" (~ts, line ~w)", [File, Line]);
true ->
""
end.
format_arg(A) ->
- io_lib:format("~P",[A,15]).
+ io_lib:format("~tP",[A,15]).
format_arglist([A]) ->
format_arg(A);
format_arglist([A|As]) ->
- [io_lib:format("~P,",[A,15]) | format_arglist(As)];
+ [io_lib:format("~tP,",[A,15]) | format_arglist(As)];
format_arglist([]) ->
"".
@@ -155,41 +155,41 @@ is_op(_M, _F, _A) ->
false.
format_error({bad_test, Term}) ->
- error_msg("bad test descriptor", "~P", [Term, 15]);
+ error_msg("bad test descriptor", "~tP", [Term, 15]);
format_error({bad_generator, {{M,F,A}, Term}}) ->
error_msg(io_lib:format("result from generator ~w:~w/~w is not a test",
[M,F,A]),
- "~P", [Term, 15]);
+ "~tP", [Term, 15]);
format_error({generator_failed, {{M,F,A}, Exception}}) ->
error_msg(io_lib:format("test generator ~w:~w/~w failed",[M,F,A]),
- "~s", [format_exception(Exception)]);
+ "~ts", [format_exception(Exception)]);
format_error({no_such_function, {M,F,A}})
when is_atom(M), is_atom(F), is_integer(A) ->
error_msg(io_lib:format("no such function: ~w:~w/~w", [M,F,A]),
"", []);
format_error({module_not_found, M}) ->
- error_msg("test module not found", "~p", [M]);
+ error_msg("test module not found", "~tp", [M]);
format_error({application_not_found, A}) when is_atom(A) ->
error_msg("application not found", "~w", [A]);
format_error({file_read_error, {_R, Msg, F}}) ->
- error_msg("error reading file", "~s: ~s", [Msg, F]);
+ error_msg("error reading file", "~ts: ~ts", [Msg, F]);
format_error({setup_failed, Exception}) ->
- error_msg("context setup failed", "~s",
+ error_msg("context setup failed", "~ts",
[format_exception(Exception)]);
format_error({cleanup_failed, Exception}) ->
- error_msg("context cleanup failed", "~s",
+ error_msg("context cleanup failed", "~ts",
[format_exception(Exception)]);
format_error({{bad_instantiator, {{M,F,A}, Term}}, _DummyException}) ->
error_msg(io_lib:format("result from instantiator ~w:~w/~w is not a test",
[M,F,A]),
- "~P", [Term, 15]);
+ "~tP", [Term, 15]);
format_error({instantiation_failed, Exception}) ->
- error_msg("instantiation of subtests failed", "~s",
+ error_msg("instantiation of subtests failed", "~ts",
[format_exception(Exception)]).
error_msg(Title, Fmt, Args) ->
Msg = io_lib:format("**"++Fmt, Args), % gets indentation right
- io_lib:fwrite("*** ~s ***\n~s\n\n", [Title, Msg]).
+ io_lib:fwrite("*** ~ts ***\n~ts\n\n", [Title, Msg]).
-ifdef(TEST).
format_exception_test_() ->
diff --git a/lib/eunit/src/eunit_proc.erl b/lib/eunit/src/eunit_proc.erl
index 03d1a18321..98ae31d54b 100644
--- a/lib/eunit/src/eunit_proc.erl
+++ b/lib/eunit/src/eunit_proc.erl
@@ -230,7 +230,7 @@ insulator_wait(Child, Parent, Buf, St) ->
message_super(Id, {progress, 'begin', {Type, Data}}, St),
insulator_wait(Child, Parent, [[] | Buf], St);
{child, Child, Id, {'end', Status, Time}} ->
- Data = [{time, Time}, {output, buffer_to_binary(hd(Buf))}],
+ Data = [{time, Time}, {output, lists:reverse(hd(Buf))}],
message_super(Id, {progress, 'end', {Status, Data}}, St),
insulator_wait(Child, Parent, tl(Buf), St);
{child, Child, Id, {skipped, Reason}} ->
@@ -272,9 +272,6 @@ kill_task(Child, St) ->
exit(Child, kill),
terminate_insulator(St).
-buffer_to_binary([B]) when is_binary(B) -> B; % avoid unnecessary copying
-buffer_to_binary(Buf) -> list_to_binary(lists:reverse(Buf)).
-
%% Unlinking before exit avoids polluting the parent process with exit
%% signals from the insulator. The child process is already dead here.
@@ -597,7 +594,7 @@ group_leader_loop(Runner, Wait, Buf) ->
%% no more messages and nothing to wait for; we ought to
%% have collected all immediately pending output now
process_flag(priority, normal),
- Runner ! {self(), buffer_to_binary(Buf)}
+ Runner ! {self(), lists:reverse(Buf)}
end.
group_leader_sync(G) ->
diff --git a/lib/eunit/src/eunit_surefire.erl b/lib/eunit/src/eunit_surefire.erl
index c0931588ae..f3e58a3d1c 100644
--- a/lib/eunit/src/eunit_surefire.erl
+++ b/lib/eunit/src/eunit_surefire.erl
@@ -279,7 +279,7 @@ write_report_to(TestSuite, FileDescriptor) ->
%% Write the XML header.
%% ----------------------------------------------------------------------------
write_header(FileDescriptor) ->
- file:write(FileDescriptor, [<<"<?xml version=\"1.0\" encoding=\"UTF-8\" ?>">>, ?NEWLINE]).
+ io:format(FileDescriptor, "~ts~ts", [<<"<?xml version=\"1.0\" encoding=\"UTF-8\" ?>">>, ?NEWLINE]).
%% ----------------------------------------------------------------------------
%% Write the testsuite start tag, with attributes describing the statistics
@@ -303,7 +303,7 @@ write_start_tag(
<<"\" time=\"">>, format_time(Time),
<<"\" name=\"">>, escape_attr(Name),
<<"\">">>, ?NEWLINE],
- file:write(FileDescriptor, StartTag).
+ io:format(FileDescriptor, "~ts", [StartTag]).
%% ----------------------------------------------------------------------------
%% Recursive function to write the test cases.
@@ -317,7 +317,7 @@ write_testcases([TestCase| Tail], FileDescriptor) ->
%% Write the testsuite end tag.
%% ----------------------------------------------------------------------------
write_end_tag(FileDescriptor) ->
- file:write(FileDescriptor, [<<"</testsuite>">>, ?NEWLINE]).
+ io:format(FileDescriptor, "~ts~ts", [<<"</testsuite>">>, ?NEWLINE]).
%% ----------------------------------------------------------------------------
%% Write a test case, as a testcase tag.
@@ -343,7 +343,7 @@ write_testcase(
{ok, <<>>} -> [<<"/>">>, ?NEWLINE];
_ -> [<<">">>, ?NEWLINE, format_testcase_result(Result), format_testcase_output(Output), ?INDENT, <<"</testcase>">>, ?NEWLINE]
end,
- file:write(FileDescriptor, [StartTag, ContentAndEndTag]).
+ io:format(FileDescriptor, "~ts~ts", [StartTag, ContentAndEndTag]).
%% ----------------------------------------------------------------------------
%% Format the result of the test.
@@ -426,7 +426,7 @@ escape_suitename([Char | Tail], Acc) -> escape_suitename(Tail, [Char | Acc]).
%% Replace < with &lt;, > with &gt; and & with &amp;
%% ----------------------------------------------------------------------------
escape_text(Text) when is_binary(Text) -> escape_text(binary_to_list(Text));
-escape_text(Text) -> escape_xml(lists:flatten(Text), [], false).
+escape_text(Text) -> escape_xml(to_utf8(lists:flatten(Text)), [], false).
%% ----------------------------------------------------------------------------
@@ -434,7 +434,7 @@ escape_text(Text) -> escape_xml(lists:flatten(Text), [], false).
%% Replace < with &lt;, > with &gt; and & with &amp;
%% ----------------------------------------------------------------------------
escape_attr(Text) when is_binary(Text) -> escape_attr(binary_to_list(Text));
-escape_attr(Text) -> escape_xml(lists:flatten(Text), [], true).
+escape_attr(Text) -> escape_xml(to_utf8(lists:flatten(Text)), [], true).
escape_xml([], Acc, _ForAttr) -> lists:reverse(Acc);
escape_xml([$< | Tail], Acc, ForAttr) -> escape_xml(Tail, [$;, $t, $l, $& | Acc], ForAttr);
@@ -442,3 +442,17 @@ escape_xml([$> | Tail], Acc, ForAttr) -> escape_xml(Tail, [$;, $t, $g, $& | Acc]
escape_xml([$& | Tail], Acc, ForAttr) -> escape_xml(Tail, [$;, $p, $m, $a, $& | Acc], ForAttr);
escape_xml([$" | Tail], Acc, true) -> escape_xml(Tail, [$;, $t, $o, $u, $q, $& | Acc], true); % "
escape_xml([Char | Tail], Acc, ForAttr) when is_integer(Char) -> escape_xml(Tail, [Char | Acc], ForAttr).
+
+%% the input may be utf8 or latin1; the resulting list is unicode
+to_utf8(Desc) when is_binary(Desc) ->
+ case unicode:characters_to_list(Desc) of
+ {_,_,_} -> unicode:characters_to_list(Desc, latin1);
+ X -> X
+ end;
+to_utf8(Desc) when is_list(Desc) ->
+ try
+ to_utf8(list_to_binary(Desc))
+ catch
+ _:_ ->
+ Desc
+ end.
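Editor note: to_utf8/1 above relies on unicode:characters_to_list/1 returning a 3-tuple ({error,...} or {incomplete,...}) when the binary is not valid UTF-8, and only then retries with the latin1 option. A short illustration of that behavior (the byte values are chosen for the example, not taken from the patch):

    %% 169 is '©' in latin-1 and not valid UTF-8 on its own, so the
    %% first call returns a 3-tuple and to_utf8/1 falls back to latin1.
    unicode:characters_to_list(<<"abc",169>>),          %% {error,"abc",<<169>>}
    unicode:characters_to_list(<<"abc",169>>, latin1).  %% [97,98,99,169] = "abc©"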
diff --git a/lib/eunit/src/eunit_tty.erl b/lib/eunit/src/eunit_tty.erl
index f21b2da3d3..699d2adaca 100644
--- a/lib/eunit/src/eunit_tty.erl
+++ b/lib/eunit/src/eunit_tty.erl
@@ -83,7 +83,7 @@ terminate({ok, Data}, St) ->
sync_end(error)
end;
terminate({error, Reason}, _St) ->
- fwrite("Internal error: ~P.\n", [Reason, 25]),
+ fwrite("Internal error: ~tP.\n", [Reason, 25]),
sync_end(error).
sync_end(Result) ->
@@ -177,7 +177,7 @@ indent(_N) ->
print_group_start(I, Desc) ->
indent(I),
- fwrite("~s\n", [Desc]).
+ fwrite("~ts\n", [Desc]).
print_group_end(I, Time) ->
if Time > 0 ->
@@ -195,13 +195,13 @@ print_test_begin(I, Data) ->
true -> io_lib:fwrite("~w:", [Line])
end,
D = if Desc =:= "" ; Desc =:= undefined -> "";
- true -> io_lib:fwrite(" (~s)", [Desc])
+ true -> io_lib:fwrite(" (~ts)", [Desc])
end,
case proplists:get_value(source, Data) of
{Module, Name, _Arity} ->
- fwrite("~s:~s ~s~s...", [Module, L, Name, D]);
+ fwrite("~ts:~ts ~ts~ts...", [Module, L, Name, D]);
_ ->
- fwrite("~s~s...", [L, D])
+ fwrite("~ts~ts...", [L, D])
end.
print_test_end(Data) ->
@@ -209,21 +209,21 @@ print_test_end(Data) ->
T = if Time > 0 -> io_lib:fwrite("[~.3f s] ", [Time/1000]);
true -> ""
end,
- fwrite("~sok\n", [T]).
+ fwrite("~tsok\n", [T]).
print_test_error({error, Exception}, Data) ->
Output = proplists:get_value(output, Data),
- fwrite("*failed*\n~s", [eunit_lib:format_exception(Exception)]),
+ fwrite("*failed*\n~ts", [eunit_lib:format_exception(Exception)]),
case Output of
<<>> ->
fwrite("\n\n");
<<Text:800/binary, _:1/binary, _/binary>> ->
- fwrite(" output:<<\"~s\">>...\n\n", [Text]);
+ fwrite(" output:<<\"~ts\">>...\n\n", [Text]);
_ ->
- fwrite(" output:<<\"~s\">>\n\n", [Output])
+ fwrite(" output:<<\"~ts\">>\n\n", [Output])
end;
print_test_error({skipped, Reason}, _) ->
- fwrite("*did not run*\n::~s\n", [format_skipped(Reason)]).
+ fwrite("*did not run*\n::~ts\n", [format_skipped(Reason)]).
format_skipped({module_not_found, M}) ->
io_lib:fwrite("missing module: ~w", [M]);
@@ -244,12 +244,12 @@ format_cancel(undefined) ->
format_cancel(timeout) ->
"*timed out*\n";
format_cancel({startup, Reason}) ->
- io_lib:fwrite("*could not start test process*\n::~P\n\n",
+ io_lib:fwrite("*could not start test process*\n::~tP\n\n",
[Reason, 15]);
format_cancel({blame, _SubId}) ->
"*cancelled because of subtask*\n";
format_cancel({exit, Reason}) ->
- io_lib:fwrite("*unexpected termination of test process*\n::~P\n\n",
+ io_lib:fwrite("*unexpected termination of test process*\n::~tP\n\n",
[Reason, 15]);
format_cancel({abort, Reason}) ->
eunit_lib:format_error(Reason).
diff --git a/lib/eunit/test/Makefile b/lib/eunit/test/Makefile
index e4ddf4e42c..b0dde64c67 100644
--- a/lib/eunit/test/Makefile
+++ b/lib/eunit/test/Makefile
@@ -20,7 +20,9 @@ include $(ERL_TOP)/make/target.mk
include $(ERL_TOP)/make/$(TARGET)/otp.mk
MODULES = \
- eunit_SUITE
+ eunit_SUITE \
+ tlatin \
+ tutf8
ERL_FILES= $(MODULES:%=%.erl)
diff --git a/lib/eunit/test/eunit_SUITE.erl b/lib/eunit/test/eunit_SUITE.erl
index d13dc73923..2ac6fafe5d 100644
--- a/lib/eunit/test/eunit_SUITE.erl
+++ b/lib/eunit/test/eunit_SUITE.erl
@@ -1,35 +1,35 @@
%%
%% %CopyrightBegin%
-%%
+%%
%% Copyright Ericsson AB 2010-2011. All Rights Reserved.
-%%
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at http://www.erlang.org/.
-%%
+%%
%% Software distributed under the License is distributed on an "AS IS"
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-module(eunit_SUITE).
--export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
+-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
- app_test/1,appup_test/1,eunit_test/1]).
-
+ app_test/1,appup_test/1,eunit_test/1,surefire_utf8_test/1,surefire_latin_test/1]).
+
-include_lib("common_test/include/ct.hrl").
suite() -> [{ct_hooks,[ts_install_cth]}].
-all() ->
- [app_test, appup_test, eunit_test].
+all() ->
+ [app_test, appup_test, eunit_test, surefire_utf8_test, surefire_latin_test].
-groups() ->
+groups() ->
[].
init_per_suite(Config) ->
@@ -54,3 +54,21 @@ eunit_test(Config) when is_list(Config) ->
ok = file:set_cwd(code:lib_dir(eunit)),
ok = eunit:test(eunit).
+surefire_latin_test(Config) when is_list(Config) ->
+ ok = file:set_cwd(proplists:get_value(priv_dir, Config, ".")),
+ check_surefire(tlatin),
+ ok.
+
+surefire_utf8_test(Config) when is_list(Config) ->
+ ok = file:set_cwd(proplists:get_value(priv_dir, Config, ".")),
+ check_surefire(tutf8),
+ ok.
+
+check_surefire(Module) ->
+ File = "TEST-"++atom_to_list(Module)++".xml",
+ file:delete(File),
+ % ignore test result, some fail on purpose
+ eunit:test(Module, [{report,{eunit_surefire,[{dir,"."}]}}]),
+ {ok, Bin} = file:read_file(File),
+ [_|_] = unicode:characters_to_list(Bin, unicode),
+ ok.
\ No newline at end of file
diff --git a/lib/eunit/test/tlatin.erl b/lib/eunit/test/tlatin.erl
new file mode 100644
index 0000000000..a42e67d581
--- /dev/null
+++ b/lib/eunit/test/tlatin.erl
@@ -0,0 +1,15 @@
+% coding: latin-1
+
+-module(tlatin).
+
+-include_lib("eunit/include/eunit.hrl").
+
+'foo_�_test_'() ->
+ [
+ {"1�1", fun() -> io:format("1�1 ~s ~w",[<<"a�">>, 'Z�k']), io:format([128,64,255,255]), ?assert("g�"=="g�") end}
+ ,{<<"2�2">>, fun() -> io:format("2�2 ~s",[<<"b�">>]), io:format([128,64]), ?assert("g�"=="g�") end}
+ ,{<<"3�3"/utf8>>, fun() -> io:format("3�3 ~ts",[<<"c�"/utf8>>]), io:format([128,64]), ?assert("g�"=="g�") end}
+ ,{"1�1", fun() -> io:format("1�1 ~s ~w",[<<"a�">>,'Zb�d']), io:format([128,64,255,255]), ?assert("w�"=="w�") end}
+ ,{<<"2�2">>, fun() -> io:format("2�2 ~s",[<<"b�">>]), io:format([128,64]), ?assert("w�"=="w�") end}
+ ,{<<"3�3"/utf8>>, fun() -> io:format("3�3 ~ts",[<<"c�"/utf8>>]), io:format([128,64]), ?assert("w�"=="w�") end}
+ ].
diff --git a/lib/eunit/test/tutf8.erl b/lib/eunit/test/tutf8.erl
new file mode 100644
index 0000000000..c902f3ad18
--- /dev/null
+++ b/lib/eunit/test/tutf8.erl
@@ -0,0 +1,15 @@
+%% coding: utf-8
+
+-module(tutf8).
+
+-include_lib("eunit/include/eunit.hrl").
+
+'foo_ö_test_'() ->
+ [
+ {"1ö汉1", fun() -> io:format("1å汉1 ~s ~w",[<<"aö汉">>, 'Zök']), io:format([128,64,255,255]), ?assert("gö汉"=="gö汉") end}
+ ,{<<"2ö汉2">>, fun() -> io:format("2å汉2 ~s",[<<"bö汉">>]), io:format([128,64]), ?assert("gö汉"=="gö汉") end}
+ ,{<<"3ö汉3"/utf8>>, fun() -> io:format("3å汉3 ~ts",[<<"cö汉"/utf8>>]), io:format([128,64]), ?assert("gö汉"=="gö汉") end}
+ ,{"1ä汉1", fun() -> io:format("1ä汉1 ~s ~w",[<<"aä汉">>, 'Zbäd']), io:format([128,64,255,255]), ?assert("wå汉"=="wä汉") end}
+ ,{<<"2ä汉2">>, fun() -> io:format("2ä汉2 ~s",[<<"bä汉">>]), io:format([128,64]), ?assert("wå汉"=="wä汉") end}
+ ,{<<"3ä汉"/utf8>>, fun() -> io:format("3ä汉3 ~ts",[<<"cä汉"/utf8>>]), io:format([128,64]), ?assert("wå汉"=="wä汉") end}
+ ].
diff --git a/lib/eunit/vsn.mk b/lib/eunit/vsn.mk
index 8b489bdc04..b551ee6eb6 100644
--- a/lib/eunit/vsn.mk
+++ b/lib/eunit/vsn.mk
@@ -1 +1 @@
-EUNIT_VSN = 2.2.10
+EUNIT_VSN = 2.3
diff --git a/lib/hipe/cerl/cerl_pmatch.erl b/lib/hipe/cerl/cerl_pmatch.erl
index 3bc93e80dd..4f04b0a7ed 100644
--- a/lib/hipe/cerl/cerl_pmatch.erl
+++ b/lib/hipe/cerl/cerl_pmatch.erl
@@ -31,7 +31,7 @@
-module(cerl_pmatch).
--define(NO_UNUSED, true).
+%%-define(NO_UNUSED, true).
-export([clauses/2]).
-ifndef(NO_UNUSED).
@@ -59,6 +59,8 @@
%% @see transform/2
-ifndef(NO_UNUSED).
+-spec core_transform(cerl:c_module(), [_]) -> cerl:c_module().
+
core_transform(M, Opts) ->
cerl:to_records(transform(cerl:from_records(M), Opts)).
-endif. % NO_UNUSED
@@ -76,6 +78,8 @@ core_transform(M, Opts) ->
%% @see core_transform/2
-ifndef(NO_UNUSED).
+-spec transform(cerl:cerl(), [_]) -> cerl:cerl().
+
transform(M, _Opts) ->
expr(M, env__empty()).
-endif. % NO_UNUSED
@@ -109,7 +113,7 @@ transform(M, _Opts) ->
%% @see expr/2
%% @see transform/2
--spec clauses([cerl:cerl()], rec_env:environment()) ->
+-spec clauses([cerl:cerl(),...], rec_env:environment()) ->
{cerl:cerl(), [cerl:cerl()]}.
clauses(Cs, Env) ->
@@ -406,6 +410,8 @@ make_let(Vs, A, B) ->
%% @see rec_env
-ifndef(NO_UNUSED).
+-spec expr(cerl:cerl(), rec_env:environment()) -> cerl:cerl().
+
expr(E, Env) ->
case cerl:type(E) of
literal ->
diff --git a/lib/hipe/cerl/cerl_to_icode.erl b/lib/hipe/cerl/cerl_to_icode.erl
index 2645056be1..f98aaa12f3 100644
--- a/lib/hipe/cerl/cerl_to_icode.erl
+++ b/lib/hipe/cerl/cerl_to_icode.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -110,7 +110,7 @@
effect = false :: boolean(),
fail = [], % [] or fail-to label
class = expr :: 'expr' | 'guard',
- line = 0 :: erl_scan:line(), % current line number
+ line = 0 :: erl_anno:line(), % current line number
'receive' :: 'undefined' | #'receive'{}
}).
diff --git a/lib/hipe/cerl/erl_bif_types.erl b/lib/hipe/cerl/erl_bif_types.erl
index 74e93bf098..ee77d65932 100644
--- a/lib/hipe/cerl/erl_bif_types.erl
+++ b/lib/hipe/cerl/erl_bif_types.erl
@@ -1070,9 +1070,6 @@ type(hipe_bifs, find_na_or_make_stub, 2, Xs, Opaques) ->
type(hipe_bifs, fun_to_address, 1, Xs, Opaques) ->
strict(hipe_bifs, fun_to_address, 1, Xs,
fun (_) -> t_integer() end, Opaques);
-%% type(hipe_bifs, get_emu_address, 1, Xs, Opaques) ->
-%% strict(hipe_bifs, get_emu_address, 1, Xs,
-%% fun (_) -> t_integer() end, Opaques); % address
type(hipe_bifs, get_fe, 2, Xs, Opaques) ->
strict(hipe_bifs, get_fe, 2, Xs, fun (_) -> t_integer() end, Opaques);
type(hipe_bifs, get_rts_param, 1, Xs, Opaques) ->
@@ -1081,9 +1078,6 @@ type(hipe_bifs, get_rts_param, 1, Xs, Opaques) ->
type(hipe_bifs, invalidate_funinfo_native_addresses, 1, Xs, Opaques) ->
strict(hipe_bifs, invalidate_funinfo_native_addresses, 1, Xs,
fun (_) -> t_nil() end, Opaques);
-%% type(hipe_bifs, make_native_stub, 2, Xs, Opaques) ->
-%% strict(hipe_bifs, make_native_stub, 2, Xs,
-%% fun (_) -> t_integer() end, Opaques); % address
type(hipe_bifs, mark_referred_from, 1, Xs, Opaques) ->
strict(hipe_bifs, mark_referred_from, 1, Xs,
fun (_) -> t_nil() end, Opaques);
@@ -1119,8 +1113,8 @@ type(hipe_bifs, set_native_address, 3, Xs, Opaques) ->
type(hipe_bifs, set_native_address_in_fe, 2, Xs, Opaques) ->
strict(hipe_bifs, set_native_address_in_fe, 2, Xs,
fun (_) -> t_atom('true') end, Opaques);
-type(hipe_bifs, system_crc, 1, Xs, Opaques) ->
- strict(hipe_bifs, system_crc, 1, Xs, fun (_) -> t_crc32() end, Opaques);
+type(hipe_bifs, system_crc, 0, _, _Opaques) ->
+ t_crc32();
type(hipe_bifs, term_to_word, 1, Xs, Opaques) ->
strict(hipe_bifs, term_to_word, 1, Xs,
fun (_) -> t_integer() end, Opaques);
@@ -2462,16 +2456,12 @@ arg_types(hipe_bifs, find_na_or_make_stub, 2) ->
[t_mfa(), t_boolean()];
arg_types(hipe_bifs, fun_to_address, 1) ->
[t_mfa()];
-%% arg_types(hipe_bifs, get_emu_address, 1) ->
-%% [t_mfa()];
arg_types(hipe_bifs, get_fe, 2) ->
[t_atom(), t_tuple([t_integer(), t_integer(), t_integer()])];
arg_types(hipe_bifs, get_rts_param, 1) ->
[t_fixnum()];
arg_types(hipe_bifs, invalidate_funinfo_native_addresses, 1) ->
[t_list(t_mfa())];
-%% arg_types(hipe_bifs, make_native_stub, 2) ->
-%% [t_integer(), t_arity()];
arg_types(hipe_bifs, mark_referred_from, 1) ->
[t_mfa()];
arg_types(hipe_bifs, merge_term, 1) ->
@@ -2500,8 +2490,8 @@ arg_types(hipe_bifs, set_native_address, 3) ->
[t_mfa(), t_integer(), t_boolean()];
arg_types(hipe_bifs, set_native_address_in_fe, 2) ->
[t_integer(), t_integer()];
-arg_types(hipe_bifs, system_crc, 1) ->
- [t_crc32()];
+arg_types(hipe_bifs, system_crc, 0) ->
+ [];
arg_types(hipe_bifs, term_to_word, 1) ->
[t_any()];
arg_types(hipe_bifs, update_code_size, 3) ->
diff --git a/lib/hipe/cerl/erl_types.erl b/lib/hipe/cerl/erl_types.erl
index 5124e7238a..14335cf635 100644
--- a/lib/hipe/cerl/erl_types.erl
+++ b/lib/hipe/cerl/erl_types.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -67,7 +67,6 @@
t_cons/2,
t_cons_hd/1, t_cons_hd/2,
t_cons_tl/1, t_cons_tl/2,
- t_constant/0,
t_contains_opaque/1, t_contains_opaque/2,
t_decorate_with_opaque/3,
t_elements/1,
@@ -78,10 +77,11 @@
t_non_neg_fixnum/0,
t_pos_fixnum/0,
t_float/0,
+ t_var_names/1,
t_form_to_string/1,
- t_from_form/1,
- t_from_form/2,
- t_from_form/3,
+ t_from_form/4,
+ t_from_form/5,
+ t_from_form_without_remote/2,
t_from_range/2,
t_from_range_unsafe/2,
t_from_term/1,
@@ -117,7 +117,6 @@
%% t_is_byte/1,
%% t_is_char/1,
t_is_cons/1, t_is_cons/2,
- t_is_constant/1,
t_is_equal/2,
t_is_fixnum/1,
t_is_float/1, t_is_float/2,
@@ -181,7 +180,6 @@
t_remote/3,
t_string/0,
t_struct_from_opaque/2,
- t_solve_remote/3,
t_subst/2,
t_subtract/2,
t_subtract_list/2,
@@ -248,6 +246,8 @@
%%
-define(REC_TYPE_LIMIT, 2).
+-define(EXPAND_DEPTH, 16).
+-define(EXPAND_LIMIT, 10000).
-define(TUPLE_TAG_LIMIT, 5).
-define(TUPLE_ARITY_LIMIT, 8).
@@ -366,7 +366,7 @@
-type record_key() :: {'record', atom()}.
-type type_key() :: {'type' | 'opaque', atom(), arity()}.
--type record_value() :: orddict:orddict(). % XXX. To be refined
+-type record_value() :: [{atom(), erl_parse:abstract_expr(), erl_type()}].
-type type_value() :: {module(), erl_type(), atom()}.
-type type_table() :: dict:dict(record_key(), record_value())
| dict:dict(type_key(), type_value()).
@@ -747,7 +747,7 @@ t_opaque_from_records(RecDict) ->
end
end, RecDict),
OpaqueTypeDict =
- dict:map(fun({opaque, Name, _Arity}, {Module, _Type, ArgNames}) ->
+ dict:map(fun({opaque, Name, _Arity}, {{Module, _Form, ArgNames}, _Type}) ->
%% Args = args_to_types(ArgNames),
%% List = lists:zip(ArgNames, Args),
%% TmpVarDict = dict:from_list(List),
@@ -808,134 +808,6 @@ is_remote(_) -> false.
-type mod_records() :: dict:dict(module(), type_table()).
--spec t_solve_remote(erl_type(), sets:set(mfa()), mod_records()) -> erl_type().
-
-t_solve_remote(Type, ExpTypes, Records) ->
- {RT, _RR} = t_solve_remote(Type, ExpTypes, Records, []),
- RT.
-
-t_solve_remote(?function(Domain, Range), ET, R, C) ->
- {RT1, RR1} = t_solve_remote(Domain, ET, R, C),
- {RT2, RR2} = t_solve_remote(Range, ET, R, C),
- {?function(RT1, RT2), RR1 ++ RR2};
-t_solve_remote(?list(Types, Term, Size), ET, R, C) ->
- {RT1, RR1} = t_solve_remote(Types, ET, R, C),
- {RT2, RR2} = t_solve_remote(Term, ET, R, C),
- {?list(RT1, RT2, Size), RR1 ++ RR2};
-t_solve_remote(?product(Types), ET, R, C) ->
- {RL, RR} = list_solve_remote(Types, ET, R, C),
- {?product(RL), RR};
-t_solve_remote(?opaque(Set), ET, R, C) ->
- List = ordsets:to_list(Set),
- {NewList, RR} = opaques_solve_remote(List, ET, R, C),
- {?opaque(ordsets:from_list(NewList)), RR};
-t_solve_remote(?tuple(?any, _, _) = T, _ET, _R, _C) -> {T, []};
-t_solve_remote(?tuple(Types, _Arity, _Tag), ET, R, C) ->
- {RL, RR} = list_solve_remote(Types, ET, R, C),
- {t_tuple(RL), RR};
-t_solve_remote(?tuple_set(Set), ET, R, C) ->
- {NewTuples, RR} = tuples_solve_remote(Set, ET, R, C),
- {t_sup(NewTuples), RR};
-t_solve_remote(?remote(Set), ET, R, C) ->
- RemoteList = ordsets:to_list(Set),
- {RL, RR} = list_solve_remote_type(RemoteList, ET, R, C),
- {t_sup(RL), RR};
-t_solve_remote(?union(List), ET, R, C) ->
- {RL, RR} = list_solve_remote(List, ET, R, C),
- {t_sup(RL), RR};
-t_solve_remote(T, _ET, _R, _C) -> {T, []}.
-
-t_solve_remote_type(#remote{mod = RemMod, name = Name, args = Args0} = RemType,
- ET, R, C) ->
- Args = lists:map(fun(A) ->
- {Arg, _} = t_solve_remote(A, ET, R, C),
- Arg
- end, Args0),
- ArgsLen = length(Args),
- case dict:find(RemMod, R) of
- error ->
- self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
- {t_any(), []};
- {ok, RemDict} ->
- MFA = {RemMod, Name, ArgsLen},
- case sets:is_element(MFA, ET) of
- true ->
- case lookup_type(Name, ArgsLen, RemDict) of
- {type, {_Mod, Type, ArgNames}} ->
- {NewType, NewCycle, NewRR} =
- case can_unfold_more(RemType, C) of
- true ->
- List = lists:zip(ArgNames, Args),
- TmpVarDict = dict:from_list(List),
- {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []};
- false ->
- {t_any(), C, [RemType]}
- end,
- {RT, RR} = t_solve_remote(NewType, ET, R, NewCycle),
- RetRR = NewRR ++ RR,
- RT1 =
- case lists:member(RemType, RetRR) of
- true -> t_limit(RT, ?REC_TYPE_LIMIT);
- false -> RT
- end,
- {RT1, RetRR};
- {opaque, {Mod, Type, ArgNames}} ->
- List = lists:zip(ArgNames, Args),
- TmpVarDict = dict:from_list(List),
- {Rep, NewCycle, NewRR} =
- case can_unfold_more(RemType, C) of
- true ->
- {t_from_form(Type, RemDict, TmpVarDict), [RemType|C], []};
- false ->
- {t_any(), C, [RemType]}
- end,
- {NewRep, RR} = t_solve_remote(Rep, ET, R, NewCycle),
- RetRR = NewRR ++ RR,
- RT1 =
- case lists:member(RemType, RetRR) of
- true -> t_limit(NewRep, ?REC_TYPE_LIMIT);
- false -> NewRep
- end,
- {skip_opaque_alias(RT1, Mod, Name, Args), RetRR};
- error ->
- Msg = io_lib:format("Unable to find remote type ~w:~w()\n",
- [RemMod, Name]),
- throw({error, Msg})
- end;
- false ->
- self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
- {t_any(), []}
- end
- end.
-
-list_solve_remote([], _ET, _R, _C) ->
- {[], []};
-list_solve_remote([Type|Types], ET, R, C) ->
- {RT, RR1} = t_solve_remote(Type, ET, R, C),
- {RL, RR2} = list_solve_remote(Types, ET, R, C),
- {[RT|RL], RR1 ++ RR2}.
-
-list_solve_remote_type([], _ET, _R, _C) ->
- {[], []};
-list_solve_remote_type([Type|Types], ET, R, C) ->
- {RT, RR1} = t_solve_remote_type(Type, ET, R, C),
- {RL, RR2} = list_solve_remote_type(Types, ET, R, C),
- {[RT|RL], RR1 ++ RR2}.
-
-opaques_solve_remote([], _ET, _R, _C) ->
- {[], []};
-opaques_solve_remote([#opaque{struct = Struct} = Remote|Tail], ET, R, C) ->
- {RT, RR1} = t_solve_remote(Struct, ET, R, C),
- {LOp, RR2} = opaques_solve_remote(Tail, ET, R, C),
- {[Remote#opaque{struct = RT}|LOp], RR1 ++ RR2}.
-
-tuples_solve_remote([], _ET, _R, _C) ->
- {[], []};
-tuples_solve_remote([{_Sz, Tuples}|Tail], ET, R, C) ->
- {RL, RR1} = list_solve_remote(Tuples, ET, R, C),
- {LSzTpls, RR2} = tuples_solve_remote(Tail, ET, R, C),
- {RL ++ LSzTpls, RR1 ++ RR2}.
-
%%-----------------------------------------------------------------------------
%% Unit type. Signals non termination.
%%
@@ -1874,17 +1746,6 @@ is_tuple1(_) -> false.
t_bitstrlist() ->
t_iolist(1, t_bitstr()).
-%% XXX. To be removed.
--spec t_constant() -> erl_type().
-
-t_constant() ->
- t_sup([t_number(), t_identifier(), t_atom(), t_fun(), t_binary()]).
-
--spec t_is_constant(erl_type()) -> boolean().
-
-t_is_constant(X) ->
- t_is_subtype(X, t_constant()).
-
-spec t_arity() -> erl_type().
t_arity() ->
@@ -2264,14 +2125,19 @@ expand_range_from_set(Range = ?int_range(From, To), Set) ->
-spec t_sup([erl_type()]) -> erl_type().
-t_sup([?any|_]) ->
- ?any;
-t_sup([H1, H2|T]) ->
- t_sup([t_sup(H1, H2)|T]);
-t_sup([H]) ->
- subst_all_vars_to_any(H);
-t_sup([]) ->
- ?none.
+t_sup([]) -> ?none;
+t_sup(Ts) ->
+ case lists:any(fun is_any/1, Ts) of
+ true -> ?any;
+ false ->
+ t_sup1(Ts, [])
+ end.
+
+t_sup1([H1, H2|T], L) ->
+ t_sup1(T, [t_sup(H1, H2)|L]);
+t_sup1([T], []) -> subst_all_vars_to_any(T);
+t_sup1(Ts, L) ->
+ t_sup1(Ts++L, []).
-spec t_sup(erl_type(), erl_type()) -> erl_type().
@@ -2757,15 +2623,19 @@ inf_collect(_T1, [], _Opaques, OpL) ->
combine(S, T1, T2) ->
#opaque{mod = Mod1, name = Name1, args = Args1} = T1,
#opaque{mod = Mod2, name = Name2, args = Args2} = T2,
+ Comb1 = comb(Mod1, Name1, Args1, S, T1),
case is_same_type_name({Mod1, Name1, Args1}, {Mod2, Name2, Args2}) of
- true -> [comb(Mod1, Name1, Args1, S, T1)];
- false -> [comb(Mod1, Name1, Args1, S, T1), comb(Mod2, Name2, Args2, S, T2)]
+ true -> Comb1;
+ false -> Comb1 ++ comb(Mod2, Name2, Args2, S, T2)
end.
comb(Mod, Name, Args, S, T) ->
case is_same_name(Mod, Name, Args, S) of
- true -> S;
- false -> T#opaque{struct = S}
+ true ->
+ ?opaque(Set) = S,
+ Set;
+ false ->
+ [T#opaque{struct = S}]
end.
is_same_name(Mod1, Name1, Args1,
@@ -3089,12 +2959,12 @@ t_subst_aux(T, _VarMap) ->
subst_all_remote(Type0, Substitute) ->
Map =
fun(Type) ->
- case erl_types:t_is_remote(Type) of
+ case t_is_remote(Type) of
true -> Substitute;
false -> Type
end
end,
- erl_types:t_map(Map, Type0).
+ t_map(Map, Type0).
%%-----------------------------------------------------------------------------
%% Unification
@@ -3776,7 +3646,7 @@ t_abstract_records(?tuple(Elements, Arity, ?atom(_) = Tag), RecDict) ->
[TagAtom] = atom_vals(Tag),
case lookup_record(TagAtom, Arity - 1, RecDict) of
error -> t_tuple([t_abstract_records(E, RecDict) || E <- Elements]);
- {ok, Fields} -> t_tuple([Tag|[T || {_Name, T} <- Fields]])
+ {ok, Fields} -> t_tuple([Tag|[T || {_Name, _Abstr, T} <- Fields]])
end;
t_abstract_records(?tuple(Elements, _Arity, _Tag), RecDict) ->
t_tuple([t_abstract_records(E, RecDict) || E <- Elements]);
@@ -3997,7 +3867,8 @@ record_to_string(Tag, [_|Fields], FieldNames, RecDict) ->
FieldStrings = record_fields_to_string(Fields, FieldNames, RecDict, []),
"#" ++ atom_to_string(Tag) ++ "{" ++ string:join(FieldStrings, ",") ++ "}".
-record_fields_to_string([F|Fs], [{FName, _DefType}|FDefs], RecDict, Acc) ->
+record_fields_to_string([F|Fs], [{FName, _Abstr, _DefType}|FDefs],
+ RecDict, Acc) ->
NewAcc =
case t_is_equal(F, t_any()) orelse t_is_any_atom('undefined', F) of
true -> Acc;
@@ -4023,7 +3894,7 @@ record_field_diffs_to_string(?tuple([_|Fs], Arity, Tag), RecDict) ->
FieldDiffs = field_diffs(Fs, FieldNames, RecDict, []),
string:join(FieldDiffs, " and ").
-field_diffs([F|Fs], [{FName, DefType}|FDefs], RecDict, Acc) ->
+field_diffs([F|Fs], [{FName, _Abstr, DefType}|FDefs], RecDict, Acc) ->
%% Don't care about opaqueness for now.
NewAcc =
case not t_is_none(t_inf(F, DefType)) of
@@ -4071,357 +3942,476 @@ mod_name(Mod, Name) ->
%%
%%=============================================================================
--spec t_from_form(parse_form()) -> erl_type().
+-type type_names() :: [type_key() | record_key()].
-t_from_form(Form) ->
- t_from_form(Form, dict:new()).
+-spec t_from_form(parse_form(), sets:set(mfa()),
+ module(), mod_records()) -> erl_type().
--spec t_from_form(parse_form(), type_table()) -> erl_type().
+t_from_form(Form, ExpTypes, Module, RecDict) ->
+ t_from_form(Form, ExpTypes, Module, RecDict, dict:new()).
-t_from_form(Form, RecDict) ->
- t_from_form(Form, RecDict, dict:new()).
+-spec t_from_form(parse_form(), sets:set(mfa()),
+ module(), mod_records(), var_table()) -> erl_type().
--spec t_from_form(parse_form(), type_table(), var_table()) -> erl_type().
+t_from_form(Form, ExpTypes, Module, RecDict, VarDict) ->
+ {T, _} = t_from_form1(Form, [], ExpTypes, Module, RecDict, VarDict),
+ T.
+
+%% Replace external types with none().
+-spec t_from_form_without_remote(parse_form(), type_table()) -> erl_type().
-t_from_form(Form, RecDict, VarDict) ->
- {T, _R} = t_from_form(Form, [], RecDict, VarDict),
+t_from_form_without_remote(Form, TypeTable) ->
+ Module = mod,
+ RecDict = dict:from_list([{Module, TypeTable}]),
+ ExpTypes = replace_by_none,
+ {T, _} = t_from_form1(Form, [], ExpTypes, Module, RecDict, dict:new()),
T.
--type type_names() :: [type_key() | record_key()].
+%% REC_TYPE_LIMIT is used for limiting the depth of recursive types.
+%% EXPAND_LIMIT is used for limiting the size of types by
+%% limiting the number of elements of lists within one type form.
+%% EXPAND_DEPTH is used in conjunction with EXPAND_LIMIT to make the
+%% types balanced (unions will otherwise collapse to any()) by limiting
+%% the depth the same way as t_limit/2 does.
--spec t_from_form(parse_form(), type_names(), type_table(), var_table()) ->
- {erl_type(), type_names()}.
+-type expand_limit() :: integer().
-t_from_form({var, _L, '_'}, _TypeNames, _RecDict, _VarDict) ->
- {t_any(), []};
-t_from_form({var, _L, Name}, _TypeNames, _RecDict, VarDict) ->
- case dict:find(Name, VarDict) of
- error -> {t_var(Name), []};
- {ok, Val} -> {Val, []}
+-type expand_depth() :: integer().
+
+t_from_form1(Form, TypeNames, ET, M, MR, V) ->
+ t_from_form1(Form, TypeNames, ET, M, MR, V, ?EXPAND_DEPTH).
+
+t_from_form1(Form, TypeNames, ET, M, MR, V, D) ->
+ L = ?EXPAND_LIMIT,
+ {T, L1} = t_from_form(Form, TypeNames, ET, M, MR, V, D, L),
+ if
+ L1 =< 0, D > 1 ->
+ D1 = D div 2,
+ t_from_form1(Form, TypeNames, ET, M, MR, V, D1);
+ true ->
+ {T, L1}
+ end.
+
+-spec t_from_form(parse_form(), type_names(),
+ sets:set(mfa()) | 'replace_by_none',
+ module(), mod_records(), var_table(),
+ expand_depth(), expand_limit())
+ -> {erl_type(), expand_limit()}.
+
+%% If there is something wrong with parse_form(),
+%% throw({error, io_lib:chars()}) is called;
+%% for unknown remote types
+%% self() ! {self(), ext_types, {RemMod, Name, ArgsLen}}
+%% is called, unless 'replace_by_none' is given.
+%%
+%% It is assumed that M can be found in MR.
+
+t_from_form(_, _TypeNames, _ET, _M, _MR, _V, D, L) when D =< 0 ; L =< 0 ->
+ {t_any(), L};
+t_from_form({var, _L, '_'}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_any(), L};
+t_from_form({var, _L, Name}, _TypeNames, _ET, _M, _MR, V, _D, L) ->
+ case dict:find(Name, V) of
+ error -> {t_var(Name), L};
+ {ok, Val} -> {Val, L}
end;
-t_from_form({ann_type, _L, [_Var, Type]}, TypeNames, RecDict, VarDict) ->
- t_from_form(Type, TypeNames, RecDict, VarDict);
-t_from_form({paren_type, _L, [Type]}, TypeNames, RecDict, VarDict) ->
- t_from_form(Type, TypeNames, RecDict, VarDict);
+t_from_form({ann_type, _L, [_Var, Type]}, TypeNames, ET, M, MR, V, D, L) ->
+ t_from_form(Type, TypeNames, ET, M, MR, V, D, L);
+t_from_form({paren_type, _L, [Type]}, TypeNames, ET, M, MR, V, D, L) ->
+ t_from_form(Type, TypeNames, ET, M, MR, V, D, L);
t_from_form({remote_type, _L, [{atom, _, Module}, {atom, _, Type}, Args]},
- TypeNames, RecDict, VarDict) ->
- {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict),
- {t_remote(Module, Type, L), R};
-t_from_form({atom, _L, Atom}, _TypeNames, _RecDict, _VarDict) ->
- {t_atom(Atom), []};
-t_from_form({integer, _L, Int}, _TypeNames, _RecDict, _VarDict) ->
- {t_integer(Int), []};
-t_from_form({op, _L, _Op, _Arg} = Op, _TypeNames, _RecDict, _VarDict) ->
+ TypeNames, ET, M, MR, V, D, L) ->
+ remote_from_form(Module, Type, Args, TypeNames, ET, M, MR, V, D, L);
+t_from_form({atom, _L, Atom}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_atom(Atom), L};
+t_from_form({integer, _L, Int}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_integer(Int), L};
+t_from_form({op, _L, _Op, _Arg} = Op, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
case erl_eval:partial_eval(Op) of
{integer, _, Val} ->
- {t_integer(Val), []};
+ {t_integer(Val), L};
_ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])})
end;
t_from_form({op, _L, _Op, _Arg1, _Arg2} = Op, _TypeNames,
- _RecDict, _VarDict) ->
+ _ET, _M, _MR, _V, _D, L) ->
case erl_eval:partial_eval(Op) of
{integer, _, Val} ->
- {t_integer(Val), []};
+ {t_integer(Val), L};
_ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Op])})
end;
-t_from_form({type, _L, any, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_any(), []};
-t_from_form({type, _L, arity, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_arity(), []};
-t_from_form({type, _L, atom, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_atom(), []};
-t_from_form({type, _L, binary, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_binary(), []};
+t_from_form({type, _L, any, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_any(), L};
+t_from_form({type, _L, arity, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_arity(), L};
+t_from_form({type, _L, atom, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_atom(), L};
+t_from_form({type, _L, binary, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_binary(), L};
t_from_form({type, _L, binary, [Base, Unit]} = Type,
- _TypeNames, _RecDict, _VarDict) ->
+ _TypeNames, _ET, _M, _MR, _V, _D, L) ->
case {erl_eval:partial_eval(Base), erl_eval:partial_eval(Unit)} of
{{integer, _, B}, {integer, _, U}} when B >= 0, U >= 0 ->
- {t_bitstr(U, B), []};
+ {t_bitstr(U, B), L};
_ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])})
end;
-t_from_form({type, _L, bitstring, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_bitstr(), []};
-t_from_form({type, _L, bool, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_boolean(), []}; % XXX: Temporarily
-t_from_form({type, _L, boolean, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_boolean(), []};
-t_from_form({type, _L, byte, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_byte(), []};
-t_from_form({type, _L, char, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_char(), []};
-t_from_form({type, _L, float, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_float(), []};
-t_from_form({type, _L, function, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_fun(), []};
-t_from_form({type, _L, 'fun', []}, _TypeNames, _RecDict, _VarDict) ->
- {t_fun(), []};
+t_from_form({type, _L, bitstring, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_bitstr(), L};
+t_from_form({type, _L, bool, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_boolean(), L}; % XXX: Temporarily
+t_from_form({type, _L, boolean, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_boolean(), L};
+t_from_form({type, _L, byte, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_byte(), L};
+t_from_form({type, _L, char, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_char(), L};
+t_from_form({type, _L, float, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_float(), L};
+t_from_form({type, _L, function, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_fun(), L};
+t_from_form({type, _L, 'fun', []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_fun(), L};
t_from_form({type, _L, 'fun', [{type, _, any}, Range]}, TypeNames,
- RecDict, VarDict) ->
- {T, R} = t_from_form(Range, TypeNames, RecDict, VarDict),
- {t_fun(T), R};
+ ET, M, MR, V, D, L) ->
+ {T, L1} = t_from_form(Range, TypeNames, ET, M, MR, V, D - 1, L - 1),
+ {t_fun(T), L1};
t_from_form({type, _L, 'fun', [{type, _, product, Domain}, Range]},
- TypeNames, RecDict, VarDict) ->
- {L, R1} = list_from_form(Domain, TypeNames, RecDict, VarDict),
- {T, R2} = t_from_form(Range, TypeNames, RecDict, VarDict),
- {t_fun(L, T), R1 ++ R2};
-t_from_form({type, _L, identifier, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_identifier(), []};
-t_from_form({type, _L, integer, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_integer(), []};
-t_from_form({type, _L, iodata, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_iodata(), []};
-t_from_form({type, _L, iolist, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_iolist(), []};
-t_from_form({type, _L, list, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_list(), []};
-t_from_form({type, _L, list, [Type]}, TypeNames, RecDict, VarDict) ->
- {T, R} = t_from_form(Type, TypeNames, RecDict, VarDict),
- {t_list(T), R};
-t_from_form({type, _L, map, _}, TypeNames, RecDict, VarDict) ->
- builtin_type(map, t_map([]), TypeNames, RecDict, VarDict);
-t_from_form({type, _L, mfa, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_mfa(), []};
-t_from_form({type, _L, module, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_module(), []};
-t_from_form({type, _L, nil, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_nil(), []};
-t_from_form({type, _L, neg_integer, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_neg_integer(), []};
-t_from_form({type, _L, non_neg_integer, []}, _TypeNames, _RecDict,
- _VarDict) ->
- {t_non_neg_integer(), []};
-t_from_form({type, _L, no_return, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_unit(), []};
-t_from_form({type, _L, node, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_node(), []};
-t_from_form({type, _L, none, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_none(), []};
-t_from_form({type, _L, nonempty_list, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_nonempty_list(), []};
-t_from_form({type, _L, nonempty_list, [Type]}, TypeNames, RecDict, VarDict) ->
- {T, R} = t_from_form(Type, TypeNames, RecDict, VarDict),
- {t_nonempty_list(T), R};
+ TypeNames, ET, M, MR, V, D, L) ->
+ {Dom1, L1} = list_from_form(Domain, TypeNames, ET, M, MR, V, D, L),
+ {Ran1, L2} = t_from_form(Range, TypeNames, ET, M, MR, V, D - 1, L1),
+ {t_fun(Dom1, Ran1), L2};
+t_from_form({type, _L, identifier, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_identifier(), L};
+t_from_form({type, _L, integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_integer(), L};
+t_from_form({type, _L, iodata, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_iodata(), L};
+t_from_form({type, _L, iolist, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_iolist(), L};
+t_from_form({type, _L, list, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_list(), L};
+t_from_form({type, _L, list, [Type]}, TypeNames, ET, M, MR, V, D, L) ->
+ {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D - 1, L - 1),
+ {t_list(T), L1};
+t_from_form({type, _L, map, _}, TypeNames, ET, M, MR, V, D, L) ->
+ builtin_type(map, t_map([]), TypeNames, ET, M, MR, V, D, L);
+t_from_form({type, _L, mfa, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_mfa(), L};
+t_from_form({type, _L, module, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_module(), L};
+t_from_form({type, _L, nil, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_nil(), L};
+t_from_form({type, _L, neg_integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_neg_integer(), L};
+t_from_form({type, _L, non_neg_integer, []}, _TypeNames, _ET, _M, _MR,
+ _V, _D, L) ->
+ {t_non_neg_integer(), L};
+t_from_form({type, _L, no_return, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_unit(), L};
+t_from_form({type, _L, node, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_node(), L};
+t_from_form({type, _L, none, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_none(), L};
+t_from_form({type, _L, nonempty_list, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_nonempty_list(), L};
+t_from_form({type, _L, nonempty_list, [Type]}, TypeNames, ET, M, MR, V, D, L) ->
+ {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D, L - 1),
+ {t_nonempty_list(T), L1};
t_from_form({type, _L, nonempty_improper_list, [Cont, Term]}, TypeNames,
- RecDict, VarDict) ->
- {T1, R1} = t_from_form(Cont, TypeNames, RecDict, VarDict),
- {T2, R2} = t_from_form(Term, TypeNames, RecDict, VarDict),
- {t_cons(T1, T2), R1 ++ R2};
+ ET, M, MR, V, D, L) ->
+ {T1, L1} = t_from_form(Cont, TypeNames, ET, M, MR, V, D, L - 1),
+ {T2, L2} = t_from_form(Term, TypeNames, ET, M, MR, V, D, L1),
+ {t_cons(T1, T2), L2};
t_from_form({type, _L, nonempty_maybe_improper_list, []}, _TypeNames,
- _RecDict, _VarDict) ->
- {t_cons(?any, ?any), []};
+ _ET, _M, _MR, _V, _D, L) ->
+ {t_cons(?any, ?any), L};
t_from_form({type, _L, nonempty_maybe_improper_list, [Cont, Term]},
- TypeNames, RecDict, VarDict) ->
- {T1, R1} = t_from_form(Cont, TypeNames, RecDict, VarDict),
- {T2, R2} = t_from_form(Term, TypeNames, RecDict, VarDict),
- {t_cons(T1, T2), R1 ++ R2};
-t_from_form({type, _L, nonempty_string, []}, _TypeNames, _RecDict,
- _VarDict) ->
- {t_nonempty_string(), []};
-t_from_form({type, _L, number, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_number(), []};
-t_from_form({type, _L, pid, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_pid(), []};
-t_from_form({type, _L, port, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_port(), []};
-t_from_form({type, _L, pos_integer, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_pos_integer(), []};
+ TypeNames, ET, M, MR, V, D, L) ->
+ {T1, L1} = t_from_form(Cont, TypeNames, ET, M, MR, V, D, L - 1),
+ {T2, L2} = t_from_form(Term, TypeNames, ET, M, MR, V, D, L1),
+ {t_cons(T1, T2), L2};
+t_from_form({type, _L, nonempty_string, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_nonempty_string(), L};
+t_from_form({type, _L, number, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_number(), L};
+t_from_form({type, _L, pid, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_pid(), L};
+t_from_form({type, _L, port, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_port(), L};
+t_from_form({type, _L, pos_integer, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_pos_integer(), L};
t_from_form({type, _L, maybe_improper_list, []}, _TypeNames,
- _RecDict, _VarDict) ->
- {t_maybe_improper_list(), []};
+ _ET, _M, _MR, _V, _D, L) ->
+ {t_maybe_improper_list(), L};
t_from_form({type, _L, maybe_improper_list, [Content, Termination]},
- TypeNames, RecDict, VarDict) ->
- {T1, R1} = t_from_form(Content, TypeNames, RecDict, VarDict),
- {T2, R2} = t_from_form(Termination, TypeNames, RecDict, VarDict),
- {t_maybe_improper_list(T1, T2), R1 ++ R2};
-t_from_form({type, _L, product, Elements}, TypeNames, RecDict, VarDict) ->
- {L, R} = list_from_form(Elements, TypeNames, RecDict, VarDict),
- {t_product(L), R};
+ TypeNames, ET, M, MR, V, D, L) ->
+ {T1, L1} = t_from_form(Content, TypeNames, ET, M, MR, V, D, L - 1),
+ {T2, L2} = t_from_form(Termination, TypeNames, ET, M, MR, V, D, L1),
+ {t_maybe_improper_list(T1, T2), L2};
+t_from_form({type, _L, product, Elements}, TypeNames, ET, M, MR, V, D, L) ->
+ {Lst, L1} = list_from_form(Elements, TypeNames, ET, M, MR, V, D - 1, L),
+ {t_product(Lst), L1};
t_from_form({type, _L, range, [From, To]} = Type,
- _TypeNames, _RecDict, _VarDict) ->
+ _TypeNames, _ET, _M, _MR, _V, _D, L) ->
case {erl_eval:partial_eval(From), erl_eval:partial_eval(To)} of
{{integer, _, FromVal}, {integer, _, ToVal}} ->
- {t_from_range(FromVal, ToVal), []};
+ {t_from_range(FromVal, ToVal), L};
_ -> throw({error, io_lib:format("Unable to evaluate type ~w\n", [Type])})
end;
-t_from_form({type, _L, record, [Name|Fields]}, TypeNames, RecDict, VarDict) ->
- record_from_form(Name, Fields, TypeNames, RecDict, VarDict);
-t_from_form({type, _L, reference, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_reference(), []};
-t_from_form({type, _L, string, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_string(), []};
-t_from_form({type, _L, term, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_any(), []};
-t_from_form({type, _L, timeout, []}, _TypeNames, _RecDict, _VarDict) ->
- {t_timeout(), []};
-t_from_form({type, _L, tuple, any}, _TypeNames, _RecDict, _VarDict) ->
- {t_tuple(), []};
-t_from_form({type, _L, tuple, Args}, TypeNames, RecDict, VarDict) ->
- {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict),
- {t_tuple(L), R};
-t_from_form({type, _L, union, Args}, TypeNames, RecDict, VarDict) ->
- {L, R} = list_from_form(Args, TypeNames, RecDict, VarDict),
- {t_sup(L), R};
-t_from_form({user_type, _L, Name, Args}, TypeNames, RecDict, VarDict) ->
- type_from_form(Name, Args, TypeNames, RecDict, VarDict);
-t_from_form({type, _L, Name, Args}, TypeNames, RecDict, VarDict) ->
+t_from_form({type, _L, record, [Name|Fields]}, TypeNames, ET, M, MR, V, D, L) ->
+ record_from_form(Name, Fields, TypeNames, ET, M, MR, V, D, L);
+t_from_form({type, _L, reference, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_reference(), L};
+t_from_form({type, _L, string, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_string(), L};
+t_from_form({type, _L, term, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_any(), L};
+t_from_form({type, _L, timeout, []}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_timeout(), L};
+t_from_form({type, _L, tuple, any}, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {t_tuple(), L};
+t_from_form({type, _L, tuple, Args}, TypeNames, ET, M, MR, V, D, L) ->
+ {Lst, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D - 1, L),
+ {t_tuple(Lst), L1};
+t_from_form({type, _L, union, Args}, TypeNames, ET, M, MR, V, D, L) ->
+ {Lst, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L),
+ {t_sup(Lst), L1};
+t_from_form({user_type, _L, Name, Args}, TypeNames, ET, M, MR, V, D, L) ->
+ type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L);
+t_from_form({type, _L, Name, Args}, TypeNames, ET, M, MR, V, D, L) ->
%% Compatibility: modules compiled before Erlang/OTP 18.0.
- type_from_form(Name, Args, TypeNames, RecDict, VarDict);
+ type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L);
t_from_form({opaque, _L, Name, {Mod, Args, Rep}}, _TypeNames,
- _RecDict, _VarDict) ->
- {t_opaque(Mod, Name, Args, Rep), []}.
-
-builtin_type(Name, Type, TypeNames, RecDict, VarDict) ->
- case lookup_type(Name, 0, RecDict) of
- {_, {_M, _T, _A}} ->
- type_from_form(Name, [], TypeNames, RecDict, VarDict);
+ _ET, _M, _MR, _V, _D, L) ->
+ %% XXX. To be removed.
+ {t_opaque(Mod, Name, Args, Rep), L}.
+
+builtin_type(Name, Type, TypeNames, ET, M, MR, V, D, L) ->
+ case dict:find(M, MR) of
+ {ok, R} ->
+ case lookup_type(Name, 0, R) of
+ {_, {{_M, _F, _A}, _T}} ->
+ type_from_form(Name, [], TypeNames, ET, M, MR, V, D, L);
+ error ->
+ {Type, L}
+ end;
error ->
- {Type, []}
+ {Type, L}
end.
-type_from_form(Name, Args, TypeNames, RecDict, VarDict) ->
+type_from_form(Name, Args, TypeNames, ET, M, MR, V, D, L) ->
ArgsLen = length(Args),
- ArgTypes = forms_to_types(Args, TypeNames, RecDict, VarDict),
- case lookup_type(Name, ArgsLen, RecDict) of
- {type, {_Module, Type, ArgNames}} ->
- TypeName = {type, Name, ArgsLen},
+ {ArgTypes, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L),
+ {ok, R} = dict:find(M, MR),
+ case lookup_type(Name, ArgsLen, R) of
+ {type, {{Module, Form, ArgNames}, _Type}} ->
+ TypeName = {type, Module, Name, ArgsLen},
case can_unfold_more(TypeName, TypeNames) of
true ->
List = lists:zip(ArgNames, ArgTypes),
- TmpVarDict = dict:from_list(List),
- {T, R} = t_from_form(Type, [TypeName|TypeNames],
- RecDict, TmpVarDict),
- case lists:member(TypeName, R) of
- true -> {t_limit(T, ?REC_TYPE_LIMIT), R};
- false -> {T, R}
- end;
- false -> {t_any(), [TypeName]}
+ TmpV = dict:from_list(List),
+ t_from_form(Form, [TypeName|TypeNames], ET, M, MR, TmpV, D, L1);
+ false ->
+ {t_any(), L1}
end;
- {opaque, {Module, Type, ArgNames}} ->
- TypeName = {opaque, Name, ArgsLen},
- {Rep, Rret} =
+ {opaque, {{Module, Form, ArgNames}, Type}} ->
+ TypeName = {opaque, Module, Name, ArgsLen},
+ {Rep, L2} =
case can_unfold_more(TypeName, TypeNames) of
true ->
List = lists:zip(ArgNames, ArgTypes),
- TmpVarDict = dict:from_list(List),
- {T, R} = t_from_form(Type, [TypeName|TypeNames],
- RecDict, TmpVarDict),
- case lists:member(TypeName, R) of
- true -> {t_limit(T, ?REC_TYPE_LIMIT), R};
- false -> {T, R}
- end;
- false -> {t_any(), [TypeName]}
+ TmpV = dict:from_list(List),
+ t_from_form(Form, [TypeName|TypeNames], ET, M, MR, TmpV, D, L1);
+ false -> {t_any(), L1}
end,
+ Rep1 = choose_opaque_type(Rep, Type),
Args2 = [subst_all_vars_to_any(ArgType) || ArgType <- ArgTypes],
- {skip_opaque_alias(Rep, Module, Name, Args2), Rret};
+ {skip_opaque_alias(Rep1, Module, Name, Args2), L2};
error ->
Msg = io_lib:format("Unable to find type ~w/~w\n", [Name, ArgsLen]),
throw({error, Msg})
end.
-forms_to_types(Forms, TypeNames, RecDict, VarDict) ->
- {Types, _} = list_from_form(Forms, TypeNames, RecDict, VarDict),
- Types.
-
skip_opaque_alias(?opaque(_) = T, _Mod, _Name, _Args) -> T;
skip_opaque_alias(T, Module, Name, Args) ->
t_opaque(Module, Name, Args, T).
-record_from_form({atom, _, Name}, ModFields, TypeNames, RecDict, VarDict) ->
+remote_from_form(RemMod, Name, Args, TypeNames, ET, M, MR, V, D, L) ->
+ {ArgTypes, L1} = list_from_form(Args, TypeNames, ET, M, MR, V, D, L),
+ if
+ ET =:= replace_by_none ->
+ {t_none(), L1};
+ true ->
+ ArgsLen = length(Args),
+ case dict:find(RemMod, MR) of
+ error ->
+ self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
+ {t_any(), L1};
+ {ok, RemDict} ->
+ MFA = {RemMod, Name, ArgsLen},
+ case sets:is_element(MFA, ET) of
+ true ->
+ case lookup_type(Name, ArgsLen, RemDict) of
+ {type, {{_Mod, Form, ArgNames}, _Type}} ->
+ RemType = {type, RemMod, Name, ArgsLen},
+ case can_unfold_more(RemType, TypeNames) of
+ true ->
+ List = lists:zip(ArgNames, ArgTypes),
+ TmpVarDict = dict:from_list(List),
+ NewTypeNames = [RemType|TypeNames],
+ t_from_form(Form, NewTypeNames, ET,
+ RemMod, MR, TmpVarDict, D, L1);
+ false ->
+ {t_any(), L1}
+ end;
+ {opaque, {{Mod, Form, ArgNames}, Type}} ->
+ RemType = {opaque, RemMod, Name, ArgsLen},
+ List = lists:zip(ArgNames, ArgTypes),
+ TmpVarDict = dict:from_list(List),
+ {NewRep, L2} =
+ case can_unfold_more(RemType, TypeNames) of
+ true ->
+ NewTypeNames = [RemType|TypeNames],
+ t_from_form(Form, NewTypeNames, ET, RemMod, MR,
+ TmpVarDict, D, L1);
+ false ->
+ {t_any(), L1}
+ end,
+ NewRep1 = choose_opaque_type(NewRep, Type),
+ {skip_opaque_alias(NewRep1, Mod, Name, ArgTypes), L2};
+ error ->
+ Msg = io_lib:format("Unable to find remote type ~w:~w()\n",
+ [RemMod, Name]),
+ throw({error, Msg})
+ end;
+ false ->
+ self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
+ {t_any(), L1}
+ end
+ end
+ end.
+
+%% Opaque types (both local and remote) are problematic when it comes
+%% to the limits (TypeNames, D, and L). The reason is that if any() is
+%% substituted for a more specialized subtype of an opaque type, the
+%% property stated along with decorate_with_opaque() (the type has to
+%% be a subtype of the declared type) no longer holds.
+%%
+%% The less than perfect remedy: if the opaque type created from a
+%% form is not a subset of the declared type, the declared type is
+%% used instead, effectively bypassing the limits, and potentially
+%% resulting in huge types.
+choose_opaque_type(Type, DeclType) ->
+ case
+ t_is_subtype(subst_all_vars_to_any(Type),
+ subst_all_vars_to_any(DeclType))
+ of
+ true -> Type;
+ false -> DeclType
+ end.
+
+record_from_form({atom, _, Name}, ModFields, TypeNames, ET, M, MR, V, D, L) ->
case can_unfold_more({record, Name}, TypeNames) of
true ->
- case lookup_record(Name, RecDict) of
+ {ok, R} = dict:find(M, MR),
+ case lookup_record(Name, R) of
{ok, DeclFields} ->
- TypeNames1 = [{record, Name}|TypeNames],
- AreTyped = [is_erl_type(FieldType)
- || {_FieldName, FieldType} <- DeclFields],
- {DeclFields1, R1} =
- case lists:all(fun(Elem) -> Elem end, AreTyped) of
- true -> {DeclFields, []};
- false -> fields_from_form(DeclFields, TypeNames1,
- RecDict, dict:new())
- end,
- {GetModRec, R2} = get_mod_record(ModFields, DeclFields1,
- TypeNames1,
- RecDict, VarDict),
+ NewTypeNames = [{record, Name}|TypeNames],
+ {GetModRec, L1} = get_mod_record(ModFields, DeclFields,
+ NewTypeNames, ET, M, MR, V, D, L),
case GetModRec of
{error, FieldName} ->
throw({error, io_lib:format("Illegal declaration of #~w{~w}\n",
[Name, FieldName])});
{ok, NewFields} ->
- {t_tuple(
- [t_atom(Name)|[Type || {_FieldName, Type} <- NewFields]]),
- R1 ++ R2}
+ {NewFields1, L2} =
+ fields_from_form(NewFields, NewTypeNames, ET, M, MR,
+ dict:new(), D, L1),
+ Rec = t_tuple(
+ [t_atom(Name)|[Type
+ || {_FieldName, Type} <- NewFields1]]),
+ {Rec, L2}
end;
error ->
throw({error, io_lib:format("Unknown record #~w{}\n", [Name])})
end;
- false -> {t_any(), []}
+ false ->
+ {t_any(), L}
end.
-get_mod_record([], DeclFields, _TypeNames, _RecDict, _VarDict) ->
- {{ok, DeclFields}, []};
-get_mod_record(ModFields, DeclFields, TypeNames, RecDict, VarDict) ->
- DeclFieldsDict = orddict:from_list(DeclFields),
- {ModFieldsDict, R} = build_field_dict(ModFields, TypeNames,
- RecDict, VarDict),
- case get_mod_record(DeclFieldsDict, ModFieldsDict, []) of
- {error, _FieldName} = Error -> {Error, R};
- {ok, FinalOrdDict} ->
- {{ok, [{FieldName, orddict:fetch(FieldName, FinalOrdDict)}
- || {FieldName, _} <- DeclFields]},
- R}
+get_mod_record([], DeclFields, _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {{ok, DeclFields}, L};
+get_mod_record(ModFields, DeclFields, TypeNames, ET, M, MR, V, D, L) ->
+ DeclFieldsDict = lists:keysort(1, DeclFields),
+ {ModFieldsDict, L1} =
+ build_field_dict(ModFields, TypeNames, ET, M, MR, V, D, L),
+ case get_mod_record_types(DeclFieldsDict, ModFieldsDict, []) of
+ {error, _FieldName} = Error -> {Error, L1};
+ {ok, FinalKeyDict} ->
+ Fields = [lists:keyfind(FieldName, 1, FinalKeyDict)
+ || {FieldName, _, _} <- DeclFields],
+ {{ok, Fields}, L1}
end.
-build_field_dict(FieldTypes, TypeNames, RecDict, VarDict) ->
- build_field_dict(FieldTypes, TypeNames, RecDict, VarDict, []).
-
-build_field_dict([{type, _, field_type, [{atom, _, Name}, Type]}|Left],
- TypeNames, RecDict, VarDict, Acc) ->
- {T, R1} = t_from_form(Type, TypeNames, RecDict, VarDict),
- NewAcc = [{Name, T}|Acc],
- {D, R2} = build_field_dict(Left, TypeNames, RecDict, VarDict, NewAcc),
- {D, R1 ++ R2};
-build_field_dict([], _TypeNames, _RecDict, _VarDict, Acc) ->
- {orddict:from_list(Acc), []}.
-
-get_mod_record([{FieldName, DeclType}|Left1],
- [{FieldName, ModType}|Left2], Acc) ->
- ModTypeNoVars = subst_all_vars_to_any(ModType),
- case
- contains_remote(ModTypeNoVars)
- orelse contains_remote(DeclType)
- orelse t_is_subtype(ModTypeNoVars, DeclType)
- of
+build_field_dict(FieldTypes, TypeNames, ET, M, MR, V, D, L) ->
+ build_field_dict(FieldTypes, TypeNames, ET, M, MR, V, D, L, []).
+
+build_field_dict([{type, _, field_type, [{atom, _, Name}, Type]}|Left],
+ TypeNames, ET, M, MR, V, D, L, Acc) ->
+ {T, L1} = t_from_form(Type, TypeNames, ET, M, MR, V, D, L - 1),
+ %% The cached record field type (DeclType) in
+ %% get_mod_record_types() was created with a call similar to TT.
+ %% Using T for the subtype test does not work since any() is not
+ %% always a subset of the field type.
+ TT = t_from_form(Type, ET, M, MR, V),
+ NewAcc = [{Name, Type, T, TT}|Acc],
+ {Dict, L2} =
+ build_field_dict(Left, TypeNames, ET, M, MR, V, D, L1, NewAcc),
+ {Dict, L2};
+build_field_dict([], _TypeNames, _ET, _M, _MR, _V, _D, L, Acc) ->
+ {lists:keysort(1, Acc), L}.
+
+get_mod_record_types([{FieldName, _Abstr, DeclType}|Left1],
+ [{FieldName, TypeForm, ModType, ModTypeTest}|Left2],
+ Acc) ->
+ ModTypeNoVars = subst_all_vars_to_any(ModTypeTest),
+ case t_is_subtype(ModTypeNoVars, DeclType) of
false -> {error, FieldName};
- true -> get_mod_record(Left1, Left2, [{FieldName, ModType}|Acc])
+ true -> get_mod_record_types(Left1, Left2,
+ [{FieldName, TypeForm, ModType}|Acc])
end;
-get_mod_record([{FieldName1, _DeclType} = DT|Left1],
- [{FieldName2, _ModType}|_] = List2,
- Acc) when FieldName1 < FieldName2 ->
- get_mod_record(Left1, List2, [DT|Acc]);
-get_mod_record(DeclFields, [], Acc) ->
- {ok, orddict:from_list(Acc ++ DeclFields)};
-get_mod_record(_, [{FieldName2, _ModType}|_], _Acc) ->
+get_mod_record_types([{FieldName1, _Abstr, _DeclType} = DT|Left1],
+ [{FieldName2, _FormType, _ModType, _TT}|_] = List2,
+ Acc) when FieldName1 < FieldName2 ->
+ get_mod_record_types(Left1, List2, [DT|Acc]);
+get_mod_record_types(Left1, [], Acc) ->
+ {ok, lists:keysort(1, Left1++Acc)};
+get_mod_record_types(_, [{FieldName2, _FormType, _ModType, _TT}|_], _Acc) ->
{error, FieldName2}.
-contains_remote(Type) ->
- TypeNoRemote = subst_all_remote(Type, t_none()),
- not t_is_equal(Type, TypeNoRemote).
-
-fields_from_form([], _TypeNames, _RecDict, _VarDict) ->
- {[], []};
-fields_from_form([{Name, Type}|Tail], TypeNames, RecDict,
- VarDict) ->
- {T, R1} = t_from_form(Type, TypeNames, RecDict, VarDict),
- {F, R2} = fields_from_form(Tail, TypeNames, RecDict, VarDict),
- {[{Name, T}|F], R1 ++ R2}.
-
-list_from_form([], _TypeNames, _RecDict, _VarDict) ->
- {[], []};
-list_from_form([H|Tail], TypeNames, RecDict, VarDict) ->
- {T, R1} = t_from_form(H, TypeNames, RecDict, VarDict),
- {L, R2} = list_from_form(Tail, TypeNames, RecDict, VarDict),
- {[T|L], R1 ++ R2}.
+%% It is important to create a limited version of the record type
+%% since nested record types can otherwise easily result in huge
+%% terms.
+fields_from_form([], _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {[], L};
+fields_from_form([{Name, Abstr, _Type}|Tail], TypeNames, ET, M, MR,
+ V, D, L) ->
+ {T, L1} = t_from_form(Abstr, TypeNames, ET, M, MR, V, D, L),
+ {F, L2} = fields_from_form(Tail, TypeNames, ET, M, MR, V, D, L1),
+ {[{Name, T}|F], L2}.
+
+list_from_form([], _TypeNames, _ET, _M, _MR, _V, _D, L) ->
+ {[], L};
+list_from_form([H|Tail], TypeNames, ET, M, MR, V, D, L) ->
+ {H1, L1} = t_from_form(H, TypeNames, ET, M, MR, V, D, L - 1),
+ {T1, L2} = list_from_form(Tail, TypeNames, ET, M, MR, V, D, L1),
+ {[H1|T1], L2}.
+
+-spec t_var_names([erl_type()]) -> [atom()].
+
+t_var_names([{var, _, Name}|L]) when L =/= '_' ->
+ [Name|t_var_names(L)];
+t_var_names([]) ->
+ [].
-spec t_form_to_string(parse_form()) -> string().
@@ -4505,7 +4495,13 @@ t_form_to_string({type, _L, tuple, Args}) ->
t_form_to_string({type, _L, union, Args}) ->
string:join(t_form_to_string_list(Args), " | ");
t_form_to_string({type, _L, Name, []} = T) ->
- try t_to_string(t_from_form(T))
+ try
+ M = mod,
+ D0 = dict:new(),
+ MR = dict:from_list([{M, D0}]),
+ {T1, _} =
+ t_from_form(T, [], sets:new(), M, MR, D0, _Deep=1000, _ALot=100000),
+ t_to_string(T1)
catch throw:{error, _} -> atom_to_string(Name) ++ "()"
end;
t_form_to_string({user_type, _L, Name, List}) ->
@@ -4556,7 +4552,7 @@ is_erl_type(#c{}) -> true;
is_erl_type(_) -> false.
-spec lookup_record(atom(), type_table()) ->
- 'error' | {'ok', [{atom(), parse_form() | erl_type()}]}.
+ 'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
lookup_record(Tag, RecDict) when is_atom(Tag) ->
case dict:find({record, Tag}, RecDict) of
@@ -4571,7 +4567,7 @@ lookup_record(Tag, RecDict) when is_atom(Tag) ->
end.
-spec lookup_record(atom(), arity(), type_table()) ->
- 'error' | {'ok', [{atom(), erl_type()}]}.
+ 'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
lookup_record(Tag, Arity, RecDict) when is_atom(Tag) ->
case dict:find({record, Tag}, RecDict) of
diff --git a/lib/hipe/doc/src/notes.xml b/lib/hipe/doc/src/notes.xml
index 2d6fd245f7..8d3358533b 100644
--- a/lib/hipe/doc/src/notes.xml
+++ b/lib/hipe/doc/src/notes.xml
@@ -30,6 +30,55 @@
</header>
<p>This document describes the changes made to HiPE.</p>
+<section><title>Hipe 3.11.3</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fix HiPE for ARM when the Erlang VM is compiled for Thumb
+ execution mode. This was a problem on, for example, Ubuntu, which
+ configures its system GCC to generate Thumb code by default.</p>
+ <p>
+ Own Id: OTP-12405</p>
+ </item>
+ <item>
+ <p>
+ Reduced lock contention for dynamic function lookups (such as
+ apply) from HiPE-compiled code.</p>
+ <p>
+ Own Id: OTP-12557</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Fix two bugs in the HiPE compiler regarding floating-point
+ handling, both leading to crashes during compilation. The
+ target-specific code generators failed to handle integer
+ to floating-point conversion instructions with constant
+ operands. The middle-end could use an incorrect
+ representation for copies between floating-point
+ registers.</p>
+ <p>
+ Own Id: OTP-12413</p>
+ </item>
+ <item>
+ <p>
+ Improved error handling when memory allocation for HiPE
+ code fails.</p>
+ <p>
+ Own Id: OTP-12448</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Hipe 3.11.2</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/hipe/icode/hipe_beam_to_icode.erl b/lib/hipe/icode/hipe_beam_to_icode.erl
index 4691662f9f..3e099fcc25 100644
--- a/lib/hipe/icode/hipe_beam_to_icode.erl
+++ b/lib/hipe/icode/hipe_beam_to_icode.erl
@@ -1584,11 +1584,7 @@ gen_put_map_instrs(exists, Op, TempMapVar, Dst, FailLbl, Pairs, Env) ->
end,
{[IsMapCode, TrueLabel, PutInstructions, ReturnLbl], Env1};
gen_put_map_instrs(new, Op, TempMapVar, Dst, new, Pairs, Env) ->
- TrueLabel = mk_label(new),
FailLbl = mk_label(new),
- IsMapCode = hipe_icode:mk_type([TempMapVar], map,
- hipe_icode:label_name(TrueLabel),
- hipe_icode:label_name(FailLbl)),
DstMapVar = mk_var(Dst),
{ReturnLbl, PutInstructions, Env1}
= case Op of
@@ -1596,10 +1592,10 @@ gen_put_map_instrs(new, Op, TempMapVar, Dst, new, Pairs, Env) ->
trans_put_map_assoc(TempMapVar, DstMapVar, Pairs, Env, []);
exact ->
trans_put_map_exact(TempMapVar, DstMapVar,
- hipe_icode:label_name(FailLbl), Pairs, Env, [])
+ none, Pairs, Env, [])
end,
Fail = hipe_icode:mk_fail([hipe_icode:mk_const(badarg)], error),
- {[IsMapCode, TrueLabel, PutInstructions, FailLbl, Fail, ReturnLbl], Env1}.
+ {[PutInstructions, FailLbl, Fail, ReturnLbl], Env1}.
%%-----------------------------------------------------------------------
%% This function generates the instructions needed to insert several
@@ -1629,6 +1625,13 @@ trans_put_map_exact(MapVar, DestMapVar, _FLbl, [], Env, Acc) ->
ReturnLbl = mk_label(new),
GotoReturn = hipe_icode:mk_goto(hipe_icode:label_name(ReturnLbl)),
{ReturnLbl, lists:reverse([GotoReturn, MoveToReturnVar | Acc]), Env};
+trans_put_map_exact(MapVar, DestMapVar, none, [Key, Value | Rest], Env, Acc) ->
+ {MoveKey, KeyVar, Env1} = mk_move_and_var(Key, Env),
+ {MoveVal, ValVar, Env2} = mk_move_and_var(Value, Env1),
+ BifCallPut = hipe_icode:mk_call([MapVar], maps, update,
+ [KeyVar, ValVar, MapVar], remote),
+ Acc1 = [BifCallPut, MoveVal, MoveKey | Acc],
+ trans_put_map_exact(MapVar, DestMapVar, none, Rest, Env2, Acc1);
trans_put_map_exact(MapVar, DestMapVar, FLbl, [Key, Value | Rest], Env, Acc) ->
SuccLbl = mk_label(new),
{MoveKey, KeyVar, Env1} = mk_move_and_var(Key, Env),
diff --git a/lib/hipe/llvm/hipe_llvm_main.erl b/lib/hipe/llvm/hipe_llvm_main.erl
index 0e50c9539b..3c24425828 100644
--- a/lib/hipe/llvm/hipe_llvm_main.erl
+++ b/lib/hipe/llvm/hipe_llvm_main.erl
@@ -465,7 +465,7 @@ remove_temp_folder(Dir, Options) ->
end.
unique_id(FunName, Arity) ->
- integer_to_list(erlang:phash2({FunName, Arity, now()})).
+ integer_to_list(erlang:phash2({FunName, Arity, erlang:unique_integer()})).
unique_folder(FunName, Arity, Options) ->
DirName = "llvm_" ++ unique_id(FunName, Arity) ++ "/",
diff --git a/lib/hipe/main/hipe.app.src b/lib/hipe/main/hipe.app.src
index e81212d4dc..7b6d9e30e3 100644
--- a/lib/hipe/main/hipe.app.src
+++ b/lib/hipe/main/hipe.app.src
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -223,5 +223,5 @@
{registered,[]},
{applications, [kernel,stdlib]},
{env, []},
- {runtime_dependencies, ["syntax_tools-1.6.14","stdlib-2.0","kernel-3.0",
- "erts-6.0","compiler-5.0"]}]}.
+ {runtime_dependencies, ["syntax_tools-1.6.14","stdlib-2.5","kernel-3.0",
+ "erts-7.0","compiler-5.0"]}]}.
diff --git a/lib/hipe/ppc/hipe_rtl_to_ppc.erl b/lib/hipe/ppc/hipe_rtl_to_ppc.erl
index 7dfa56df29..a55fc137c3 100644
--- a/lib/hipe/ppc/hipe_rtl_to_ppc.erl
+++ b/lib/hipe/ppc/hipe_rtl_to_ppc.erl
@@ -102,10 +102,18 @@ conv_insn(I, Map, Data) ->
end.
conv_fconv(I, Map, Data) ->
- %% Dst := (double)Src, where Dst is FP reg and Src is int reg
+ %% Dst := (double)Src, where Dst is FP reg and Src is GP reg or imm
{Dst, Map0} = conv_fpreg(hipe_rtl:fconv_dst(I), Map),
- {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map0), % exclude imm src
- I2 = mk_fconv(Dst, Src),
+ {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map0),
+ I2 =
+ case hipe_ppc:is_temp(Src) of
+ true ->
+ mk_fconv(Dst, Src);
+ false ->
+ Tmp = new_untagged_temp(),
+ mk_li(Tmp, Src,
+ mk_fconv(Dst, Tmp))
+ end,
{I2, Map1, Data}.
mk_fconv(Dst, Src) ->
diff --git a/lib/hipe/rtl/hipe_rtl.erl b/lib/hipe/rtl/hipe_rtl.erl
index bc61bec0bd..2f62dd79ad 100644
--- a/lib/hipe/rtl/hipe_rtl.erl
+++ b/lib/hipe/rtl/hipe_rtl.erl
@@ -413,11 +413,11 @@ rtl_info_update(Rtl, Info) -> Rtl#rtl{info=Info}.
%% move
%%
-mk_move(Dst, Src) -> #move{dst=Dst, src=Src}.
+mk_move(Dst, Src) -> false = is_fpreg(Dst), false = is_fpreg(Src), #move{dst=Dst, src=Src}.
move_dst(#move{dst=Dst}) -> Dst.
-move_dst_update(M, NewDst) -> M#move{dst=NewDst}.
+move_dst_update(M, NewDst) -> false = is_fpreg(NewDst), M#move{dst=NewDst}.
move_src(#move{src=Src}) -> Src.
-move_src_update(M, NewSrc) -> M#move{src=NewSrc}.
+move_src_update(M, NewSrc) -> false = is_fpreg(NewSrc), M#move{src=NewSrc}.
%% is_move(#move{}) -> true;
%% is_move(_) -> false.
@@ -469,7 +469,11 @@ phi_remove_pred(Phi, Pred) ->
case NewArgList of
[Arg] -> %% the phi should be turned into a move instruction
{_Label,Var} = Arg,
- mk_move(phi_dst(Phi), Var);
+ Dst = phi_dst(Phi),
+ case {is_fpreg(Dst), is_fpreg(Var)} of
+ {true, true} -> mk_fmove(Dst, Var);
+ {false, false} -> mk_move(Dst, Var)
+ end;
%% io:format("~nPhi (~w) turned into move (~w) when removing pred ~w~n",[Phi,Move,Pred]),
[_|_] ->
Phi#phi{arglist=NewArgList}
@@ -836,11 +840,11 @@ fp_unop_op(#fp_unop{op=Op}) -> Op.
%% fmove
%%
-mk_fmove(X, Y) -> #fmove{dst=X, src=Y}.
+mk_fmove(X, Y) -> true = is_fpreg(X), true = is_fpreg(Y), #fmove{dst=X, src=Y}.
fmove_dst(#fmove{dst=Dst}) -> Dst.
-fmove_dst_update(M, NewDst) -> M#fmove{dst=NewDst}.
+fmove_dst_update(M, NewDst) -> true = is_fpreg(NewDst), M#fmove{dst=NewDst}.
fmove_src(#fmove{src=Src}) -> Src.
-fmove_src_update(M, NewSrc) -> M#fmove{src=NewSrc}.
+fmove_src_update(M, NewSrc) -> true = is_fpreg(NewSrc), M#fmove{src=NewSrc}.
%%
%% fconv
diff --git a/lib/hipe/rtl/hipe_rtl_binary_match.erl b/lib/hipe/rtl/hipe_rtl_binary_match.erl
index 8831199244..a36a024980 100644
--- a/lib/hipe/rtl/hipe_rtl_binary_match.erl
+++ b/lib/hipe/rtl/hipe_rtl_binary_match.erl
@@ -697,13 +697,22 @@ get_binary_bytes(Binary, BinSize, Base, Offset, Orig,
%%%%%%%%%%%%%%%%%%%%%%%%% UTILS %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
get_base(Orig,Base) ->
- [HeapLbl,REFCLbl,EndLbl] = create_lbls(3),
+ [HeapLbl,REFCLbl,WritableLbl,NotWritableLbl,EndLbl] = create_lbls(5),
+ Flags = hipe_rtl:mk_new_reg_gcsafe(),
+
[hipe_tagscheme:test_heap_binary(Orig, hipe_rtl:label_name(HeapLbl),
hipe_rtl:label_name(REFCLbl)),
HeapLbl,
hipe_rtl:mk_alu(Base, Orig, 'add', hipe_rtl:mk_imm(?HEAP_BIN_DATA-2)),
hipe_rtl:mk_goto(hipe_rtl:label_name(EndLbl)),
REFCLbl,
+ get_field_from_term({proc_bin, flags}, Orig, Flags),
+ hipe_rtl:mk_branch(Flags, 'ne', hipe_rtl:mk_imm(0),
+ hipe_rtl:label_name(WritableLbl),
+ hipe_rtl:label_name(NotWritableLbl)),
+ WritableLbl,
+ hipe_rtl:mk_call([], emasculate_binary, [Orig], [], [], 'not_remote'),
+ NotWritableLbl,
hipe_rtl:mk_load(Base, Orig, hipe_rtl:mk_imm(?PROC_BIN_BYTES-2)),
EndLbl].
@@ -990,19 +999,19 @@ unsigned_bignum(Dst1, Src, TrueLblName) ->
hipe_tagscheme:unsafe_mk_big(Dst1, Src, unsigned),
hipe_rtl:mk_goto(TrueLblName)].
-load_bytes(Dst, Base, Offset, {Signedness, _Endianess},1) ->
+load_bytes(Dst, Base, Offset, {Signedness, _Endianness},1) ->
[hipe_rtl:mk_load(Dst, Base, Offset, byte, Signedness),
hipe_rtl:mk_alu(Offset, Offset, add, hipe_rtl:mk_imm(1))];
-load_bytes(Dst, Base, Offset, {Signedness, Endianess},2) ->
- case Endianess of
+load_bytes(Dst, Base, Offset, {Signedness, Endianness},2) ->
+ case Endianness of
big ->
hipe_rtl_arch:load_big_2(Dst, Base, Offset, Signedness);
little ->
hipe_rtl_arch:load_little_2(Dst, Base, Offset, Signedness)
end;
-load_bytes(Dst, Base, Offset, {Signedness, Endianess},3) ->
+load_bytes(Dst, Base, Offset, {Signedness, Endianness},3) ->
Tmp1 = hipe_rtl:mk_new_reg(),
- case Endianess of
+ case Endianness of
big ->
[hipe_rtl:mk_load(Dst, Base, Offset, byte, Signedness),
hipe_rtl:mk_alu(Offset, Offset, add, hipe_rtl:mk_imm(1)),
@@ -1026,18 +1035,18 @@ load_bytes(Dst, Base, Offset, {Signedness, Endianess},3) ->
hipe_rtl:mk_alu(Dst, Dst, 'or', Tmp1),
hipe_rtl:mk_alu(Offset, Offset, add, hipe_rtl:mk_imm(1))]
end;
-load_bytes(Dst, Base, Offset, {Signedness, Endianess}, 4) ->
- case Endianess of
+load_bytes(Dst, Base, Offset, {Signedness, Endianness}, 4) ->
+ case Endianness of
big ->
hipe_rtl_arch:load_big_4(Dst, Base, Offset, Signedness);
little ->
hipe_rtl_arch:load_little_4(Dst, Base, Offset, Signedness)
end;
-load_bytes(Dst, Base, Offset, {Signedness, Endianess}, X) when X > 1 ->
+load_bytes(Dst, Base, Offset, {Signedness, Endianness}, X) when X > 1 ->
[LoopLbl, EndLbl] = create_lbls(2),
[Tmp1, Limit, TmpOffset] = create_regs(3),
- case Endianess of
+ case Endianness of
big ->
[hipe_rtl:mk_alu(Limit, Offset, add, hipe_rtl:mk_imm(X)),
hipe_rtl:mk_load(Dst, Base, Offset, byte, Signedness),
diff --git a/lib/hipe/sparc/hipe_rtl_to_sparc.erl b/lib/hipe/sparc/hipe_rtl_to_sparc.erl
index dc001f865e..fd21be3ae7 100644
--- a/lib/hipe/sparc/hipe_rtl_to_sparc.erl
+++ b/lib/hipe/sparc/hipe_rtl_to_sparc.erl
@@ -85,17 +85,17 @@ conv_insn(I, Map, Data) ->
end.
conv_fconv(I, Map, Data) ->
- %% Dst := (double)Src, where Dst is FP reg and Src is int reg
- {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map), % exclude imm src
+ %% Dst := (double)Src, where Dst is FP reg and Src is GP reg or imm
+ {Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map),
{Dst, Map2} = conv_fpreg(hipe_rtl:fconv_dst(I), Map1),
I2 = mk_fconv(Src, Dst),
{I2, Map2, Data}.
mk_fconv(Src, Dst) ->
CSP = hipe_sparc:mk_temp(14, 'untagged'), % o6
- Disp = hipe_sparc:mk_simm13(100),
- [hipe_sparc:mk_store('stw', Src, CSP, Disp),
- hipe_sparc:mk_pseudo_fload(CSP, Disp, Dst, true),
+ Offset = 100,
+ mk_store('stw', Src, CSP, Offset) ++
+ [hipe_sparc:mk_pseudo_fload(CSP, hipe_sparc:mk_simm13(Offset), Dst, true),
hipe_sparc:mk_fp_unary('fitod', Dst, Dst)].
conv_fmove(I, Map, Data) ->
diff --git a/lib/hipe/test/bs_SUITE_data/bs_match.erl b/lib/hipe/test/bs_SUITE_data/bs_match.erl
index 8194d878b8..7bc93a316b 100644
--- a/lib/hipe/test/bs_SUITE_data/bs_match.erl
+++ b/lib/hipe/test/bs_SUITE_data/bs_match.erl
@@ -12,7 +12,8 @@
test() ->
Funs = [fun test_aligned/0, fun test_unaligned/0,
- fun test_zero_tail/0, fun test_integer_matching/0],
+ fun test_zero_tail/0, fun test_integer_matching/0,
+ fun test_writable_bin/0],
lists:foreach(fun (F) -> ok = F() end, Funs).
%%-------------------------------------------------------------------
@@ -173,3 +174,14 @@ test_dynamic_integer_matching(N) ->
<<12:N/integer, 0:S>> = <<12:N/integer, 0:S>>,
<<12:N/integer-little, 0:S>> = <<12:N/integer-little, 0:S>>,
ok.
+
+test_writable_bin() ->
+ test_writable_bin(<<>>, 0),
+ ok.
+
+test_writable_bin(Bin, 128) ->
+ Bin;
+test_writable_bin(Bin0, N) when N < 128 ->
+ Bin1 = <<Bin0/binary, N>>,
+ <<_/utf8, _/binary>> = Bin1,
+ test_writable_bin(Bin1, N+1).
diff --git a/lib/hipe/test/maps_SUITE_data/maps_map_size.erl b/lib/hipe/test/maps_SUITE_data/maps_map_size.erl
index 25c8e5d4c7..3cd2d90dfb 100644
--- a/lib/hipe/test/maps_SUITE_data/maps_map_size.erl
+++ b/lib/hipe/test/maps_SUITE_data/maps_map_size.erl
@@ -17,9 +17,9 @@ test() ->
false = map_is_size(M#{ "c" => 2}, 2),
%% Error cases.
- {'EXIT',{badarg,_}} = (catch map_size([])),
- {'EXIT',{badarg,_}} = (catch map_size(<<1,2,3>>)),
- {'EXIT',{badarg,_}} = (catch map_size(1)),
+ {'EXIT',{{badmap,[]},_}} = (catch map_size([])),
+ {'EXIT',{{badmap,<<1,2,3>>},_}} = (catch map_size(<<1,2,3>>)),
+ {'EXIT',{{badmap,1},_}} = (catch map_size(1)),
ok.
map_is_size(M,N) when map_size(M) =:= N -> true;
diff --git a/lib/hipe/test/maps_SUITE_data/maps_map_sort_literals.erl b/lib/hipe/test/maps_SUITE_data/maps_map_sort_literals.erl
index 31abf15d49..ccacbfe5c8 100644
--- a/lib/hipe/test/maps_SUITE_data/maps_map_sort_literals.erl
+++ b/lib/hipe/test/maps_SUITE_data/maps_map_sort_literals.erl
@@ -10,23 +10,25 @@ test() ->
false = #{ c => 1, b => 1, a => 1} < id(#{ c => 1, a => 1}),
%% key order
- true = id(#{ a => 1 }) < id(#{ b => 1}),
- false = id(#{ b => 1 }) < id(#{ a => 1}),
- true = id(#{ a => 1, b => 1, c => 1 }) < id(#{ b => 1, c => 1, d => 1}),
- true = id(#{ b => 1, c => 1, d => 1 }) > id(#{ a => 1, b => 1, c => 1}),
- true = id(#{ c => 1, b => 1, a => 1 }) < id(#{ b => 1, c => 1, d => 1}),
- true = id(#{ "a" => 1 }) < id(#{ <<"a">> => 1}),
- false = id(#{ <<"a">> => 1 }) < id(#{ "a" => 1}),
- false = id(#{ 1 => 1 }) < id(#{ 1.0 => 1}),
- false = id(#{ 1.0 => 1 }) < id(#{ 1 => 1}),
+ true = #{ a => 1 } < id(#{ b => 1}),
+ false = #{ b => 1 } < id(#{ a => 1}),
+ true = #{ a => 1, b => 1, c => 1 } < id(#{ b => 1, c => 1, d => 1}),
+ true = #{ b => 1, c => 1, d => 1 } > id(#{ a => 1, b => 1, c => 1}),
+ true = #{ c => 1, b => 1, a => 1 } < id(#{ b => 1, c => 1, d => 1}),
+ true = #{ "a" => 1 } < id(#{ <<"a">> => 1}),
+ false = #{ <<"a">> => 1 } < id(#{ "a" => 1}),
+ true = #{ 1 => 1 } < id(#{ 1.0 => 1}),
+ false = #{ 1.0 => 1 } < id(#{ 1 => 1}),
%% value order
- true = id(#{ a => 1 }) < id(#{ a => 2}),
- false = id(#{ a => 2 }) < id(#{ a => 1}),
- false = id(#{ a => 2, b => 1 }) < id(#{ a => 1, b => 3}),
- true = id(#{ a => 1, b => 1 }) < id(#{ a => 1, b => 3}),
+ true = #{ a => 1 } < id(#{ a => 2}),
+ false = #{ a => 2 } < id(#{ a => 1}),
+ false = #{ a => 2, b => 1 } < id(#{ a => 1, b => 3}),
+ true = #{ a => 1, b => 1 } < id(#{ a => 1, b => 3}),
+ false = #{ a => 1 } < id(#{ a => 1.0}),
+ false = #{ a => 1.0 } < id(#{ a => 1}),
- true = id(#{ "a" => "hi", b => 134 }) == id(#{ b => 134,"a" => "hi"}),
+ true = #{ "a" => "hi", b => 134 } == id(#{ b => 134,"a" => "hi"}),
%% lists:sort
@@ -34,7 +36,6 @@ test() ->
[#{1:=ok},#{a:=ok},#{"a":=ok},#{<<"a">>:=ok}] = lists:sort([#{"a"=>ok},#{a=>ok},#{1=>ok},#{<<"a">>=>ok}]),
[#{1:=3},#{a:=2},#{"a":=1},#{<<"a">>:=4}] = lists:sort(SortVs),
[#{1:=3},#{a:=2},#{"a":=1},#{<<"a">>:=4}] = lists:sort(lists:reverse(SortVs)),
-
ok.
%% Use this function to avoid compile-time evaluation of an expression.
diff --git a/lib/hipe/test/maps_SUITE_data/maps_put_map_assoc.erl b/lib/hipe/test/maps_SUITE_data/maps_put_map_assoc.erl
index 72ac9ce078..2fe4f204d1 100644
--- a/lib/hipe/test/maps_SUITE_data/maps_put_map_assoc.erl
+++ b/lib/hipe/test/maps_SUITE_data/maps_put_map_assoc.erl
@@ -8,7 +8,7 @@ test() ->
true = assoc_guard(#{}),
false = assoc_guard(not_a_map),
#{a := true} = assoc_update(#{}),
- {'EXIT', {badarg, [{?MODULE, assoc_update, 1, _}|_]}}
+ {'EXIT', {{badmap, not_a_map}, [{?MODULE, assoc_update, 1, _}|_]}}
= (catch assoc_update(not_a_map)),
ok = assoc_guard_clause(#{}),
{'EXIT', {function_clause, [{?MODULE, assoc_guard_clause, _, _}|_]}}
diff --git a/lib/hipe/test/maps_SUITE_data/maps_put_map_exact.erl b/lib/hipe/test/maps_SUITE_data/maps_put_map_exact.erl
index 1cfcd80180..3c85289a36 100644
--- a/lib/hipe/test/maps_SUITE_data/maps_put_map_exact.erl
+++ b/lib/hipe/test/maps_SUITE_data/maps_put_map_exact.erl
@@ -9,9 +9,9 @@ test() ->
false = exact_guard(not_a_map),
true = exact_guard(#{a => false}),
#{a := true} = exact_update(#{a => false}),
- {'EXIT', {badarg, [{?MODULE, exact_update, 1, _}|_]}}
+ {'EXIT', {{badmap, not_a_map}, [{?MODULE, exact_update, 1, _}|_]}}
= (catch exact_update(not_a_map)),
- {'EXIT', {badarg, [{?MODULE, exact_update, 1, _}|_]}}
+ {'EXIT', {{badkey, a}, [{?MODULE, exact_update, 1, _}|_]}}
= (catch exact_update(#{})),
ok = exact_guard_clause(#{a => yes}),
{'EXIT', {function_clause, [{?MODULE, exact_guard_clause, _, _}|_]}}
diff --git a/lib/hipe/test/maps_SUITE_data/maps_update_assoc.erl b/lib/hipe/test/maps_SUITE_data/maps_update_assoc.erl
index cc7c1353de..99228a1927 100644
--- a/lib/hipe/test/maps_SUITE_data/maps_update_assoc.erl
+++ b/lib/hipe/test/maps_SUITE_data/maps_update_assoc.erl
@@ -14,7 +14,7 @@ test() ->
%% Errors cases.
BadMap = id(badmap),
- {'EXIT',{badarg,_}} = (catch BadMap#{nonexisting=>val}),
+ {'EXIT',{{badmap,badmap},_}} = (catch BadMap#{nonexisting=>val}),
ok.
diff --git a/lib/hipe/test/maps_SUITE_data/maps_update_exact.erl b/lib/hipe/test/maps_SUITE_data/maps_update_exact.erl
index 6e5acb3283..1c38820a7c 100644
--- a/lib/hipe/test/maps_SUITE_data/maps_update_exact.erl
+++ b/lib/hipe/test/maps_SUITE_data/maps_update_exact.erl
@@ -21,11 +21,11 @@ test() ->
1.0 => new_val4 },
%% Errors cases.
- {'EXIT',{badarg,_}} = (catch ((id(nil))#{ a := b })),
- {'EXIT',{badarg,_}} = (catch M0#{nonexisting:=val}),
- {'EXIT',{badarg,_}} = (catch M0#{1.0:=v,1.0=>v2}),
- {'EXIT',{badarg,_}} = (catch M0#{42.0:=v,42:=v2}),
- {'EXIT',{badarg,_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
+ {'EXIT',{{badmap,nil},_}} = (catch ((id(nil))#{ a := b })),
+ {'EXIT',{{badkey,nonexisting},_}} = (catch M0#{nonexisting:=val}),
+ {'EXIT',{{badkey,_},_}} = (catch M0#{1.0:=v,1.0=>v2}),
+ {'EXIT',{{badkey,_},_}} = (catch M0#{42.0:=v,42:=v2}),
+ {'EXIT',{{badkey,_},_}} = (catch M0#{42=>v1,42.0:=v2,42:=v3}),
ok.
%% Use this function to avoid compile-time evaluation of an expression.
diff --git a/lib/hipe/test/maps_SUITE_data/maps_update_map_expressions.erl b/lib/hipe/test/maps_SUITE_data/maps_update_map_expressions.erl
index 181e3f18f7..213fc33d97 100644
--- a/lib/hipe/test/maps_SUITE_data/maps_update_map_expressions.erl
+++ b/lib/hipe/test/maps_SUITE_data/maps_update_map_expressions.erl
@@ -23,9 +23,9 @@ test() ->
#{ "a" := b } = F(),
- %% Error cases, FIXME: should be 'badmap'?
- {'EXIT',{badarg,_}} = (catch (id(<<>>))#{ a := 42, b => 2 }),
- {'EXIT',{badarg,_}} = (catch (id([]))#{ a := 42, b => 2 }),
+ %% Error cases.
+ {'EXIT',{{badmap,<<>>},_}} = (catch (id(<<>>))#{ a := 42, b => 2 }),
+ {'EXIT',{{badmap,[]},_}} = (catch (id([]))#{ a := 42, b => 2 }),
ok.
%% Use this function to avoid compile-time evaluation of an expression.
diff --git a/lib/hipe/tools/hipe_timer.erl b/lib/hipe/tools/hipe_timer.erl
index 03cc358f17..5f44bc066d 100644
--- a/lib/hipe/tools/hipe_timer.erl
+++ b/lib/hipe/tools/hipe_timer.erl
@@ -46,27 +46,27 @@ tr(F) ->
{R,{WT-EWT,(RT-ERT)/1000}}.
empty_time() ->
- {WT1,WT2,WT3} = erlang:now(),
+ WTA = erlang:monotonic_time(),
{A,_} = erlang:statistics(runtime),
- {WT12,WT22,WT32} = erlang:now(),
+ WTB = erlang:monotonic_time(),
{B,_} = erlang:statistics(runtime),
- {(WT12-WT1)*1000000+(WT22-WT2)+(WT32-WT3)/1000000,B-A}.
+ {(WTB-WTA)/erlang:convert_time_unit(1, seconds, native),B-A}.
time(F) ->
- {WT1,WT2,WT3} = erlang:now(),
+ WTA = erlang:monotonic_time(),
{A,_} = erlang:statistics(runtime),
F(),
- {WT12,WT22,WT32} = erlang:now(),
+ WTB = erlang:monotonic_time(),
{B,_} = erlang:statistics(runtime),
- {(WT12-WT1)*1000000+(WT22-WT2)+(WT32-WT3)/1000000,B-A}.
+ {(WTB-WTA)/erlang:convert_time_unit(1, seconds, native),B-A}.
timer(F) ->
- {WT1,WT2,WT3} = erlang:now(),
+ WTA = erlang:monotonic_time(),
{A,_} = erlang:statistics(runtime),
R = F(),
- {WT12,WT22,WT32} = erlang:now(),
+ WTB = erlang:monotonic_time(),
{B,_} = erlang:statistics(runtime),
- {R,{(WT12-WT1)*1000000+(WT22-WT2)+(WT32-WT3)/1000000,B-A}}.
+ {R,{(WTB-WTA)/erlang:convert_time_unit(1, seconds, native),B-A}}.
advanced(_Fun, I) when I < 2 -> false;
advanced(Fun, Iterations) ->
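The three rewritten hipe_timer functions above all follow the same pattern: take two erlang:monotonic_time/0 samples around the measured work and divide the difference by the number of native time units per second. A minimal sketch of that pattern, with a made-up module and function name, assuming an OTP 18 runtime where erlang:convert_time_unit/3 accepts the seconds unit:

-module(timer_sketch).
-export([wall_seconds/1]).

%% Run F and return {Result, ElapsedWallClockSeconds}.
wall_seconds(F) when is_function(F, 0) ->
    T0 = erlang:monotonic_time(),
    R  = F(),
    T1 = erlang:monotonic_time(),
    %% monotonic_time/0 returns native units; one second equals
    %% convert_time_unit(1, seconds, native) native units.
    {R, (T1 - T0) / erlang:convert_time_unit(1, seconds, native)}.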
diff --git a/lib/hipe/vsn.mk b/lib/hipe/vsn.mk
index 4cf09830cb..e507ae933f 100644
--- a/lib/hipe/vsn.mk
+++ b/lib/hipe/vsn.mk
@@ -1 +1 @@
-HIPE_VSN = 3.11.2
+HIPE_VSN = 3.12
diff --git a/lib/hipe/x86/hipe_rtl_to_x86.erl b/lib/hipe/x86/hipe_rtl_to_x86.erl
index d77e4fed3b..36da2f4d44 100644
--- a/lib/hipe/x86/hipe_rtl_to_x86.erl
+++ b/lib/hipe/x86/hipe_rtl_to_x86.erl
@@ -236,7 +236,7 @@ conv_insn(I, Map, Data) ->
#fconv{} ->
{Dst, Map0} = conv_dst(hipe_rtl:fconv_dst(I), Map),
{[], Src, Map1} = conv_src(hipe_rtl:fconv_src(I), Map0),
- I2 = [hipe_x86:mk_fmove(Src, Dst)],
+ I2 = conv_fconv(Dst, Src),
{I2, Map1, Data};
X ->
%% gctest??
@@ -712,6 +712,19 @@ vmap_lookup(Map, Key) ->
vmap_bind(Map, Key, Val) ->
gb_trees:insert(Key, Val, Map).
+%%% Finalise the conversion of an Integer-to-Float operation.
+
+conv_fconv(Dst, Src) ->
+ case hipe_x86:is_imm(Src) of
+ false ->
+ [hipe_x86:mk_fmove(Src, Dst)];
+ true ->
+ %% cvtsi2sd does not allow src to be an immediate
+ Tmp = new_untagged_temp(),
+ [hipe_x86:mk_move(Src, Tmp),
+ hipe_x86:mk_fmove(Tmp, Dst)]
+ end.
+
%%% Finalise the conversion of a 2-address FP operation.
conv_fp_unary(Dst, Src, FpUnOp) ->
diff --git a/lib/ic/test/java_client_erl_server_SUITE.erl b/lib/ic/test/java_client_erl_server_SUITE.erl
index cbcf32515e..6ac08fd0fe 100644
--- a/lib/ic/test/java_client_erl_server_SUITE.erl
+++ b/lib/ic/test/java_client_erl_server_SUITE.erl
@@ -280,11 +280,7 @@ classpath(Dir) ->
Dir++PS++
filename:join([code:lib_dir(ic),"priv","ic.jar"])++PS++
filename:join([code:lib_dir(jinterface),"priv","OtpErlang.jar"])++PS++
- case os:getenv("CLASSPATH") of
- false -> "";
- Classpath -> Classpath
- end.
-
+ os:getenv("CLASSPATH", "").
cmd(Cmd) ->
PortOpts = [{line,80},eof,exit_status,stderr_to_stdout],
diff --git a/lib/inets/doc/src/Makefile b/lib/inets/doc/src/Makefile
index 1a8e1c7ca8..961bfa838d 100644
--- a/lib/inets/doc/src/Makefile
+++ b/lib/inets/doc/src/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1997-2012. All Rights Reserved.
+# Copyright Ericsson AB 1997-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -52,6 +52,7 @@ XML_REF3_FILES = \
httpc.xml\
httpd.xml \
httpd_conf.xml \
+ httpd_custom_api.xml \
httpd_socket.xml \
httpd_util.xml \
mod_alias.xml \
diff --git a/lib/inets/doc/src/http_server.xml b/lib/inets/doc/src/http_server.xml
index e3b763b4f3..65e89db391 100644
--- a/lib/inets/doc/src/http_server.xml
+++ b/lib/inets/doc/src/http_server.xml
@@ -46,8 +46,7 @@
Layer), ESI (Erlang Scripting Interface), CGI (Common Gateway
Interface), User Authentication(using Mnesia, dets or plain text
database), Common Logfile Format (with or without disk_log(3)
- support), URL Aliasing, Action Mappings, Directory Listings and SSI
- (Server-Side Includes).</p>
+ support), URL Aliasing, Action Mappings, and Directory Listings.</p>
<p>The configuration of the server is provided as an erlang
property list, and for backwards compatibility also a configuration
@@ -478,170 +477,9 @@ http://your.server.org/eval?httpd_example:print(atom_to_list(apply(erlang,halt,[
</p>
<p><em>[date]</em> access to <em>path</em> failed for
<em>remotehost</em>, reason: <em>reason</em></p>
-
- <marker id="ssi"></marker>
</section>
-
+
<section>
- <title>Server Side Includes</title>
- <p>Server Side Includes enables the server to run code embedded
- in HTML pages to generate the response to the client.</p>
- <note>
- <p>Having the server parse HTML pages is a double edged sword!
- It can be costly for a heavily loaded server to perform
- parsing of HTML pages while sending them. Furthermore, it can
- be considered a security risk to have average users executing
- commands in the name of the Erlang node user. Carefully
- consider these items before activating server-side includes.</p>
- </note>
-
- <section>
- <marker id="ssi_setup"></marker>
- <title>SERVER-SIDE INCLUDES (SSI) SETUP</title>
- <p>The server must be told which filename extensions to be used
- for the parsed files. These files, while very similar to HTML,
- are not HTML and are thus not treated the same. Internally, the
- server uses the magic MIME type <c>text/x-server-parsed-html</c>
- to identify parsed documents. It will then perform a format
- conversion to change these files into HTML for the
- client. Update the <c>mime.types</c> file, as described in the
- Mime Type Settings, to tell the server which extension to use
- for parsed files, for example:
- </p>
- <pre>
- text/x-server-parsed-html shtml shtm
- </pre>
- <p>This makes files ending with <c>.shtml</c> and <c>.shtm</c>
- into parsed files. Alternatively, if the performance hit is not a
- problem, <em>all</em> HTML pages can be marked as parsed:
- </p>
- <pre>
- text/x-server-parsed-html html htm
- </pre>
- </section>
-
- <section>
- <marker id="ssi_format"></marker>
- <title>Server-Side Includes (SSI) Format</title>
- <p>All server-side include directives to the server are formatted
- as SGML comments within the HTML page. This is in case the
- document should ever find itself in the client's hands
- unparsed. Each directive has the following format:
- </p>
- <pre>
- &lt;!--#command tag1="value1" tag2="value2" --&gt;
- </pre>
- <p>Each command takes different arguments, most only accept one
- tag at a time. Here is a breakdown of the commands and their
- associated tags:
- </p>
- <p>The config directive controls various aspects of the
- file parsing. There are two valid tags:
- </p>
- <taglist>
- <tag><c>errmsg</c></tag>
- <item>
- <p>controls the message sent back to the client if an
- error occurred while parsing the document. All errors are
- logged in the server's error log.</p>
- </item>
- <tag><c>sizefmt</c></tag>
- <item>
- <p>determines the format used to display the size of
- a file. Valid choices are <c>bytes</c> or
- <c>abbrev</c>. <c>bytes</c> for a formatted byte count
- or <c>abbrev</c> for an abbreviated version displaying
- the number of kilobytes.</p>
- </item>
- </taglist>
- <p>The include directory
- will insert the text of a document into the parsed
- document. This command accepts two tags:</p>
- <taglist>
- <tag><c>virtual</c></tag>
- <item>
- <p>gives a virtual path to a document on the
- server. Only normal files and other parsed documents can
- be accessed in this way.</p>
- </item>
- <tag><c>file</c></tag>
- <item>
- <p>gives a pathname relative to the current
- directory. <c>../</c> cannot be used in this pathname, nor
- can absolute paths. As above, you can send other parsed
- documents, but you cannot send CGI scripts.</p>
- </item>
- </taglist>
- <p>The echo directive prints the value of one of the include
- variables (defined below). The only valid tag to this
- command is <c>var</c>, whose value is the name of the
- variable you wish to echo.</p>
- <p>The fsize directive prints the size of the specified
- file. Valid tags are the same as with the <c>include</c>
- command. The resulting format of this command is subject
- to the <c>sizefmt</c> parameter to the <c>config</c>
- command.</p>
- <p>The lastmod directive prints the last modification date of
- the specified file. Valid tags are the same as with the
- <c>include</c> command.</p>
- <p>The exec directive executes a given shell command or CGI
- script. Valid tags are:</p>
- <taglist>
- <tag><c>cmd</c></tag>
- <item>
- <p>executes the given string using <c>/bin/sh</c>. All
- of the variables defined below are defined, and can be
- used in the command.</p>
- </item>
- <tag><c>cgi</c></tag>
- <item>
- <p>executes the given virtual path to a CGI script and
- includes its output. The server does not perform error
- checking on the script output.</p>
- </item>
- </taglist>
- </section>
-
- <section>
- <marker id="ssi_environment_variables"></marker>
- <title>Server-Side Includes (SSI) Environment Variables</title>
- <p>A number of variables are made available to parsed
- documents. In addition to the CGI variable set, the following
- variables are made available:
- </p>
- <taglist>
- <tag><c>DOCUMENT_NAME</c></tag>
- <item>
- <p>The current filename.</p>
- </item>
- <tag><c>DOCUMENT_URI</c></tag>
- <item>
- <p>The virtual path to this document (such as
- <c>/docs/tutorials/foo.shtml</c>).</p>
- </item>
- <tag><c>QUERY_STRING_UNESCAPED</c></tag>
- <item>
- <p>The unescaped version of any search query the client
- sent, with all shell-special characters escaped with
- <c>\</c>.</p>
- </item>
- <tag><c>DATE_LOCAL</c></tag>
- <item>
- <p>The current date, local time zone.</p>
- </item>
- <tag><c>DATE_GMT</c></tag>
- <item>
- <p>Same as DATE_LOCAL but in Greenwich mean time.</p>
- </item>
- <tag><c>LAST_MODIFIED</c></tag>
- <item>
- <p>The last modification date of the current document.</p>
- </item>
- </taglist>
- </section>
- </section>
-
- <section>
<title>The Erlang Web Server API</title>
<p>The process of handling a HTTP request involves several steps
such as:</p>
@@ -907,28 +745,6 @@ start() ->
</taglist>
</section>
- <section>
- <title>mod_include - SSI</title>
- <p>This module makes it possible to expand "macros" embedded in
- HTML pages before they are delivered to the client, that is
- Server-Side Includes (SSI).
- </p>
- <p>Uses the following Erlang Webserver API interaction data:
- </p>
- <list type="bulleted">
- <item>real_name - from mod_alias</item>
- <item>remote_user - from mod_auth</item>
- </list>
- <p>Exports the following Erlang Webserver API interaction data:
- </p>
- <taglist>
- <tag><c>{mime_type, MimeType}</c></tag>
- <item>The file suffix of the incoming URL mapped into a
- <c>MimeType</c> as defined in the Mime Type Settings
- section.</item>
- </taglist>
- </section>
-
<section>
<title>mod_log - Logging Using Text Files.</title>
<p>Standard logging using the "Common Logfile Format" and text
diff --git a/lib/inets/doc/src/http_uri.xml b/lib/inets/doc/src/http_uri.xml
index e64c375bba..acbd79b201 100644
--- a/lib/inets/doc/src/http_uri.xml
+++ b/lib/inets/doc/src/http_uri.xml
@@ -63,6 +63,7 @@ host() = string()
port() = pos_integer()
path() = string() - Representing a file path or directory path
query() = string()
+fragment() = string()
]]></code>
<marker id="scheme_defaults"></marker>
@@ -92,13 +93,16 @@ query() = string()
<v>URI = uri() </v>
<v>Options = [Option] </v>
<v>Option = {ipv6_host_with_brackets, boolean()} |
- {scheme_defaults, scheme_defaults()}]</v>
- <v>Result = {Scheme, UserInfo, Host, Port, Path, Query}</v>
+ {scheme_defaults, scheme_defaults()} |
+ {fragment, boolean()}]</v>
+ <v>Result = {Scheme, UserInfo, Host, Port, Path, Query} |
+ {Scheme, UserInfo, Host, Port, Path, Query, Fragment}</v>
<v>UserInfo = user_info()</v>
<v>Host = host()</v>
<v>Port = pos_integer()</v>
<v>Path = path()</v>
<v>Query = query()</v>
+ <v>Fragment = fragment()</v>
<v>Reason = term() </v>
</type>
<desc>
@@ -111,6 +115,9 @@ query() = string()
a scheme not found in the scheme defaults) a port number must be
provided or else the parsing will fail. </p>
+ <p>If the fragment option is true, the URI fragment will be returned as
+ part of the parsing result, otherwise it is completely ignored.</p>
+
<marker id="encode"></marker>
</desc>
</func>
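Assuming the behaviour described above, a shell session could look roughly like the following; the URI and the returned strings are illustrative only (the query is assumed to keep its leading "?" and the fragment its leading "#"):

1> http_uri:parse("http://www.erlang.org/doc?q=1#top", [{fragment, true}]).
{ok,{http,[],"www.erlang.org",80,"/doc","?q=1","#top"}}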
diff --git a/lib/inets/doc/src/httpc.xml b/lib/inets/doc/src/httpc.xml
index 4178cb7d4c..6984408932 100644
--- a/lib/inets/doc/src/httpc.xml
+++ b/lib/inets/doc/src/httpc.xml
@@ -366,7 +366,7 @@ filename() = string()
<tag><c><![CDATA[receiver]]></c></tag>
<item>
<p>Defines how the client will deliver the result of an
- asynchroneous request (<c>sync</c> has the value
+ asynchronous request (<c>sync</c> has the value
<c>false</c>). </p>
<taglist>
diff --git a/lib/inets/doc/src/httpd.xml b/lib/inets/doc/src/httpd.xml
index 4ca038cc99..435f99ee23 100644
--- a/lib/inets/doc/src/httpd.xml
+++ b/lib/inets/doc/src/httpd.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1997</year><year>2013</year>
+ <year>1997</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -204,7 +204,15 @@
<marker id="props_limit"></marker>
<p><em>Limit properties</em> </p>
- <taglist>
+ <taglist>
+
+ <marker id="prop_customize"></marker>
+ <tag>{customize, atom()}</tag>
+ <item>
+ <p>A callback module to customize the behaviour of the inets HTTP server,
+ see <seealso marker="http_custom_api"> httpd_custom_api</seealso>.</p>
+ </item>
+
<marker id="prop_disable_chunked_encoding"></marker>
<tag>{disable_chunked_transfer_encoding_send, boolean()}</tag>
<item>
@@ -249,7 +257,16 @@
<p>Limits the size of the message header of HTTP request.
Defaults to 10240. </p>
</item>
-
+
+ <marker id="prop_max_content_length"></marker>
+ <tag>{max_content_length, integer()}</tag>
+ <item>
+ <p>Maximum Content-Length in an incoming request, in bytes. Requests
+ with content larger than this are answered with Status 413.
+ Defaults to 100000000 (100 MB).
+ </p>
+ </item>
+
<marker id="prop_max_uri"></marker>
<tag>{max_uri_size, integer()}</tag>
<item>
@@ -306,7 +323,7 @@ text/plain asc txt
</item>
<marker id="prop_server_tokens"></marker>
- <tag>{server_tokens, prod|major|minor|minimal|os|full|{private, string()}}</tag>
+ <tag>{server_tokens, none|prod|major|minor|minimal|os|full|{private, string()}}</tag>
<item>
<p>ServerTokens defines how the value of the server header
should look. </p>
@@ -314,6 +331,7 @@ text/plain asc txt
here is what the server header string could look like for
the different values of server-tokens: </p>
<pre>
+none "" % A Server: header will not be generated
prod "inets"
major "inets/5"
minor "inets/5.8"
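Putting the new properties together, a hedged sketch of a service configuration; the port, paths, and the callback module name (my_httpd_custom) are placeholders and not part of this change:

inets:start(httpd, [{port,            8080},
                    {server_name,     "example"},
                    {server_root,     "/tmp/server_root"},
                    {document_root,   "/tmp/htdocs"},
                    {server_tokens,   none},             % suppress the Server: header
                    {max_content_length, 100000000},     % answer larger bodies with status 413
                    {customize,       my_httpd_custom}   % see httpd_custom_api
                   ]).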
diff --git a/lib/inets/doc/src/httpd_conf.xml b/lib/inets/doc/src/httpd_conf.xml
index 3ef03966a7..60fc2f135e 100644
--- a/lib/inets/doc/src/httpd_conf.xml
+++ b/lib/inets/doc/src/httpd_conf.xml
@@ -97,7 +97,7 @@
<v>FilePath = string()</v>
<v>Result = {ok,Directory} | {error,Reason}</v>
<v>Directory = string()</v>
- <v>Reason = string() | enoent | eaccess | enotdir | FileInfo</v>
+ <v>Reason = string() | enoent | eacces | enotdir | FileInfo</v>
<v>FileInfo = File info record</v>
</type>
<desc>
@@ -105,7 +105,7 @@
<p><c>is_directory/1</c> checks if <c>FilePath</c> is a
directory in which case it is returned. Please read
<c>file(3)</c> for a description of <c>enoent</c>,
- <c>eaccess</c> and <c>enotdir</c>. The definition of
+ <c>eacces</c> and <c>enotdir</c>. The definition of
the file info record can be found by including <c>file.hrl</c>
from the kernel application, see file(3).</p>
@@ -120,14 +120,14 @@
<v>FilePath = string()</v>
<v>Result = {ok,File} | {error,Reason}</v>
<v>File = string()</v>
- <v>Reason = string() | enoent | eaccess | enotdir | FileInfo</v>
+ <v>Reason = string() | enoent | eacces | enotdir | FileInfo</v>
<v>FileInfo = File info record</v>
</type>
<desc>
<marker id="is_file"></marker>
<p><c>is_file/1</c> checks if <c>FilePath</c> is a regular
file in which case it is returned. Read <c>file(3)</c> for a
- description of <c>enoent</c>, <c>eaccess</c> and
+ description of <c>enoent</c>, <c>eacces</c> and
<c>enotdir</c>. The definition of the file info record can be
found by including <c>file.hrl</c> from the kernel application,
see file(3).</p>
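A brief illustration of the documented return values (a hypothetical shell session; the paths are placeholders):

1> httpd_conf:is_file("/etc/hosts").
{ok,"/etc/hosts"}
2> httpd_conf:is_file("/nonexistent").
{error,enoent}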
diff --git a/lib/inets/doc/src/httpd_custom_api.xml b/lib/inets/doc/src/httpd_custom_api.xml
new file mode 100644
index 0000000000..faf1d277df
--- /dev/null
+++ b/lib/inets/doc/src/httpd_custom_api.xml
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2015</year><year>2015</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>httpd_custom_api</title>
+ <file>httpd_custom_api.xml</file>
+ </header>
+ <module>httpd_custom_api</module>
+ <modulesummary>Behaviour with optional callbacks to customize the inets HTTP server.</modulesummary>
+ <description>
+ <p> The module implementing this behaviour shall be supplied to the server's
+ configuration with the option <seealso marker="httpd:prop_customize"> customize</seealso>.</p>
+
+ </description>
+ <funcs>
+ <func>
+ <name>response_header({HeaderName, HeaderValue}) -> {true, Header} | false </name>
+ <fsummary>Filter and possibly alter HTTP response headers.</fsummary>
+ <type>
+ <v>Header = {HeaderName :: string(), HeaderValue::string()}</v>
+ <d>The header name will be in lower case and should not be altered.</d>
+ </type>
+ <desc>
+ <p> Filter and possibly alter HTTP response headers before they are sent to the client.
+ </p>
+ </desc>
+ </func>
+
+ <func>
+ <name>request_header({HeaderName, HeaderValue}) -> {true, Header} | false </name>
+ <fsummary>Filter and possibly alter HTTP request headers.</fsummary>
+ <type>
+ <v>Header = {HeaderName :: string(), HeaderValue::string()}</v>
+ <d>The header name will be in lower case and should not be altered.</d>
+ </type>
+ <desc>
+ <p> Filter and possibly alter HTTP request headers before they are processed by the server.
+ </p>
+ </desc>
+ </func>
+ </funcs>
+</erlref>
+
+
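As an illustration of the behaviour above, a minimal customization module; the module name is made up here and would be passed via the customize property of the httpd service configuration:

-module(my_httpd_custom).
-export([response_header/1, request_header/1]).

%% Drop the "server" response header, keep everything else unchanged.
%% Header names arrive in lower case, as documented above.
response_header({"server", _Value}) ->
    false;
response_header({_Name, _Value} = Header) ->
    {true, Header}.

%% Pass request headers through untouched.
request_header({_Name, _Value} = Header) ->
    {true, Header}.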
diff --git a/lib/inets/doc/src/notes.xml b/lib/inets/doc/src/notes.xml
index fb7034498c..f563a8c4b0 100644
--- a/lib/inets/doc/src/notes.xml
+++ b/lib/inets/doc/src/notes.xml
@@ -32,7 +32,132 @@
<file>notes.xml</file>
</header>
- <section><title>Inets 5.10.4</title>
+ <section><title>Inets 5.10.9</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Add behaviour with optional callbacks to customize the
+ inets HTTP server.</p>
+ <p>
+ Own Id: OTP-12776</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Inets 5.10.8</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Reject messages with a Content-Length less than 0</p>
+ <p>
+ Own Id: OTP-12739 Aux Id: seq12860 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Inets 5.10.7</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ New value in <c>server_tokens</c> config for limiting
+ banner grabbing attempts. </p>
+ <p>
+ By setting <c>{server_tokens, none}</c> in
+ <c>ServiceConfig</c> for <c>inets:start(httpd,
+ ServiceConfig)</c>, the "Server:" header will not be set
+ in messages from the server.</p>
+ <p>
+ Own Id: OTP-12661 Aux Id: seq12840 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Inets 5.10.6</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ inets: correctly parse 'Set-Cookie' headers with empty
+ values</p>
+ <p>
+ httpc_cookie should parse cookies with empty values and
+ no attributes set in the 'Set-Cookie' headers.</p>
+ <p>
+ Own Id: OTP-12455</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Add parsing of URI fragments to http_uri:parse</p>
+ <p>
+ This fixes a bug in httpc where redirection URIs could
+ lead to bad requests if they contained fragments.</p>
+ <p>
+ Own Id: OTP-12398</p>
+ </item>
+ <item>
+ <p>
+ httpc: http client now ignores invalid set-cookie headers</p>
+ <p>
+ Own Id: OTP-12430</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Inets 5.10.5</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ mod_alias now handles https-URIs properly</p>
+ <p>
+            Consistent view of the configuration parameter
+            keep_alive_timeout: httpd:info/[1,2] now presents it in
+            the same unit as it was given.</p>
+ <p>
+ Own Id: OTP-12436 Aux Id: seq12786 </p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+            Gracefully handle invalid content-length headers instead
+ of crashing in list_to_integer.</p>
+ <p>
+ Own Id: OTP-12429</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Inets 5.10.4</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
diff --git a/lib/inets/doc/src/ref_man.xml b/lib/inets/doc/src/ref_man.xml
index aaedf330b4..3afb020431 100644
--- a/lib/inets/doc/src/ref_man.xml
+++ b/lib/inets/doc/src/ref_man.xml
@@ -4,7 +4,7 @@
<application xmlns:xi="http://www.w3.org/2001/XInclude">
<header>
<copyright>
- <year>1997</year><year>2013</year>
+ <year>1997</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -39,6 +39,7 @@
<xi:include href="httpc.xml"/>
<xi:include href="httpd.xml"/>
<xi:include href="httpd_conf.xml"/>
+ <xi:include href="httpd_custom_api.xml"/>
<xi:include href="httpd_socket.xml"/>
<xi:include href="httpd_util.xml"/>
<xi:include href="mod_alias.xml"/>
diff --git a/lib/inets/examples/httpd_load_test/hdlt_random_html.erl b/lib/inets/examples/httpd_load_test/hdlt_random_html.erl
index e3a572c61f..59073a3d23 100644
--- a/lib/inets/examples/httpd_load_test/hdlt_random_html.erl
+++ b/lib/inets/examples/httpd_load_test/hdlt_random_html.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -48,7 +48,10 @@ stop() ->
".
content(WorkSim, SzSim) ->
- {A, B, C} = now(),
+ {A, B, C} = {erlang:phash2([node()]),
+ inets_time_compat:monotonic_time(),
+ inets_time_compat:unique_integer()},
+
random:seed(A, B, C),
lists:sort([random:uniform(X) || X <- lists:seq(1, WorkSim)]),
lists:flatten(lists:duplicate(SzSim, "Dummy data ")).
diff --git a/lib/inets/examples/httpd_load_test/hdlt_slave.erl b/lib/inets/examples/httpd_load_test/hdlt_slave.erl
index 52af9b5b90..41361418bc 100644
--- a/lib/inets/examples/httpd_load_test/hdlt_slave.erl
+++ b/lib/inets/examples/httpd_load_test/hdlt_slave.erl
@@ -180,7 +180,7 @@ ssh_slave_start(Host, ErlCmd) ->
?DEBUG("ssh_exec_erl -> done", []),
{ok, Connection, Channel};
Error3 ->
- ?LOG("failed exec comand: ~p", [Error3]),
+ ?LOG("failed exec command: ~p", [Error3]),
throw({error, {ssh_exec_failed, Error3}})
end.
diff --git a/lib/inets/src/ftp/ftp.erl b/lib/inets/src/ftp/ftp.erl
index 8e51b1be5a..7eebe8d5bf 100644
--- a/lib/inets/src/ftp/ftp.erl
+++ b/lib/inets/src/ftp/ftp.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -2176,16 +2176,16 @@ handle_caller(#state{caller = {transfer_data, {Cmd, Bin, RemoteFile}}} =
%% Connect to FTP server at Host (default is TCP port 21)
%% in order to establish a control connection.
setup_ctrl_connection(Host, Port, Timeout, State) ->
- MsTime = millisec_time(),
+ MsTime = inets_time_compat:monotonic_time(),
case connect(Host, Port, Timeout, State) of
{ok, IpFam, CSock} ->
NewState = State#state{csock = {tcp, CSock}, ipfamily = IpFam},
activate_ctrl_connection(NewState),
- case Timeout - (millisec_time() - MsTime) of
+ case Timeout - inets_lib:millisec_passed(MsTime) of
Timeout2 when (Timeout2 >= 0) ->
{ok, NewState#state{caller = open}, Timeout2};
_ ->
- %% Oups: Simulate timeout
+                    %% Oops: Simulate timeout
{ok, NewState#state{caller = open}, 0}
end;
Error ->
@@ -2501,10 +2501,6 @@ progress_report(Report, #state{progress = ProgressPid}) ->
ftp_progress:report(ProgressPid, Report).
-millisec_time() ->
- {A,B,C} = erlang:now(),
- A*1000000000+B*1000+(C div 1000).
-
peername({tcp, Socket}) -> inet:peername(Socket);
peername({ssl, Socket}) -> ssl:peername(Socket).
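Note on the change above: the elapsed-time bookkeeping now uses
inets_time_compat:monotonic_time/0 together with inets_lib:millisec_passed/1
(both introduced elsewhere in this patch). A rough sketch of what such an
elapsed-milliseconds helper amounts to, assuming native monotonic time units
(this is an assumption, not the actual inets_lib code):

%% Milliseconds passed since StartNative, a value obtained earlier from
%% erlang:monotonic_time().
millisec_passed(StartNative) ->
    erlang:convert_time_unit(erlang:monotonic_time() - StartNative,
                             native, milli_seconds).
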
diff --git a/lib/inets/src/http_client/httpc_cookie.erl b/lib/inets/src/http_client/httpc_cookie.erl
index 134115bdfa..35778d3ed5 100644
--- a/lib/inets/src/http_client/httpc_cookie.erl
+++ b/lib/inets/src/http_client/httpc_cookie.erl
@@ -115,8 +115,8 @@ maybe_dets_close(Db) ->
%%--------------------------------------------------------------------
-%% Func: insert(CookieDb) -> ok
-%% Purpose: Close the cookie db
+%% Func: insert(CookieDb, Cookie) -> ok
+%% Purpose: Insert cookies into the cookie db
%%--------------------------------------------------------------------
%% If no persistent cookie database is defined we
@@ -334,9 +334,23 @@ add_domain(Str, #http_cookie{domain_default = true}) ->
add_domain(Str, #http_cookie{domain = Domain}) ->
Str ++ "; $Domain=" ++ Domain.
+is_set_cookie_valid("") ->
+ %% an empty Set-Cookie header is not valid
+ false;
+is_set_cookie_valid([$=|_]) ->
+ %% a Set-Cookie header without name is not valid
+ false;
+is_set_cookie_valid(SetCookieHeader) ->
+ %% a Set-Cookie header without name/value is not valid
+ case string:chr(SetCookieHeader, $=) of
+ 0 -> false;
+ _ -> true
+ end.
+
parse_set_cookies(CookieHeaders, DefaultPathDomain) ->
- %% empty Set-Cookie header is invalid according to RFC but some sites violate it
- SetCookieHeaders = [Value || {"set-cookie", Value} <- CookieHeaders, Value /= ""],
+ %% filter invalid Set-Cookie headers
+ SetCookieHeaders = [Value || {"set-cookie", Value} <- CookieHeaders,
+ is_set_cookie_valid(Value)],
Cookies = [parse_set_cookie(SetCookieHeader, DefaultPathDomain) ||
SetCookieHeader <- SetCookieHeaders],
%% print_cookies("Parsed Cookies", Cookies),
@@ -348,6 +362,8 @@ parse_set_cookie(CookieHeader, {DefaultPath, DefaultDomain}) ->
Name = string:substr(CookieHeader, 1, Pos - 1),
{Value, Attrs} =
case string:substr(CookieHeader, Pos + 1) of
+ [] ->
+ {"", ""};
[$;|ValueAndAttrs] ->
{"", string:tokens(ValueAndAttrs, ";")};
ValueAndAttrs ->
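To make the filtering above concrete, the new is_set_cookie_valid/1 clauses
(an internal function) evaluate as follows for some illustrative header values:

%% is_set_cookie_valid("")                -> false  (empty header)
%% is_set_cookie_valid("=secret")         -> false  (no cookie name)
%% is_set_cookie_valid("no-separator")    -> false  (no '=' at all)
%% is_set_cookie_valid("sid=")            -> true   (empty value is accepted)
%% is_set_cookie_valid("sid=abc; Path=/") -> true
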
diff --git a/lib/inets/src/http_client/httpc_handler.erl b/lib/inets/src/http_client/httpc_handler.erl
index 7f7328f1d9..9d832ef18b 100644
--- a/lib/inets/src/http_client/httpc_handler.erl
+++ b/lib/inets/src/http_client/httpc_handler.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -87,7 +87,7 @@
%% block the httpc manager process in odd cases such as trying to call
%% a server that does not exist. (See OTP-6735) The only API function
%% sending messages to the handler process that can be called before
-%% init has compleated is cancel and that is not a problem! (Send and
+%% init has completed is cancel and that is not a problem! (Send and
%% stream will not be called before the first request has been sent and
%% the reply or part of it has arrived.)
%%--------------------------------------------------------------------
@@ -392,7 +392,7 @@ handle_call(info, _, State) ->
%% When the request in process has been canceled the handler process is
%% stopped and the pipelined requests will be reissued or remaining
%% requests will be sent on a new connection. This is is
-%% based on the assumption that it is proably cheaper to reissue the
+%% based on the assumption that it is probably cheaper to reissue the
%% requests than to wait for a potentiall large response that we then
%% only throw away. This of course is not always true maybe we could
%% do something smarter here?! If the request canceled is not
@@ -1122,7 +1122,7 @@ handle_http_body(Body, #state{headers = Headers,
handle_response(State#state{headers = NewHeaders,
body = NewBody});
_ ->
- {NewBody2, NewRequest} =
+ {NewBody2, _NewRequest} =
stream(NewBody, Request, Code),
handle_response(State#state{headers = NewHeaders,
body = NewBody2})
@@ -1330,7 +1330,7 @@ handle_keep_alive_queue(#state{status = keep_alive,
Session, <<>>,
State#state{keep_alive = KeepAlive});
{error, Reason} ->
- {stop, shutdown, {keepalive_failed, Reason}, State}
+ {stop, {shutdown, {keepalive_failed, Reason}}, State}
end
end
end.
@@ -1345,7 +1345,7 @@ handle_empty_queue(Session, ProfileName, TimeOut, State) ->
%% closed by the server, the client may want to close it.
NewState = activate_queue_timeout(TimeOut, State),
update_session(ProfileName, Session, #session.queue_length, 0),
- %% Note mfa will be initilized when a new request
+ %% Note mfa will be initialized when a new request
%% arrives.
{noreply,
NewState#state{request = undefined,
diff --git a/lib/inets/src/http_lib/http_internal.hrl b/lib/inets/src/http_lib/http_internal.hrl
index 53b776c4e7..54425740b5 100644
--- a/lib/inets/src/http_lib/http_internal.hrl
+++ b/lib/inets/src/http_lib/http_internal.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,6 +28,7 @@
-define(HTTP_MAX_URI_SIZE, nolimit).
-define(HTTP_MAX_VERSION_STRING, 8).
-define(HTTP_MAX_METHOD_STRING, 20).
+-define(HTTP_MAX_CONTENT_LENGTH, 100000000).
-ifndef(HTTP_DEFAULT_SSL_KIND).
-define(HTTP_DEFAULT_SSL_KIND, essl).
diff --git a/lib/inets/src/http_lib/http_request.erl b/lib/inets/src/http_lib/http_request.erl
index f295453bdd..a0833ddf01 100644
--- a/lib/inets/src/http_lib/http_request.erl
+++ b/lib/inets/src/http_lib/http_request.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -21,8 +21,16 @@
-include("http_internal.hrl").
--export([headers/2, http_headers/1, is_absolut_uri/1]).
+-export([headers/2, http_headers/1, is_absolut_uri/1, key_value/1]).
+
+key_value(KeyValueStr) ->
+ case lists:splitwith(fun($:) -> false; (_) -> true end, KeyValueStr) of
+ {Key, [$: | Value]} ->
+ {http_util:to_lower(string:strip(Key)), string:strip(Value)};
+ {_, []} ->
+ undefined
+ end.
%%-------------------------------------------------------------------------
%% headers(HeaderList, #http_request_h{}) -> #http_request_h{}
%% HeaderList - ["HeaderField:Value"]
@@ -34,14 +42,12 @@
%%-------------------------------------------------------------------------
headers([], Headers) ->
Headers;
-headers([Header | Tail], Headers) ->
- case lists:splitwith(fun($:) -> false; (_) -> true end, Header) of
- {Key, [$: | Value]} ->
- headers(Tail, headers(http_util:to_lower(string:strip(Key)),
- string:strip(Value), Headers));
- {_, []} ->
- headers(Tail, Headers)
- end.
+headers([{Key, Value} | Tail], Headers) ->
+ headers(Tail, headers(Key, Value, Headers));
+headers([undefined], Headers) ->
+ Headers;
+headers(KeyValues, Headers) ->
+ headers([key_value(KeyValue) || KeyValue <- KeyValues], Headers).
%%-------------------------------------------------------------------------
%% headers(#http_request_h{}) -> HeaderList
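Illustrative results of the new http_request:key_value/1 above (the header
strings are arbitrary examples; the values follow directly from the clauses shown):

%% key_value("Content-Type:  text/html ") -> {"content-type","text/html"}
%% key_value("Host: example.com")         -> {"host","example.com"}
%% key_value("no-colon-here")             -> undefined   %% later skipped by headers/2
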
diff --git a/lib/inets/src/http_lib/http_uri.erl b/lib/inets/src/http_lib/http_uri.erl
index 5962001c3a..350a4bc169 100644
--- a/lib/inets/src/http_lib/http_uri.erl
+++ b/lib/inets/src/http_lib/http_uri.erl
@@ -90,8 +90,8 @@ parse(AbsURI, Opts) ->
{error, Reason};
{Scheme, DefaultPort, Rest} ->
case (catch parse_uri_rest(Scheme, DefaultPort, Rest, Opts)) of
- {ok, {UserInfo, Host, Port, Path, Query}} ->
- {ok, {Scheme, UserInfo, Host, Port, Path, Query}};
+ {ok, Result} ->
+ {ok, Result};
{error, Reason} ->
{error, {Reason, Scheme, AbsURI}};
_ ->
@@ -148,27 +148,22 @@ parse_scheme(AbsURI, Opts) ->
end.
parse_uri_rest(Scheme, DefaultPort, "//" ++ URIPart, Opts) ->
- {Authority, PathQuery} =
- case split_uri(URIPart, "/", URIPart, 1, 0) of
- Split = {_, _} ->
- Split;
- URIPart ->
- case split_uri(URIPart, "\\?", URIPart, 1, 0) of
- Split = {_, _} ->
- Split;
- URIPart ->
- {URIPart,""}
- end
- end,
+ {Authority, PathQueryFragment} =
+ split_uri(URIPart, "[/?#]", {URIPart, ""}, 1, 0),
+ {RawPath, QueryFragment} =
+ split_uri(PathQueryFragment, "[?#]", {PathQueryFragment, ""}, 1, 0),
+ {Query, Fragment} =
+ split_uri(QueryFragment, "#", {QueryFragment, ""}, 1, 0),
{UserInfo, HostPort} = split_uri(Authority, "@", {"", Authority}, 1, 1),
{Host, Port} = parse_host_port(Scheme, DefaultPort, HostPort, Opts),
- {Path, Query} = parse_path_query(PathQuery),
- {ok, {UserInfo, Host, Port, Path, Query}}.
-
+ Path = path(RawPath),
+ case lists:keyfind(fragment, 1, Opts) of
+ {fragment, true} ->
+ {ok, {Scheme, UserInfo, Host, Port, Path, Query, Fragment}};
+ _ ->
+ {ok, {Scheme, UserInfo, Host, Port, Path, Query}}
+ end.
-parse_path_query(PathQuery) ->
- {Path, Query} = split_uri(PathQuery, "\\?", {PathQuery, ""}, 1, 0),
- {path(Path), Query}.
%% In this version of the function, we no longer need
%% the Scheme argument, but just in case...
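An illustrative call exercising the fragment support added above (the URL is an
arbitrary example; as before, query and fragment keep their leading delimiters):

%% http_uri:parse("http://example.com/over/there?name=ferret#nose",
%%                [{fragment, true}]) ->
%%     {ok,{http,[],"example.com",80,"/over/there","?name=ferret","#nose"}}
%%
%% Without {fragment, true} the same call returns the old six-element tuple,
%% with the fragment simply dropped:
%%     {ok,{http,[],"example.com",80,"/over/there","?name=ferret"}}
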
diff --git a/lib/inets/src/http_server/Makefile b/lib/inets/src/http_server/Makefile
index 2660d04d16..00bad51ff9 100644
--- a/lib/inets/src/http_server/Makefile
+++ b/lib/inets/src/http_server/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2005-2013. All Rights Reserved.
+# Copyright Ericsson AB 2005-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -46,6 +46,7 @@ MODULES = \
httpd_connection_sup\
httpd_cgi \
httpd_conf \
+ httpd_custom \
httpd_example \
httpd_esi \
httpd_file\
@@ -75,7 +76,6 @@ MODULES = \
mod_get \
mod_head \
mod_htaccess \
- mod_include \
mod_log \
mod_range \
mod_responsecontrol \
diff --git a/lib/inets/src/http_server/httpd_conf.erl b/lib/inets/src/http_server/httpd_conf.erl
index 27446ca7fe..a21eb915d4 100644
--- a/lib/inets/src/http_server/httpd_conf.erl
+++ b/lib/inets/src/http_server/httpd_conf.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -44,7 +44,7 @@
%% FilePath = string()
%% Result = {ok,Directory} | {error,Reason}
%% Directory = string()
-%% Reason = string() | enoent | eaccess | enotdir | FileInfo
+%% Reason = string() | enoent | eacces | enotdir | FileInfo
%% FileInfo = File info record
%%
%% Description: Checks if FilePath is a directory in which case it is
@@ -71,7 +71,7 @@ is_directory(_Type,_Access,FileInfo,_Directory) ->
%% FilePath = string()
%% Result = {ok,File} | {error,Reason}
%% File = string()
-%% Reason = string() | enoent | eaccess | enotdir | FileInfo
+%% Reason = string() | enoent | eacces | enotdir | FileInfo
%% FileInfo = File info record
%%
%% Description: Checks if FilePath is a regular file in which case it
@@ -205,13 +205,13 @@ load("MaxURISize " ++ MaxHeaderSize, []) ->
" is an invalid number of MaxHeaderSize")}
end;
-load("MaxBodySize " ++ MaxBodySize, []) ->
- case make_integer(MaxBodySize) of
+load("MaxContentLength " ++ Max, []) ->
+ case make_integer(Max) of
{ok, Integer} ->
- {ok, [], {max_body_size,Integer}};
+ {ok, [], {max_content_length, Integer}};
{error, _} ->
- {error, ?NICE(clean(MaxBodySize) ++
- " is an invalid number of MaxBodySize")}
+ {error, ?NICE(clean(Max) ++
+ " is an invalid number of MaxContentLength")}
end;
load("ServerName " ++ ServerName, []) ->
@@ -219,14 +219,14 @@ load("ServerName " ++ ServerName, []) ->
load("ServerTokens " ++ ServerTokens, []) ->
%% These are the valid *plain* server tokens:
- %% sprod, major, minor, minimum, os, full
+ %% none, prod, major, minor, minimum, os, full
%% It can also be a "private" server token: private:<any string>
case string:tokens(ServerTokens, [$:]) of
["private", Private] ->
{ok,[], {server_tokens, clean(Private)}};
[TokStr] ->
Tok = list_to_atom(clean(TokStr)),
- case lists:member(Tok, [prod, major, minor, minimum, os, full]) of
+ case lists:member(Tok, [none, prod, major, minor, minimum, os, full]) of
true ->
{ok,[], {server_tokens, Tok}};
false ->
@@ -337,7 +337,7 @@ load("MaxKeepAliveRequest " ++ MaxRequests, []) ->
load("KeepAliveTimeout " ++ Timeout, []) ->
case make_integer(Timeout) of
{ok, Integer} ->
- {ok, [], {keep_alive_timeout, Integer*1000}};
+ {ok, [], {keep_alive_timeout, Integer}};
{error, _} ->
{error, ?NICE(clean(Timeout)++" is an invalid KeepAliveTimeout")}
end;
@@ -569,6 +569,12 @@ validate_config_params([{max_body_size, Value} | Rest])
validate_config_params([{max_body_size, Value} | _]) ->
throw({max_body_size, Value});
+validate_config_params([{max_content_length, Value} | Rest])
+ when is_integer(Value) andalso (Value > 0) ->
+ validate_config_params(Rest);
+validate_config_params([{max_content_length, Value} | _]) ->
+ throw({max_content_length, Value});
+
validate_config_params([{server_name, Value} | Rest])
when is_list(Value) ->
validate_config_params(Rest);
@@ -635,7 +641,7 @@ validate_config_params([{max_keep_alive_request, Value} | Rest])
when is_integer(Value) andalso (Value > 0) ->
validate_config_params(Rest);
validate_config_params([{max_keep_alive_request, Value} | _]) ->
- throw({max_header_size, Value});
+ throw({max_keep_alive_request, Value});
validate_config_params([{keep_alive_timeout, Value} | Rest])
when is_integer(Value) andalso (Value >= 0) ->
@@ -779,8 +785,15 @@ fix_mime_types(ConfigList0) ->
[{"html","text/html"},{"htm","text/html"}]}
| ConfigList0]
end;
- _ ->
- ConfigList0
+ MimeTypes ->
+ case filelib:is_file(MimeTypes) of
+ true ->
+ {ok, MimeTypesList} = load_mime_types(MimeTypes),
+ ConfigList = proplists:delete(mime_types, ConfigList0),
+ [{mime_types, MimeTypesList} | ConfigList];
+ false ->
+ ConfigList0
+ end
end.
store({mime_types,MimeTypesList},ConfigList) ->
@@ -799,7 +812,7 @@ store({server_tokens, ServerTokens} = Entry, _ConfigList) ->
Server = server(ServerTokens),
{ok, [Entry, {server, Server}]};
store({keep_alive_timeout, KeepAliveTimeout}, _ConfigList) ->
- {ok, {keep_alive_timeout, KeepAliveTimeout * 1000}};
+ {ok, {keep_alive_timeout, KeepAliveTimeout}};
store(ConfigListEntry, _ConfigList) ->
{ok, ConfigListEntry}.
@@ -844,6 +857,8 @@ server(full = _ServerTokens) ->
OS = os_info(full),
lists:flatten(
io_lib:format("~s ~s OTP/~s", [?SERVER_SOFTWARE, OS, OTPRelease]));
+server(none = _ServerTokens) ->
+ "";
server({private, Server} = _ServerTokens) when is_list(Server) ->
%% The user provide its own
Server;
@@ -1293,7 +1308,7 @@ ssl_ca_certificate_file(ConfigDB) ->
end.
plain_server_tokens() ->
- [prod, major, minor, minimum, os, full].
+ [none, prod, major, minor, minimum, os, full].
error_report(Where,M,F,Error) ->
error_logger:error_report([{?MODULE, Where},
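For illustration (not part of the patch): a property list exercising the
configuration handling changed above; the concrete values are assumptions.

[{port, 8080},
 {server_name, "demo"},
 {server_root, "/tmp"},
 {document_root, "/tmp/htdocs"},
 {keep_alive_timeout, 150},       %% now stored as given (seconds), no longer pre-multiplied
 {max_content_length, 10000000},  %% new: upper bound accepted for Content-Length
 {server_tokens, none},           %% new: suppresses the "Server" response header
 {customize, httpd_custom}]       %% header customization callback module (the default)
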
diff --git a/lib/inets/src/http_server/httpd_custom.erl b/lib/inets/src/http_server/httpd_custom.erl
new file mode 100644
index 0000000000..342469a579
--- /dev/null
+++ b/lib/inets/src/http_server/httpd_custom.erl
@@ -0,0 +1,69 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+-module(httpd_custom).
+
+-export([response_header/1, request_header/1]).
+-export([customize_headers/3]).
+
+-include_lib("inets/src/inets_app/inets_internal.hrl").
+
+response_header(Header) ->
+ {true, httpify(Header)}.
+request_header(Header) ->
+ {true, Header}.
+
+customize_headers(?MODULE, Function, Arg) ->
+ ?MODULE:Function(Arg);
+customize_headers(Module, Function, Arg) ->
+ try Module:Function(Arg) of
+ {true, Value} ->
+ ?MODULE:Function(Value);
+ false ->
+ false
+ catch
+ _:_ ->
+ ?MODULE:Function(Arg)
+ end.
+
+httpify({Key0, Value}) ->
+    %% Make sure the first letter is capitalized (de facto standard)
+ Words1 = string:tokens(Key0, "-"),
+ Words2 = upify(Words1, []),
+ Key = new_key(Words2),
+ Key ++ ": " ++ Value ++ ?CRLF .
+
+new_key([]) ->
+ "";
+new_key([W]) ->
+ W;
+new_key([W1,W2]) ->
+ W1 ++ "-" ++ W2;
+new_key([W|R]) ->
+ W ++ "-" ++ new_key(R).
+
+upify([], Acc) ->
+ lists:reverse(Acc);
+upify([Key|Rest], Acc) ->
+ upify(Rest, [upify2(Key)|Acc]).
+
+upify2([C|Rest]) when (C >= $a) andalso (C =< $z) ->
+ [C-($a-$A)|Rest];
+upify2(Str) ->
+ Str.
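The default callbacks above accept every header, and the response path
normalizes header-name casing via httpify/1; for example (illustration only):

%% httpd_custom:request_header({"accept", "text/html"})
%%     -> {true, {"accept", "text/html"}}
%% httpd_custom:response_header({"content-type", "text/html"})
%%     -> {true, "Content-Type: text/html\r\n"}
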
diff --git a/lib/inets/src/http_server/httpd_request.erl b/lib/inets/src/http_server/httpd_request.erl
index 712c73599f..782120c284 100644
--- a/lib/inets/src/http_server/httpd_request.erl
+++ b/lib/inets/src/http_server/httpd_request.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -42,28 +42,28 @@
%%%=========================================================================
%%% Internal application API
%%%=========================================================================
-parse([Bin, MaxSizes]) ->
- ?hdrt("parse", [{bin, Bin}, {max_sizes, MaxSizes}]),
- parse_method(Bin, [], 0, proplists:get_value(max_method, MaxSizes), MaxSizes, []);
+parse([Bin, Options]) ->
+ ?hdrt("parse", [{bin, Bin}, {max_sizes, Options}]),
+ parse_method(Bin, [], 0, proplists:get_value(max_method, Options), Options, []);
parse(Unknown) ->
?hdrt("parse", [{unknown, Unknown}]),
exit({bad_args, Unknown}).
%% Functions that may be returned during the decoding process
%% if the input data is incompleate.
-parse_method([Bin, Method, Current, Max, MaxSizes, Result]) ->
- parse_method(Bin, Method, Current, Max, MaxSizes, Result).
+parse_method([Bin, Method, Current, Max, Options, Result]) ->
+ parse_method(Bin, Method, Current, Max, Options, Result).
-parse_uri([Bin, URI, Current, Max, MaxSizes, Result]) ->
- parse_uri(Bin, URI, Current, Max, MaxSizes, Result).
+parse_uri([Bin, URI, Current, Max, Options, Result]) ->
+ parse_uri(Bin, URI, Current, Max, Options, Result).
-parse_version([Bin, Rest, Version, Current, Max, MaxSizes, Result]) ->
- parse_version(<<Rest/binary, Bin/binary>>, Version, Current, Max, MaxSizes,
+parse_version([Bin, Rest, Version, Current, Max, Options, Result]) ->
+ parse_version(<<Rest/binary, Bin/binary>>, Version, Current, Max, Options,
Result).
-parse_headers([Bin, Rest, Header, Headers, Current, Max, MaxSizes, Result]) ->
+parse_headers([Bin, Rest, Header, Headers, Current, Max, Options, Result]) ->
parse_headers(<<Rest/binary, Bin/binary>>,
- Header, Headers, Current, Max, MaxSizes, Result).
+ Header, Headers, Current, Max, Options, Result).
whole_body([Bin, Body, Length]) ->
whole_body(<<Body/binary, Bin/binary>>, Length).
@@ -118,108 +118,123 @@ validate(Method, Uri, Version) ->
%% create it.
%% ----------------------------------------------------------------------
update_mod_data(ModData, Method, RequestURI, HTTPVersion, Headers)->
- ParsedHeaders = tagup_header(Headers),
- PersistentConn = get_persistens(HTTPVersion, ParsedHeaders,
+ PersistentConn = get_persistens(HTTPVersion, Headers,
ModData#mod.config_db),
{ok, ModData#mod{data = [],
method = Method,
absolute_uri = format_absolute_uri(RequestURI,
- ParsedHeaders),
+ Headers),
request_uri = format_request_uri(RequestURI),
http_version = HTTPVersion,
request_line = Method ++ " " ++ RequestURI ++
" " ++ HTTPVersion,
- parsed_header = ParsedHeaders,
+ parsed_header = Headers,
connection = PersistentConn}}.
%%%========================================================================
%%% Internal functions
%%%========================================================================
-parse_method(<<>>, Method, Current, Max, MaxSizes, Result) ->
- {?MODULE, parse_method, [Method, Current, Max, MaxSizes, Result]};
-parse_method(<<?SP, Rest/binary>>, Method, _Current, _Max, MaxSizes, Result) ->
- parse_uri(Rest, [], 0, proplists:get_value(max_uri, MaxSizes), MaxSizes,
+parse_method(<<>>, Method, Current, Max, Options, Result) ->
+ {?MODULE, parse_method, [Method, Current, Max, Options, Result]};
+parse_method(<<?SP, Rest/binary>>, Method, _Current, _Max, Options, Result) ->
+ parse_uri(Rest, [], 0, proplists:get_value(max_uri, Options), Options,
[string:strip(lists:reverse(Method)) | Result]);
-parse_method(<<Octet, Rest/binary>>, Method, Current, Max, MaxSizes, Result) when Current =< Max ->
- parse_method(Rest, [Octet | Method], Current + 1, Max, MaxSizes, Result);
+parse_method(<<Octet, Rest/binary>>, Method, Current, Max, Options, Result) when Current =< Max ->
+ parse_method(Rest, [Octet | Method], Current + 1, Max, Options, Result);
parse_method(_, _, _, Max, _, _) ->
%% We do not know the version of the client as it comes after the
%% method send the lowest version in the response so that the client
%% will be able to handle it.
- {error, {too_long, Max, 413, "Method unreasonably long"}, lowest_version()}.
+ {error, {size_error, Max, 413, "Method unreasonably long"}, lowest_version()}.
parse_uri(_, _, Current, MaxURI, _, _)
when (Current > MaxURI) andalso (MaxURI =/= nolimit) ->
%% We do not know the version of the client as it comes after the
%% uri send the lowest version in the response so that the client
%% will be able to handle it.
- {error, {too_long, MaxURI, 414, "URI unreasonably long"},lowest_version()};
-parse_uri(<<>>, URI, Current, Max, MaxSizes, Result) ->
- {?MODULE, parse_uri, [URI, Current, Max, MaxSizes, Result]};
-parse_uri(<<?SP, Rest/binary>>, URI, _, _, MaxSizes, Result) ->
- parse_version(Rest, [], 0, proplists:get_value(max_version, MaxSizes), MaxSizes,
+ {error, {size_error, MaxURI, 414, "URI unreasonably long"},lowest_version()};
+parse_uri(<<>>, URI, Current, Max, Options, Result) ->
+ {?MODULE, parse_uri, [URI, Current, Max, Options, Result]};
+parse_uri(<<?SP, Rest/binary>>, URI, _, _, Options, Result) ->
+ parse_version(Rest, [], 0, proplists:get_value(max_version, Options), Options,
[string:strip(lists:reverse(URI)) | Result]);
%% Can happen if it is a simple HTTP/0.9 request e.i "GET /\r\n\r\n"
-parse_uri(<<?CR, _Rest/binary>> = Data, URI, _, _, MaxSizes, Result) ->
- parse_version(Data, [], 0, proplists:get_value(max_version, MaxSizes), MaxSizes,
+parse_uri(<<?CR, _Rest/binary>> = Data, URI, _, _, Options, Result) ->
+ parse_version(Data, [], 0, proplists:get_value(max_version, Options), Options,
[string:strip(lists:reverse(URI)) | Result]);
-parse_uri(<<Octet, Rest/binary>>, URI, Current, Max, MaxSizes, Result) ->
- parse_uri(Rest, [Octet | URI], Current + 1, Max, MaxSizes, Result).
+parse_uri(<<Octet, Rest/binary>>, URI, Current, Max, Options, Result) ->
+ parse_uri(Rest, [Octet | URI], Current + 1, Max, Options, Result).
-parse_version(<<>>, Version, Current, Max, MaxSizes, Result) ->
- {?MODULE, parse_version, [<<>>, Version, Current, Max, MaxSizes, Result]};
-parse_version(<<?LF, Rest/binary>>, Version, Current, Max, MaxSizes, Result) ->
+parse_version(<<>>, Version, Current, Max, Options, Result) ->
+ {?MODULE, parse_version, [<<>>, Version, Current, Max, Options, Result]};
+parse_version(<<?LF, Rest/binary>>, Version, Current, Max, Options, Result) ->
%% If ?CR is is missing RFC2616 section-19.3
- parse_version(<<?CR, ?LF, Rest/binary>>, Version, Current, Max, MaxSizes, Result);
-parse_version(<<?CR, ?LF, Rest/binary>>, Version, _, _, MaxSizes, Result) ->
- parse_headers(Rest, [], [], 0, proplists:get_value(max_header, MaxSizes), MaxSizes,
+ parse_version(<<?CR, ?LF, Rest/binary>>, Version, Current, Max, Options, Result);
+parse_version(<<?CR, ?LF, Rest/binary>>, Version, _, _, Options, Result) ->
+ parse_headers(Rest, [], [], 0, proplists:get_value(max_header, Options), Options,
[string:strip(lists:reverse(Version)) | Result]);
-parse_version(<<?CR>> = Data, Version, Current, Max, MaxSizes, Result) ->
- {?MODULE, parse_version, [Data, Version, Current, Max, MaxSizes, Result]};
-parse_version(<<Octet, Rest/binary>>, Version, Current, Max, MaxSizes, Result) when Current =< Max ->
- parse_version(Rest, [Octet | Version], Current + 1, Max, MaxSizes, Result);
+parse_version(<<?CR>> = Data, Version, Current, Max, Options, Result) ->
+ {?MODULE, parse_version, [Data, Version, Current, Max, Options, Result]};
+parse_version(<<Octet, Rest/binary>>, Version, Current, Max, Options, Result) when Current =< Max ->
+ parse_version(Rest, [Octet | Version], Current + 1, Max, Options, Result);
parse_version(_, _, _, Max,_,_) ->
- {error, {too_long, Max, 413, "Version string unreasonably long"}, lowest_version()}.
+ {error, {size_error, Max, 413, "Version string unreasonably long"}, lowest_version()}.
parse_headers(_, _, _, Current, Max, _, Result)
when Max =/= nolimit andalso Current > Max ->
HttpVersion = lists:nth(3, lists:reverse(Result)),
- {error, {too_long, Max, 413, "Headers unreasonably long"}, HttpVersion};
+ {error, {size_error, Max, 413, "Headers unreasonably long"}, HttpVersion};
-parse_headers(<<>>, Header, Headers, Current, Max, MaxSizes, Result) ->
+parse_headers(<<>>, Header, Headers, Current, Max, Options, Result) ->
{?MODULE, parse_headers, [<<>>, Header, Headers, Current, Max,
- MaxSizes, Result]};
-parse_headers(<<?CR,?LF,?LF,Body/binary>>, [], [], Current, Max, MaxSizes, Result) ->
+ Options, Result]};
+parse_headers(<<?CR,?LF,?LF,Body/binary>>, [], [], Current, Max, Options, Result) ->
%% If ?CR is is missing RFC2616 section-19.3
parse_headers(<<?CR,?LF,?CR,?LF,Body/binary>>, [], [], Current, Max,
- MaxSizes, Result);
+ Options, Result);
-parse_headers(<<?LF,?LF,Body/binary>>, [], [], Current, Max, MaxSizes, Result) ->
+parse_headers(<<?LF,?LF,Body/binary>>, [], [], Current, Max, Options, Result) ->
%% If ?CR is is missing RFC2616 section-19.3
parse_headers(<<?CR,?LF,?CR,?LF,Body/binary>>, [], [], Current, Max,
- MaxSizes, Result);
+ Options, Result);
parse_headers(<<?CR,?LF,?CR,?LF,Body/binary>>, [], [], _, _, _, Result) ->
NewResult = list_to_tuple(lists:reverse([Body, {#http_request_h{}, []} |
Result])),
{ok, NewResult};
parse_headers(<<?CR,?LF,?CR,?LF,Body/binary>>, Header, Headers, _, _,
- _, Result) ->
- HTTPHeaders = [lists:reverse(Header) | Headers],
- RequestHeaderRcord =
- http_request:headers(HTTPHeaders, #http_request_h{}),
- NewResult =
- list_to_tuple(lists:reverse([Body, {RequestHeaderRcord,
- HTTPHeaders} | Result])),
- {ok, NewResult};
+ Options, Result) ->
+ Customize = proplists:get_value(customize, Options),
+ case http_request:key_value(lists:reverse(Header)) of
+ undefined -> %% Skip headers with missing :
+ FinalHeaders = lists:filtermap(fun(H) ->
+ httpd_custom:customize_headers(Customize, request_header, H)
+ end,
+ Headers),
+ {ok, list_to_tuple(lists:reverse([Body, {http_request:headers(FinalHeaders, #http_request_h{}), FinalHeaders} | Result]))};
+ NewHeader ->
+ case check_header(NewHeader, Options) of
+ ok ->
+ FinalHeaders = lists:filtermap(fun(H) ->
+ httpd_custom:customize_headers(Customize, request_header, H)
+ end, [NewHeader | Headers]),
+ {ok, list_to_tuple(lists:reverse([Body, {http_request:headers(FinalHeaders,
+ #http_request_h{}),
+ FinalHeaders} | Result]))};
+
+ {error, Reason} ->
+ HttpVersion = lists:nth(3, lists:reverse(Result)),
+ {error, Reason, HttpVersion}
+ end
+ end;
parse_headers(<<?CR,?LF,?CR>> = Data, Header, Headers, Current, Max,
- MaxSizes, Result) ->
+ Options, Result) ->
{?MODULE, parse_headers, [Data, Header, Headers, Current, Max,
- MaxSizes, Result]};
-parse_headers(<<?LF>>, [], [], Current, Max, MaxSizes, Result) ->
+ Options, Result]};
+parse_headers(<<?LF>>, [], [], Current, Max, Options, Result) ->
%% If ?CR is is missing RFC2616 section-19.3
- parse_headers(<<?CR,?LF>>, [], [], Current, Max, MaxSizes, Result);
+ parse_headers(<<?CR,?LF>>, [], [], Current, Max, Options, Result);
%% There where no headers, which is unlikely to happen.
parse_headers(<<?CR,?LF>>, [], [], _, _, _, Result) ->
@@ -228,37 +243,50 @@ parse_headers(<<?CR,?LF>>, [], [], _, _, _, Result) ->
{ok, NewResult};
parse_headers(<<?LF>>, Header, Headers, Current, Max,
- MaxSizes, Result) ->
+ Options, Result) ->
%% If ?CR is is missing RFC2616 section-19.3
- parse_headers(<<?CR,?LF>>, Header, Headers, Current, Max, MaxSizes, Result);
+ parse_headers(<<?CR,?LF>>, Header, Headers, Current, Max, Options, Result);
parse_headers(<<?CR,?LF>> = Data, Header, Headers, Current, Max,
- MaxSizes, Result) ->
+ Options, Result) ->
{?MODULE, parse_headers, [Data, Header, Headers, Current, Max,
- MaxSizes, Result]};
+ Options, Result]};
parse_headers(<<?LF, Octet, Rest/binary>>, Header, Headers, Current, Max,
- MaxSizes, Result) ->
+ Options, Result) ->
%% If ?CR is is missing RFC2616 section-19.3
parse_headers(<<?CR,?LF, Octet, Rest/binary>>, Header, Headers, Current, Max,
- MaxSizes, Result);
+ Options, Result);
parse_headers(<<?CR,?LF, Octet, Rest/binary>>, Header, Headers, _, Max,
- MaxSizes, Result) ->
- parse_headers(Rest, [Octet], [lists:reverse(Header) | Headers],
- 0, Max, MaxSizes, Result);
+ Options, Result) ->
+ case http_request:key_value(lists:reverse(Header)) of
+ undefined -> %% Skip headers with missing :
+ parse_headers(Rest, [Octet], Headers,
+ 0, Max, Options, Result);
+ NewHeader ->
+ case check_header(NewHeader, Options) of
+ ok ->
+ parse_headers(Rest, [Octet], [NewHeader | Headers],
+ 0, Max, Options, Result);
+ {error, Reason} ->
+ HttpVersion = lists:nth(3, lists:reverse(Result)),
+ {error, Reason, HttpVersion}
+ end
+ end;
+
parse_headers(<<?CR>> = Data, Header, Headers, Current, Max,
- MaxSizes, Result) ->
+ Options, Result) ->
{?MODULE, parse_headers, [Data, Header, Headers, Current, Max,
- MaxSizes, Result]};
+ Options, Result]};
parse_headers(<<?LF>>, Header, Headers, Current, Max,
- MaxSizes, Result) ->
+ Options, Result) ->
%% If ?CR is is missing RFC2616 section-19.3
parse_headers(<<?CR, ?LF>>, Header, Headers, Current, Max,
- MaxSizes, Result);
+ Options, Result);
parse_headers(<<Octet, Rest/binary>>, Header, Headers, Current,
- Max, MaxSizes, Result) ->
+ Max, Options, Result) ->
parse_headers(Rest, [Octet | Header], Headers, Current + 1, Max,
- MaxSizes, Result).
+ Options, Result).
whole_body(Body, Length) ->
case size(Body) of
@@ -388,29 +416,29 @@ get_persistens(HTTPVersion,ParsedHeader,ConfigDB)->
false
end.
-
-%%----------------------------------------------------------------------
-%% tagup_header
-%%
-%% Parses the header of a HTTP request and returns a key,value tuple
-%% list containing Name and Value of each header directive as of:
-%%
-%% Content-Type: multipart/mixed -> {"Content-Type", "multipart/mixed"}
-%%
-%% But in http/1.1 the field-names are case insencitive so now it must be
-%% Content-Type: multipart/mixed -> {"content-type", "multipart/mixed"}
-%% The standard furthermore says that leading and traling white space
-%% is not a part of the fieldvalue and shall therefore be removed.
-%%----------------------------------------------------------------------
-tagup_header([]) -> [];
-tagup_header([Line|Rest]) -> [tag(Line, [])|tagup_header(Rest)].
-
-tag([], Tag) ->
- {http_util:to_lower(lists:reverse(Tag)), ""};
-tag([$:|Rest], Tag) ->
- {http_util:to_lower(lists:reverse(Tag)), string:strip(Rest)};
-tag([Chr|Rest], Tag) ->
- tag(Rest, [Chr|Tag]).
-
lowest_version()->
"HTTP/0.9".
+
+check_header({"content-length", Value}, Maxsizes) ->
+ Max = proplists:get_value(max_content_length, Maxsizes),
+ MaxLen = length(integer_to_list(Max)),
+ case length(Value) =< MaxLen of
+ true ->
+ try
+ list_to_integer(Value)
+ of
+ I when I>= 0 ->
+ ok;
+ _ ->
+ {error, {size_error, Max, 411, "negative content-length"}}
+ catch _:_ ->
+ {error, {size_error, Max, 411, "content-length not an integer"}}
+ end;
+ false ->
+ {error, {size_error, Max, 413, "content-length unreasonably long"}}
+ end;
+check_header(_, _) ->
+ ok.
+
+
+
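With the default ?HTTP_MAX_CONTENT_LENGTH of 100000000, the new check_header/2
above (an internal function) behaves as follows for some illustrative values:

%% Opts = [{max_content_length, 100000000}],
%% check_header({"content-length", "512"}, Opts)          -> ok
%% check_header({"content-length", "-1"}, Opts)           -> {error,{size_error,100000000,411,"negative content-length"}}
%% check_header({"content-length", "abc"}, Opts)          -> {error,{size_error,100000000,411,"content-length not an integer"}}
%% check_header({"content-length", "999999999999"}, Opts) -> {error,{size_error,100000000,413,"content-length unreasonably long"}}
%% check_header({"host", "example.com"}, Opts)            -> ok
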
diff --git a/lib/inets/src/http_server/httpd_request_handler.erl b/lib/inets/src/http_server/httpd_request_handler.erl
index 9bea58cc9e..9947e17b47 100644
--- a/lib/inets/src/http_server/httpd_request_handler.erl
+++ b/lib/inets/src/http_server/httpd_request_handler.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -96,8 +96,9 @@ init([Manager, ConfigDB, AcceptTimeout]) ->
proc_lib:init_ack({ok, self()}),
{SocketType, Socket} = await_socket_ownership_transfer(AcceptTimeout),
-
- KeepAliveTimeOut = httpd_util:lookup(ConfigDB, keep_alive_timeout, 150000),
+
+    %% Timeout value is in seconds; we want it in milliseconds
+ KeepAliveTimeOut = 1000 * httpd_util:lookup(ConfigDB, keep_alive_timeout, 150),
case http_transport:negotiate(SocketType, Socket, ?HANDSHAKE_TIMEOUT) of
{error, _Error} ->
@@ -119,11 +120,17 @@ continue_init(Manager, ConfigDB, SocketType, Socket, TimeOut) ->
MaxHeaderSize = max_header_size(ConfigDB),
MaxURISize = max_uri_size(ConfigDB),
NrOfRequest = max_keep_alive_request(ConfigDB),
-
+ MaxContentLen = max_content_length(ConfigDB),
+ Customize = customize(ConfigDB),
+
{_, Status} = httpd_manager:new_connection(Manager),
MFA = {httpd_request, parse, [[{max_uri, MaxURISize}, {max_header, MaxHeaderSize},
- {max_version, ?HTTP_MAX_VERSION_STRING}, {max_method, ?HTTP_MAX_METHOD_STRING}]]},
+ {max_version, ?HTTP_MAX_VERSION_STRING},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, MaxContentLen},
+ {customize, Customize}
+ ]]},
State = #state{mod = Mod,
manager = Manager,
@@ -207,7 +214,7 @@ handle_info({Proto, Socket, Data},
set_new_data_size(cancel_request_timeout(State), NewDataSize)
end,
handle_http_msg(Result, NewState);
- {error, {too_long, MaxSize, ErrCode, ErrStr}, Version} ->
+ {error, {size_error, MaxSize, ErrCode, ErrStr}, Version} ->
NewModData = ModData#mod{http_version = Version},
httpd_response:send_status(NewModData, ErrCode, ErrStr),
Reason = io_lib:format("~p: ~p max size is ~p~n",
@@ -444,8 +451,7 @@ handle_body(#state{headers = Headers, body = Body, mod = ModData} = State,
error_log(Reason, ModData),
{stop, normal, State#state{response_sent = true}};
_ ->
- Length =
- list_to_integer(Headers#http_request_h.'content-length'),
+ Length = list_to_integer(Headers#http_request_h.'content-length'),
case ((Length =< MaxBodySize) or (MaxBodySize == nolimit)) of
true ->
case httpd_request:whole_body(Body, Length) of
@@ -454,7 +460,7 @@ handle_body(#state{headers = Headers, body = Body, mod = ModData} = State,
ModData#mod.socket,
[{active, once}]),
{noreply, State#state{mfa =
- {Module, Function, Args}}};
+ {Module, Function, Args}}};
{ok, NewBody} ->
handle_response(
@@ -471,7 +477,7 @@ handle_body(#state{headers = Headers, body = Body, mod = ModData} = State,
handle_expect(#state{headers = Headers, mod =
#mod{config_db = ConfigDB} = ModData} = State,
MaxBodySize) ->
- Length = Headers#http_request_h.'content-length',
+ Length = list_to_integer(Headers#http_request_h.'content-length'),
case expect(Headers, ModData#mod.http_version, ConfigDB) of
continue when (MaxBodySize > Length) orelse (MaxBodySize =:= nolimit) ->
httpd_response:send_status(ModData, 100, ""),
@@ -545,9 +551,15 @@ handle_next_request(#state{mod = #mod{connection = true} = ModData,
init_data = ModData#mod.init_data},
MaxHeaderSize = max_header_size(ModData#mod.config_db),
MaxURISize = max_uri_size(ModData#mod.config_db),
+ MaxContentLen = max_content_length(ModData#mod.config_db),
+ Customize = customize(ModData#mod.config_db),
MFA = {httpd_request, parse, [[{max_uri, MaxURISize}, {max_header, MaxHeaderSize},
- {max_version, ?HTTP_MAX_VERSION_STRING}, {max_method, ?HTTP_MAX_METHOD_STRING}]]},
+ {max_version, ?HTTP_MAX_VERSION_STRING},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, MaxContentLen},
+ {customize, Customize}
+ ]]},
TmpState = State#state{mod = NewModData,
mfa = MFA,
max_keep_alive_request = decrease(Max),
@@ -630,3 +642,8 @@ max_body_size(ConfigDB) ->
max_keep_alive_request(ConfigDB) ->
httpd_util:lookup(ConfigDB, max_keep_alive_request, infinity).
+max_content_length(ConfigDB) ->
+ httpd_util:lookup(ConfigDB, max_content_length, ?HTTP_MAX_CONTENT_LENGTH).
+
+customize(ConfigDB) ->
+ httpd_util:lookup(ConfigDB, customize, httpd_custom).
diff --git a/lib/inets/src/http_server/httpd_response.erl b/lib/inets/src/http_server/httpd_response.erl
index 0895729d05..71dc05e46d 100644
--- a/lib/inets/src/http_server/httpd_response.erl
+++ b/lib/inets/src/http_server/httpd_response.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -176,7 +176,7 @@ send_header(#mod{socket_type = Type,
StatusLine = [NewVer, " ", io_lib:write(NewStatusCode), " ",
httpd_util:reason_phrase(NewStatusCode), ?CRLF],
ConnectionHeader = get_connection(Conn, NewVer),
- Head = list_to_binary([StatusLine, Headers, ConnectionHeader , ?CRLF]),
+ Head = [StatusLine, Headers, ConnectionHeader , ?CRLF],
httpd_socket:deliver(Type, Sock, Head).
map_status_code("HTTP/1.0", Code)
@@ -286,42 +286,21 @@ create_header(ConfigDb, KeyValueTupleHeaders) ->
Date = httpd_util:rfc1123_date(),
ContentType = "text/html",
Server = server(ConfigDb),
- NewHeaders = add_default_headers([{"date", Date},
- {"content-type", ContentType},
- {"server", Server}],
- KeyValueTupleHeaders),
- lists:map(fun fix_header/1, NewHeaders).
-
-
+ Headers0 = add_default_headers([{"date", Date},
+ {"content-type", ContentType}
+ | if Server=="" -> [];
+ true -> [{"server", Server}]
+ end
+ ],
+ KeyValueTupleHeaders),
+ CustomizeCB = httpd_util:lookup(ConfigDb, customize, httpd_custom),
+ lists:filtermap(fun(H) ->
+ httpd_custom:customize_headers(CustomizeCB, response_header, H)
+ end,
+ [Header || Header <- Headers0]).
server(ConfigDb) ->
httpd_util:lookup(ConfigDb, server, ?SERVER_SOFTWARE).
-fix_header({Key0, Value}) ->
- %% make sure first letter is capital
- Words1 = string:tokens(Key0, "-"),
- Words2 = upify(Words1, []),
- Key = new_key(Words2),
- Key ++ ": " ++ Value ++ ?CRLF .
-
-new_key([]) ->
- "";
-new_key([W]) ->
- W;
-new_key([W1,W2]) ->
- W1 ++ "-" ++ W2;
-new_key([W|R]) ->
- W ++ "-" ++ new_key(R).
-
-upify([], Acc) ->
- lists:reverse(Acc);
-upify([Key|Rest], Acc) ->
- upify(Rest, [upify2(Key)|Acc]).
-
-upify2([C|Rest]) when (C >= $a) andalso (C =< $z) ->
- [C-($a-$A)|Rest];
-upify2(Str) ->
- Str.
-
add_default_headers([], Headers) ->
Headers;
diff --git a/lib/inets/src/http_server/mod_alias.erl b/lib/inets/src/http_server/mod_alias.erl
index 0b9fe4cfe0..5039cd56b5 100644
--- a/lib/inets/src/http_server/mod_alias.erl
+++ b/lib/inets/src/http_server/mod_alias.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -55,6 +55,7 @@ do(#mod{data = Data} = Info) ->
do_alias(#mod{config_db = ConfigDB,
request_uri = ReqURI,
+ socket_type = SocketType,
data = Data}) ->
{ShortPath, Path, AfterPath} =
real_name(ConfigDB, ReqURI, which_alias(ConfigDB)),
@@ -70,8 +71,9 @@ do_alias(#mod{config_db = ConfigDB,
(LastChar =/= $/)) ->
?hdrt("directory and last-char is a /", []),
ServerName = which_server_name(ConfigDB),
- Port = port_string( which_port(ConfigDB) ),
- URL = "http://" ++ ServerName ++ Port ++ ReqURI ++ "/",
+ Port = port_string(which_port(ConfigDB)),
+ Protocol = get_protocol(SocketType),
+ URL = Protocol ++ ServerName ++ Port ++ ReqURI ++ "/",
ReasonPhrase = httpd_util:reason_phrase(301),
Message = httpd_util:message(301, URL, ConfigDB),
{proceed,
@@ -94,6 +96,12 @@ port_string(80) ->
port_string(Port) ->
":" ++ integer_to_list(Port).
+get_protocol(ip_comm) ->
+ "http://";
+get_protocol(_) ->
+    %% Should be cleaned up to have only one ssl type; essl vs ssl is no longer relevant
+ "https://".
+
%% real_name
real_name(ConfigDB, RequestURI, []) ->
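Illustration of the redirect change above: the scheme of the Location URL now
follows the listener's socket type (the host and port below are example values):

%% get_protocol(ip_comm)  -> "http://"
%% get_protocol(_SslType) -> "https://"
%%
%% so a directory request for "/docs" on an ssl listener at example.com:8443
%% is redirected to "https://example.com:8443/docs/" instead of the previous
%% hard-wired "http://..." form.
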
diff --git a/lib/inets/src/http_server/mod_include.erl b/lib/inets/src/http_server/mod_include.erl
deleted file mode 100644
index 35f45bdd33..0000000000
--- a/lib/inets/src/http_server/mod_include.erl
+++ /dev/null
@@ -1,598 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
-%%
-%% The contents of this file are subject to the Erlang Public License,
-%% Version 1.1, (the "License"); you may not use this file except in
-%% compliance with the License. You should have received a copy of the
-%% Erlang Public License along with this software. If not, it can be
-%% retrieved online at http://www.erlang.org/.
-%%
-%% Software distributed under the License is distributed on an "AS IS"
-%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
-%% the License for the specific language governing rights and limitations
-%% under the License.
-%%
-%% %CopyrightEnd%
-%%
-%%
--module(mod_include).
--export([do/1,parse/2,config/6,include/6,echo/6,fsize/6,flastmod/6,exec/6]).
-
--include("httpd.hrl").
--include("httpd_internal.hrl").
-
--define(VMODULE,"INCLUDE").
-
-%% do
-
-do(Info) ->
- case Info#mod.method of
- "GET" ->
- case proplists:get_value(status, Info#mod.data) of
- %% A status code has been generated!
- {_StatusCode, _PhraseArgs, _Reason} ->
- {proceed,Info#mod.data};
- %% No status code has been generated!
- undefined ->
- case proplists:get_value(response, Info#mod.data) of
- %% No response has been generated!
- undefined ->
- do_include(Info);
- %% A response has been generated or sent!
- _Response ->
- {proceed,Info#mod.data}
- end
- end;
- %% Not a GET method!
- _ ->
- {proceed,Info#mod.data}
- end.
-
-do_include(Info) ->
- Path = mod_alias:path(Info#mod.data,Info#mod.config_db,
- Info#mod.request_uri),
- Suffix = httpd_util:suffix(Path),
- case httpd_util:lookup_mime_default(Info#mod.config_db,Suffix) of
- "text/x-server-parsed-html" ->
- HeaderStart = [{content_type, "text/html"}],
- case send_in(Info, Path, HeaderStart, file:read_file_info(Path)) of
- {ok, ErrorLog, Size} ->
- {proceed, [{response, {already_sent, 200, Size}},
- {mime_type, "text/html"} |
- lists:append(ErrorLog, Info#mod.data)]};
- {error, Reason} ->
- {proceed,
- [{status,send_error(Reason,Info,Path)}|Info#mod.data]}
- end;
- _ -> %% Unknown mime type, ignore
- {proceed,Info#mod.data}
- end.
-
-
-%%
-%% config directive
-%%
-
-config(_Info, Context, ErrorLog, TagList, ValueList, R) ->
- case verify_tags("config",[errmsg,timefmt,sizefmt],
- TagList,ValueList) of
- ok ->
- {ok,update_context(TagList,ValueList,Context),ErrorLog,"",R};
- {error,Reason} ->
- {ok,Context,[{internal_info,Reason}|ErrorLog],
- proplists:get_value(errmsg,Context,""),R}
- end.
-
-update_context([],[],Context) ->
- Context;
-update_context([Tag|R1],[Value|R2],Context) ->
- update_context(R1,R2,[{Tag,Value}|Context]).
-
-verify_tags(Command,ValidTags,TagList,ValueList)
- when length(TagList) =:= length(ValueList) ->
- verify_tags(Command, ValidTags, TagList);
-verify_tags(Command, _ValidTags, _TagList, _ValueList) ->
- {error, ?NICE(Command ++ " directive has spurious tags")}.
-
-verify_tags(_Command, _ValidTags, []) ->
- ok;
-verify_tags(Command, ValidTags, [Tag|Rest]) ->
- case lists:member(Tag, ValidTags) of
- true ->
- verify_tags(Command, ValidTags, Rest);
- false ->
- {error, ?NICE(Command++" directive has a spurious tag ("++
- atom_to_list(Tag)++")")}
- end.
-
-%%
-%% include directive
-%%
-
-include(Info,Context,ErrorLog,[virtual],[VirtualPath],R) ->
- Aliases = httpd_util:multi_lookup(Info#mod.config_db,alias),
- {_, Path, _AfterPath} =
- mod_alias:real_name(Info#mod.config_db, VirtualPath, Aliases),
- include(Info,Context,ErrorLog,R,Path);
-include(Info, Context, ErrorLog, [file], [FileName], R) ->
- Path = file(Info#mod.config_db, Info#mod.request_uri, FileName),
- include(Info, Context, ErrorLog, R, Path);
-include(_Info, Context, ErrorLog, _TagList, _ValueList, R) ->
- {ok, Context,
- [{internal_info,?NICE("include directive has a spurious tag")}|
- ErrorLog], proplists:get_value(errmsg, Context, ""), R}.
-
-include(Info, Context, ErrorLog, R, Path) ->
- case file:read_file(Path) of
- {ok, Body} ->
- {ok, NewContext, NewErrorLog, Result} =
- parse(Info, binary_to_list(Body), Context, ErrorLog, []),
- {ok, NewContext, NewErrorLog, Result, R};
- {error, _Reason} ->
- {ok, Context,
- [{internal_info, ?NICE("Can't open "++Path)}|ErrorLog],
- proplists:get_value(errmsg, Context, ""), R}
- end.
-
-file(ConfigDB, RequestURI, FileName) ->
- Aliases = httpd_util:multi_lookup(ConfigDB, alias),
- {_, Path, _AfterPath}
- = mod_alias:real_name(ConfigDB, RequestURI, Aliases),
- Pwd = filename:dirname(Path),
- filename:join(Pwd, FileName).
-
-%%
-%% echo directive
-%%
-
-echo(Info,Context,ErrorLog,[var],["DOCUMENT_NAME"],R) ->
- {ok,Context,ErrorLog,document_name(Info#mod.data,Info#mod.config_db,
- Info#mod.request_uri),R};
-echo(Info,Context,ErrorLog,[var],["DOCUMENT_URI"],R) ->
- {ok,Context,ErrorLog,document_uri(Info#mod.config_db,
- Info#mod.request_uri),R};
-echo(Info,Context,ErrorLog,[var],["QUERY_STRING_UNESCAPED"],R) ->
- {ok,Context,ErrorLog,query_string_unescaped(Info#mod.request_uri),R};
-echo(_Info,Context,ErrorLog,[var],["DATE_LOCAL"],R) ->
- {ok,Context,ErrorLog,date_local(),R};
-echo(_Info,Context,ErrorLog,[var],["DATE_GMT"],R) ->
- {ok,Context,ErrorLog,date_gmt(),R};
-echo(Info,Context,ErrorLog,[var],["LAST_MODIFIED"],R) ->
- {ok,Context,ErrorLog,last_modified(Info#mod.data,Info#mod.config_db,
- Info#mod.request_uri),R};
-echo(_Info, Context, ErrorLog, _TagList, _ValueList, R) ->
- {ok,Context,
- [{internal_info,?NICE("echo directive has a spurious tag")}|
- ErrorLog],"(none)",R}.
-
-document_name(Data,ConfigDB,RequestURI) ->
- Path = mod_alias:path(Data,ConfigDB,RequestURI),
- case inets_regexp:match(Path,"[^/]*\$") of
- {match,Start,Length} ->
- string:substr(Path,Start,Length);
- nomatch ->
- "(none)"
- end.
-
-document_uri(ConfigDB, RequestURI) ->
- Aliases = httpd_util:multi_lookup(ConfigDB, alias),
-
- {_, Path, AfterPath} = mod_alias:real_name(ConfigDB, RequestURI, Aliases),
-
- VirtualPath = string:substr(RequestURI, 1,
- length(RequestURI)-length(AfterPath)),
- {match, Start, Length} = inets_regexp:match(Path,"[^/]*\$"),
- FileName = string:substr(Path,Start,Length),
- case inets_regexp:match(VirtualPath, FileName++"\$") of
- {match, _, _} ->
- http_uri:decode(VirtualPath)++AfterPath;
- nomatch ->
- string:strip(http_uri:decode(VirtualPath),right,$/)++
- "/"++FileName++AfterPath
- end.
-
-query_string_unescaped(RequestURI) ->
- case inets_regexp:match(RequestURI,"[\?].*\$") of
- {match,Start,Length} ->
- %% Escape all shell-special variables with \
- escape(string:substr(RequestURI,Start+1,Length-1));
- nomatch ->
- "(none)"
- end.
-
-escape([]) -> [];
-escape([$;|R]) -> [$\\,$;|escape(R)];
-escape([$&|R]) -> [$\\,$&|escape(R)];
-escape([$(|R]) -> [$\\,$(|escape(R)];
-escape([$)|R]) -> [$\\,$)|escape(R)];
-escape([$||R]) -> [$\\,$||escape(R)];
-escape([$^|R]) -> [$\\,$^|escape(R)];
-escape([$<|R]) -> [$\\,$<|escape(R)];
-escape([$>|R]) -> [$\\,$>|escape(R)];
-escape([$\n|R]) -> [$\\,$\n|escape(R)];
-escape([$ |R]) -> [$\\,$ |escape(R)];
-escape([$\t|R]) -> [$\\,$\t|escape(R)];
-escape([C|R]) -> [C|escape(R)].
-
-date_local() ->
- {{Year,Month,Day},{Hour,Minute,Second}}=calendar:local_time(),
- %% Time format hard-wired to: "%a %b %e %T %Y" according to strftime(3)
- io_lib:format("~s ~s ~2w ~2.2.0w:~2.2.0w:~2.2.0w ~w",
- [httpd_util:day(calendar:day_of_the_week(Year,Month,Day)),
- httpd_util:month(Month),Day,Hour,Minute,Second,Year]).
-
-date_gmt() ->
- {{Year,Month,Day},{Hour,Minute,Second}}=calendar:universal_time(),
- %% Time format hard-wired to: "%a %b %e %T %Z %Y" according to strftime(3)
- io_lib:format("~s ~s ~2w ~2.2.0w:~2.2.0w:~2.2.0w GMT ~w",
- [httpd_util:day(calendar:day_of_the_week(Year,Month,Day)),
- httpd_util:month(Month),Day,Hour,Minute,Second,Year]).
-
-last_modified(Data,ConfigDB,RequestURI) ->
- {ok,FileInfo}=file:read_file_info(mod_alias:path(Data,ConfigDB,RequestURI)),
- {{Year,Month,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
- io_lib:format("~s ~s ~2w ~2.2.0w:~2.2.0w:~2.2.0w ~w",
- [httpd_util:day(calendar:day_of_the_week(Year,Month,Day)),
- httpd_util:month(Month),Day,Hour,Minute,Second,Year]).
-
-%%
-%% fsize directive
-%%
-
-fsize(Info,Context,ErrorLog,[virtual],[VirtualPath],R) ->
- Aliases = httpd_util:multi_lookup(Info#mod.config_db,alias),
- {_,Path, _AfterPath}=
- mod_alias:real_name(Info#mod.config_db,VirtualPath,Aliases),
- fsize(Info, Context, ErrorLog, R, Path);
-fsize(Info,Context,ErrorLog,[file],[FileName],R) ->
- Path = file(Info#mod.config_db,Info#mod.request_uri,FileName),
- fsize(Info,Context,ErrorLog,R,Path);
-fsize(_Info, Context, ErrorLog, _TagList, _ValueList, R) ->
- {ok,Context,[{internal_info,?NICE("fsize directive has a spurious tag")}|
- ErrorLog],proplists:get_value(errmsg,Context,""),R}.
-
-fsize(_Info, Context, ErrorLog, R, Path) ->
- case file:read_file_info(Path) of
- {ok,FileInfo} ->
- case proplists:get_value(sizefmt,Context) of
- "bytes" ->
- {ok,Context,ErrorLog,
- integer_to_list(FileInfo#file_info.size),R};
- "abbrev" ->
- Size = integer_to_list(trunc(FileInfo#file_info.size/1024+1))++"k",
- {ok,Context,ErrorLog,Size,R};
- Value->
- {ok,Context,
- [{internal_info,
- ?NICE("fsize directive has a spurious tag value ("++
- Value++")")}|
- ErrorLog],
- proplists:get_value(errmsg, Context, ""), R}
- end;
- {error, _Reason} ->
- {ok,Context,[{internal_info,?NICE("Can't open "++Path)}|ErrorLog],
- proplists:get_value(errmsg,Context,""),R}
- end.
-
-%%
-%% flastmod directive
-%%
-
-flastmod(#mod{config_db = Db} = Info,
- Context, ErrorLog, [virtual], [VirtualPath],R) ->
- Aliases = httpd_util:multi_lookup(Db,alias),
- {_,Path, _AfterPath} = mod_alias:real_name(Db, VirtualPath, Aliases),
- flastmod(Info,Context,ErrorLog,R,Path);
-flastmod(#mod{config_db = Db, request_uri = RequestUri} = Info,
- Context, ErrorLog, [file], [FileName], R) ->
- Path = file(Db, RequestUri, FileName),
- flastmod(Info, Context, ErrorLog, R, Path);
-flastmod(_Info, Context, ErrorLog, _TagList, _ValueList, R) ->
- {ok,Context,
- [{internal_info,?NICE("flastmod directive has a spurious tag")}|
- ErrorLog],proplists:get_value(errmsg,Context,""),R}.
-
-flastmod(_Info, Context, ErrorLog, R, File) ->
- case file:read_file_info(File) of
- {ok, FileInfo} ->
- {{Yr,Mon,Day},{Hour,Minute,Second}}=FileInfo#file_info.mtime,
- Result =
- io_lib:format("~s ~s ~2w ~w:~w:~w ~w",
- [httpd_util:day(
- calendar:day_of_the_week(Yr,Mon, Day)),
- httpd_util:month(Mon),Day,Hour,Minute,Second, Yr]),
- {ok, Context, ErrorLog, Result, R};
- {error, _Reason} ->
- {ok,Context,[{internal_info,?NICE("Can't open "++File)}|ErrorLog],
- proplists:get_value(errmsg,Context,""),R}
- end.
-
-%%
-%% exec directive
-%%
-
-exec(Info,Context,ErrorLog,[cmd],[Command],R) ->
- cmd(Info,Context,ErrorLog,R,Command);
-exec(Info,Context,ErrorLog,[cgi],[RequestURI],R) ->
- cgi(Info,Context,ErrorLog,R,RequestURI);
-exec(_Info, Context, ErrorLog, _TagList, _ValueList, R) ->
- {ok, Context,
- [{internal_info,?NICE("exec directive has a spurious tag")}|
- ErrorLog], proplists:get_value(errmsg,Context,""),R}.
-
-%% cmd
-
-cmd(Info, Context, ErrorLog, R, Command) ->
- process_flag(trap_exit,true),
- Env = env(Info),
- Dir = filename:dirname(Command),
- Port = (catch open_port({spawn,Command},[stream,{cd,Dir},{env,Env}])),
- case Port of
- P when is_port(P) ->
- {NewErrorLog, Result} = proxy(Port, ErrorLog),
- {ok, Context, NewErrorLog, Result, R};
- {'EXIT', Reason} ->
- exit({open_port_failed,Reason,
- [{uri,Info#mod.request_uri},{script,Command},
- {env,Env},{dir,Dir}]});
- O ->
- exit({open_port_failed,O,
- [{uri,Info#mod.request_uri},{script,Command},
- {env,Env},{dir,Dir}]})
- end.
-
-env(Info) ->
- [{"DOCUMENT_NAME",document_name(Info#mod.data,Info#mod.config_db,
- Info#mod.request_uri)},
- {"DOCUMENT_URI", document_uri(Info#mod.config_db, Info#mod.request_uri)},
- {"QUERY_STRING_UNESCAPED", query_string_unescaped(Info#mod.request_uri)},
- {"DATE_LOCAL", date_local()},
- {"DATE_GMT", date_gmt()},
- {"LAST_MODIFIED", last_modified(Info#mod.data, Info#mod.config_db,
- Info#mod.request_uri)}
- ].
-
-%% cgi
-
-cgi(Info, Context, ErrorLog, R, RequestURI) ->
- ScriptAliases = httpd_util:multi_lookup(Info#mod.config_db, script_alias),
- case mod_alias:real_script_name(Info#mod.config_db, RequestURI,
- ScriptAliases) of
- {Script, AfterScript} ->
- exec_script(Info,Script,AfterScript,ErrorLog,Context,R);
- not_a_script ->
- {ok, Context,
- [{internal_info, ?NICE(RequestURI++" is not a script")}|
- ErrorLog], proplists:get_value(errmsg, Context, ""),R}
- end.
-
-remove_header([]) ->
- [];
-remove_header([$\n,$\n|Rest]) ->
- Rest;
-remove_header([_C|Rest]) ->
- remove_header(Rest).
-
-
-exec_script(#mod{config_db = Db, request_uri = RequestUri} = Info,
- Script, _AfterScript, ErrorLog, Context, R) ->
- process_flag(trap_exit,true),
- Aliases = httpd_util:multi_lookup(Db, alias),
- {_, Path, AfterPath} = mod_alias:real_name(Db, RequestUri, Aliases),
- Env = env(Info) ++ mod_cgi:env(Info, Path, AfterPath),
- Dir = filename:dirname(Path),
- Port = (catch open_port({spawn,Script},[stream,{env, Env},{cd, Dir}])),
- case Port of
- P when is_port(P) ->
- %% Send entity body to port.
- Res = case Info#mod.entity_body of
- [] ->
- true;
- EntityBody ->
- (catch port_command(Port, EntityBody))
- end,
- case Res of
- {'EXIT', Reason} ->
- exit({open_cmd_failed,Reason,
- [{mod,?MODULE},{port,Port},
- {uri,RequestUri},
- {script,Script},{env,Env},{dir,Dir},
- {ebody_size,sz(Info#mod.entity_body)}]});
- true ->
- {NewErrorLog, Result} = proxy(Port, ErrorLog),
- {ok, Context, NewErrorLog, remove_header(Result), R}
- end;
- {'EXIT', Reason} ->
- exit({open_port_failed,Reason,
- [{mod,?MODULE},{uri,RequestUri},{script,Script},
- {env,Env},{dir,Dir}]});
- O ->
- exit({open_port_failed,O,
- [{mod,?MODULE},{uri,RequestUri},{script,Script},
- {env,Env},{dir,Dir}]})
- end.
-
-
-%%
-%% Port communication
-%%
-
-proxy(Port, ErrorLog) ->
- process_flag(trap_exit, true),
- proxy(Port, ErrorLog, []).
-
-proxy(Port, ErrorLog, Result) ->
- receive
- {Port, {data, Response}} ->
- proxy(Port, ErrorLog, lists:append(Result,Response));
- {'EXIT', Port, normal} when is_port(Port) ->
- process_flag(trap_exit, false),
- {ErrorLog, Result};
- {'EXIT', Port, _Reason} when is_port(Port) ->
- process_flag(trap_exit, false),
- {[{internal_info,
- ?NICE("Scrambled output from CGI-script")}|ErrorLog],
- Result};
- {'EXIT', Pid, Reason} when is_pid(Pid) ->
- process_flag(trap_exit, false),
- {'EXIT', Pid, Reason};
- %% This should not happen!
- _WhatEver ->
- process_flag(trap_exit, false),
- {ErrorLog, Result}
- end.
-
-
-%% ------
-%% Temporary until I figure out a way to fix send_in_chunks
-%% (comments and directives that start in one chunk but end
-%% in another is not handled).
-%%
-
-send_in(Info, Path, Head, {ok,FileInfo}) ->
- case file:read_file(Path) of
- {ok, Bin} ->
- send_in1(Info, binary_to_list(Bin), Head, FileInfo);
- {error, Reason} ->
- {error, {read,Reason}}
- end;
-send_in(_Info , _Path, _Head,{error,Reason}) ->
- {error, {open,Reason}}.
-
-send_in1(Info, Data, Head, FileInfo) ->
- {ok, _Context, Err, ParsedBody} = parse(Info,Data,?DEFAULT_CONTEXT,[],[]),
- Size = length(ParsedBody),
- LastModified = case catch httpd_util:rfc1123_date(FileInfo#file_info.mtime) of
- Date when is_list(Date) -> [{last_modified,Date}];
- _ -> []
- end,
- Head1 = case Info#mod.http_version of
- "HTTP/1.1"->
- Head ++ [{content_length, integer_to_list(Size)},
- {etag, httpd_util:create_etag(FileInfo,Size)}|
- LastModified];
- _->
- %% i.e http/1.0 and http/0.9
- Head ++ [{content_length, integer_to_list(Size)}|
- LastModified]
- end,
- httpd_response:send_header(Info, 200, Head1),
- httpd_socket:deliver(Info#mod.socket_type,Info#mod.socket, ParsedBody),
- {ok, Err, Size}.
-
-
-parse(Info,Body) ->
- parse(Info, Body, ?DEFAULT_CONTEXT, [], []).
-
-parse(_Info, [], Context, ErrorLog, Result) ->
- {ok, Context, lists:reverse(ErrorLog), lists:reverse(Result)};
-parse(Info,[$<,$!,$-,$-,$#|R1],Context,ErrorLog,Result) ->
- case catch parse0(R1,Context) of
- {parse_error,Reason} ->
- parse(Info,R1,Context,[{internal_info,?NICE(Reason)}|ErrorLog],
- [$#,$-,$-,$!,$<|Result]);
- {ok,Context,Command,TagList,ValueList,R2} ->
- {ok,NewContext,NewErrorLog,MoreResult,R3}=
- handle(Info,Context,ErrorLog,Command,TagList,ValueList,R2),
- parse(Info,R3,NewContext,NewErrorLog,
- lists:reverse(MoreResult)++Result)
- end;
-parse(Info,[$<,$!,$-,$-|R1],Context,ErrorLog,Result) ->
- case catch parse5(R1,[],0) of
- {parse_error,Reason} ->
- parse(Info,R1,Context,
- [{internal_info,?NICE(Reason)}|ErrorLog],Result);
- {Comment,R2} ->
- parse(Info,R2,Context,ErrorLog,Comment++Result)
- end;
-parse(Info,[C|R],Context,ErrorLog,Result) ->
- parse(Info,R,Context,ErrorLog,[C|Result]).
-
-handle(Info,Context,ErrorLog,Command,TagList,ValueList,R) ->
- case catch apply(?MODULE,Command,[Info,Context,ErrorLog,TagList,ValueList,
- R]) of
- {'EXIT',{undef,_}} ->
- throw({parse_error,"Unknown command "++atom_to_list(Command)++
- " in parsed doc"});
- Result ->
- Result
- end.
-
-parse0([], _Context) ->
- throw({parse_error,"Premature EOF in parsed file"});
-parse0([$-,$-,$>|_R], _Context) ->
- throw({parse_error,"Premature EOF in parsed file"});
-parse0([$ |R], Context) ->
- parse0(R,Context);
-parse0(String, Context) ->
- parse1(String, Context,"").
-
-parse1([], _Context, _Command) ->
- throw({parse_error,"Premature EOF in parsed file"});
-parse1([$-,$-,$>|_R], _Context, _Command) ->
- throw({parse_error,"Premature EOF in parsed file"});
-parse1([$ |R], Context, Command) ->
- parse2(R,Context,list_to_atom(lists:reverse(Command)),[],[],"");
-parse1([C|R], Context, Command) ->
- parse1(R,Context,[C|Command]).
-
-parse2([], _Context, _Command, _TagList, _ValueList, _Tag) ->
- throw({parse_error,"Premature EOF in parsed file"});
-parse2([$-,$-,$>|R], Context, Command, TagList, ValueList, _Tag) ->
- {ok,Context,Command,TagList,ValueList,R};
-parse2([$ |R],Context,Command,TagList,ValueList,Tag) ->
- parse2(R,Context,Command,TagList,ValueList,Tag);
-parse2([$=|R],Context,Command,TagList,ValueList,Tag) ->
- parse3(R,Context,Command,[list_to_atom(lists:reverse(Tag))|TagList],
- ValueList);
-parse2([C|R],Context,Command,TagList,ValueList,Tag) ->
- parse2(R,Context,Command,TagList,ValueList,[C|Tag]).
-
-parse3([], _Context, _Command, _TagList, _ValueList) ->
- throw({parse_error,"Premature EOF in parsed file"});
-parse3([$-,$-,$>|_R], _Context, _Command, _TagList, _ValueList) ->
- throw({parse_error,"Premature EOF in parsed file"});
-parse3([$ |R], Context, Command, TagList, ValueList) ->
- parse3(R, Context, Command, TagList, ValueList);
-parse3([$"|R], Context, Command, TagList, ValueList) ->
- parse4(R,Context,Command,TagList,ValueList,"");
-parse3(_String, _Context, _Command, _TagList, _ValueList) ->
- throw({parse_error,"Premature EOF in parsed file"}).
-
-parse4([], _Context, _Command, _TagList, _ValueList, _Value) ->
- throw({parse_error,"Premature EOF in parsed file"});
-parse4([$-,$-,$>|_R], _Context, _Command, _TagList, _ValueList, _Value) ->
- throw({parse_error,"Premature EOF in parsed file"});
-parse4([$"|R],Context,Command,TagList,ValueList,Value) ->
- parse2(R,Context,Command,TagList,[lists:reverse(Value)|ValueList],"");
-parse4([C|R],Context,Command,TagList,ValueList,Value) ->
- parse4(R,Context,Command,TagList,ValueList,[C|Value]).
-
-parse5([], _Comment, _Depth) ->
- throw({parse_error,"Premature EOF in parsed file"});
-parse5([$<,$!,$-,$-|R],Comment,Depth) ->
- parse5(R,[$-,$-,$!,$<|Comment],Depth+1);
-parse5([$-,$-,$>|R],Comment,0) ->
- {">--"++Comment++"--!<",R};
-parse5([$-,$-,$>|R],Comment,Depth) ->
- parse5(R,[$>,$-,$-|Comment],Depth-1);
-parse5([C|R],Comment,Depth) ->
- parse5(R,[C|Comment],Depth).
-
-
-sz(B) when is_binary(B) -> {binary,size(B)};
-sz(L) when is_list(L) -> {list,length(L)};
-sz(_) -> undefined.
-
-%% send_error - Handle failure to send the file
-%%
-send_error({open,Reason},Info,Path) ->
- httpd_file:handle_error(Reason, "open", Info, Path);
-send_error({read,Reason},Info,Path) ->
- httpd_file:handle_error(Reason, "read", Info, Path).
-
-
-
-
diff --git a/lib/inets/src/inets_app/Makefile b/lib/inets/src/inets_app/Makefile
index 22426eee79..926585f198 100644
--- a/lib/inets/src/inets_app/Makefile
+++ b/lib/inets/src/inets_app/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2005-2012. All Rights Reserved.
+# Copyright Ericsson AB 2005-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -47,7 +47,9 @@ MODULES = \
inets_app \
inets_sup \
inets_regexp \
- inets_trace
+ inets_trace \
+ inets_lib \
+ inets_time_compat
INTERNAL_HRL_FILES = inets_internal.hrl
EXTERNAL_HRL_FILES = ../../include/httpd.hrl \
diff --git a/lib/inets/src/inets_app/inets.app.src b/lib/inets/src/inets_app/inets.app.src
index 9eae962d03..6ba9795d9e 100644
--- a/lib/inets/src/inets_app/inets.app.src
+++ b/lib/inets/src/inets_app/inets.app.src
@@ -1,7 +1,7 @@
%% This is an -*- erlang -*- file.
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,7 +26,9 @@
inets_app,
inets_service,
inets_regexp,
- inets_trace,
+ inets_trace,
+ inets_lib,
+ inets_time_compat,
%% FTP
ftp,
@@ -61,6 +63,7 @@
httpd_cgi,
httpd_connection_sup,
httpd_conf,
+ httpd_custom,
httpd_esi,
httpd_example,
httpd_file,
@@ -90,7 +93,6 @@
mod_get,
mod_head,
mod_htaccess,
- mod_include,
mod_log,
mod_range,
mod_responsecontrol,
diff --git a/lib/inets/src/inets_app/inets_lib.erl b/lib/inets/src/inets_app/inets_lib.erl
new file mode 100644
index 0000000000..fa6adaebd0
--- /dev/null
+++ b/lib/inets/src/inets_app/inets_lib.erl
@@ -0,0 +1,49 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(inets_lib).
+
+-export([millisec_passed/1, formated_timestamp/0, format_timestamp/1]).
+
+
+
+%% Helper function: elapsed milliseconds since T0
+millisec_passed({_,_,_} = T0 ) ->
+ %% OTP 17 and earlier
+ timer:now_diff(inets_time_compat:timestamp(), T0) div 1000;
+
+millisec_passed(T0) ->
+ %% OTP 18
+ erlang:convert_time_unit(erlang:monotonic_time() - T0,
+ native,
+ micro_seconds) div 1000.
+
+%% Return a formatted timestamp (e.g. 2015:03:16 10:05:23 1234)
+formated_timestamp() ->
+ format_timestamp( os:timestamp() ).
+
+%% Return a formatted timestamp (e.g. 2015:03:16 10:05:23 1234)
+format_timestamp({_N1, _N2, N3} = Tme) ->
+ {Date, Time} = calendar:now_to_datetime(Tme),
+ {YYYY,MM,DD} = Date,
+ {Hour,Min,Sec} = Time,
+ FormatDate =
+ io_lib:format("~.4w:~.2.0w:~.2.0w ~.2.0w:~.2.0w:~.2.0w ~w",
+ [YYYY,MM,DD,Hour,Min,Sec,round(N3/1000)]),
+ lists:flatten(FormatDate).
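A minimal usage sketch of the new helpers (not part of the patch; the wrapper function below is invented for illustration). Taking T0 as an erlang-timestamp tuple selects the first millisec_passed/1 clause, so the same code runs on ERTS releases both before and after 7.0:

    %% Hypothetical helper: time a fun and log the elapsed milliseconds.
    report_elapsed(Fun) ->
        T0 = inets_time_compat:timestamp(),
        Result = Fun(),
        io:format("~s: took ~p ms~n",
                  [inets_lib:formated_timestamp(),
                   inets_lib:millisec_passed(T0)]),
        Result.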
diff --git a/lib/inets/src/inets_app/inets_time_compat.erl b/lib/inets/src/inets_app/inets_time_compat.erl
new file mode 100644
index 0000000000..d6297d9caf
--- /dev/null
+++ b/lib/inets/src/inets_app/inets_time_compat.erl
@@ -0,0 +1,71 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%% This module exists so that the code can run on ERTS versions both
+%% earlier and later than 7.0.
+
+-module(inets_time_compat).
+
+%% We don't want warnings about the use of erlang:now/0 in
+%% this module.
+-compile(nowarn_deprecated_function).
+
+-export([monotonic_time/0,
+ timestamp/0,
+ unique_integer/0,
+ unique_integer/1]).
+
+monotonic_time() ->
+ try
+ erlang:monotonic_time()
+ catch
+ error:undef ->
+ %% Use Erlang system time as monotonic time
+ erlang_system_time_fallback()
+ end.
+
+timestamp() ->
+ try
+ erlang:timestamp()
+ catch
+ error:undef ->
+ erlang:now()
+ end.
+
+unique_integer() ->
+ try
+ erlang:unique_integer()
+ catch
+ error:undef ->
+ erlang_system_time_fallback()
+ end.
+
+unique_integer(Modifiers) ->
+ try
+ erlang:unique_integer(Modifiers)
+ catch
+ error:badarg ->
+ erlang:error(badarg, [Modifiers]);
+ error:undef ->
+ erlang_system_time_fallback()
+ end.
+
+erlang_system_time_fallback() ->
+ {MS, S, US} = erlang:now(),
+ (MS*1000000+S)*1000000+US.
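As an illustration only, a caller can replace a direct erlang:now/0-based seed with the wrapper; this mirrors the set_random_seed change made in httpc_SUITE further down, and the function name here is made up for the sketch:

    %% Hypothetical caller: seed the random module without erlang:now/0.
    %% On OTP 18 this uses erlang:unique_integer/1; on earlier releases
    %% inets_time_compat silently falls back to an erlang:now/0-derived integer.
    seed_random() ->
        Unique = inets_time_compat:unique_integer([positive]),
        Seed = erlang:phash2([make_ref(), self(), Unique]),
        random:seed(Seed, Seed, Seed).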
diff --git a/lib/inets/src/inets_app/inets_trace.erl b/lib/inets/src/inets_app/inets_trace.erl
index 8911f65897..cb6d6d8bdb 100644
--- a/lib/inets/src/inets_app/inets_trace.erl
+++ b/lib/inets/src/inets_app/inets_trace.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2011-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2011-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -228,21 +228,24 @@ handle_trace({trace_ts, _Who, call,
[_Sev, "stop trace", stop_trace, [stop_trace]]},
Timestamp},
{_, standard_io} = Fd) ->
- (catch io:format(standard_io, "stop trace at ~s~n", [format_timestamp(Timestamp)])),
+ (catch io:format(standard_io, "stop trace at ~s~n",
+ [inets_lib:format_timestamp(Timestamp)])),
Fd;
handle_trace({trace_ts, _Who, call,
{?MODULE, report_event,
[_Sev, "stop trace", stop_trace, [stop_trace]]},
Timestamp},
standard_io = Fd) ->
- (catch io:format(Fd, "stop trace at ~s~n", [format_timestamp(Timestamp)])),
+ (catch io:format(Fd, "stop trace at ~s~n",
+ [inets_lib:format_timestamp(Timestamp)])),
Fd;
handle_trace({trace_ts, _Who, call,
{?MODULE, report_event,
[_Sev, "stop trace", stop_trace, [stop_trace]]},
Timestamp},
{_Service, Fd}) ->
- (catch io:format(Fd, "stop trace at ~s~n", [format_timestamp(Timestamp)])),
+ (catch io:format(Fd, "stop trace at ~s~n",
+ [inets_lib:format_timestamp(Timestamp)])),
(catch file:close(Fd)),
closed_file;
handle_trace({trace_ts, _Who, call,
@@ -250,7 +253,8 @@ handle_trace({trace_ts, _Who, call,
[_Sev, "stop trace", stop_trace, [stop_trace]]},
Timestamp},
Fd) ->
- (catch io:format(Fd, "stop trace at ~s~n", [format_timestamp(Timestamp)])),
+ (catch io:format(Fd, "stop trace at ~s~n",
+ [inets_lib:format_timestamp(Timestamp)])),
(catch file:close(Fd)),
closed_file;
handle_trace({trace_ts, Who, call,
@@ -280,7 +284,7 @@ print_inets_trace(Fd, Sev, Timestamp, Who, Label, Service, Content) ->
do_print_inets_trace(Fd, Sev, Timestamp, Who, Label, Service, Content).
do_print_inets_trace(Fd, Sev, Timestamp, Who, Label, Service, Content) ->
- Ts = format_timestamp(Timestamp),
+ Ts = inets_lib:format_timestamp(Timestamp),
io:format(Fd, "[inets ~w trace ~w ~w ~s] ~s "
"~n Content: ~p"
"~n",
@@ -307,7 +311,7 @@ do_print_trace(Fd, {trace, Who, What, Where, Extra}) ->
"~n", [Who, What, Where, Extra]);
do_print_trace(Fd, {trace_ts, Who, What, Where, When}) ->
- Ts = format_timestamp(When),
+ Ts = inets_lib:format_timestamp(When),
io:format(Fd, "[trace ~s]"
"~n Who: ~p"
"~n What: ~p"
@@ -315,7 +319,7 @@ do_print_trace(Fd, {trace_ts, Who, What, Where, When}) ->
"~n", [Ts, Who, What, Where]);
do_print_trace(Fd, {trace_ts, Who, What, Where, Extra, When}) ->
- Ts = format_timestamp(When),
+ Ts = inets_lib:format_timestamp(When),
io:format(Fd, "[trace ~s]"
"~n Who: ~p"
"~n What: ~p"
@@ -330,7 +334,7 @@ do_print_trace(Fd, {seq_trace, What, Where}) ->
"~n", [What, Where]);
do_print_trace(Fd, {seq_trace, What, Where, When}) ->
- Ts = format_timestamp(When),
+ Ts = inets_lib:format_timestamp(When),
io:format(Fd, "[seq trace ~s]"
"~n What: ~p"
"~n Where: ~p"
@@ -345,13 +349,3 @@ do_print_trace(Fd, Trace) ->
"~n", [Trace]).
-format_timestamp({_N1, _N2, N3} = Now) ->
- {Date, Time} = calendar:now_to_datetime(Now),
- {YYYY,MM,DD} = Date,
- {Hour,Min,Sec} = Time,
- FormatDate =
- io_lib:format("~.4w:~.2.0w:~.2.0w ~.2.0w:~.2.0w:~.2.0w 4~w",
- [YYYY,MM,DD,Hour,Min,Sec,round(N3/1000)]),
- lists:flatten(FormatDate).
-
-
diff --git a/lib/inets/src/tftp/tftp_logger.erl b/lib/inets/src/tftp/tftp_logger.erl
index 0c3620e665..231a705371 100644
--- a/lib/inets/src/tftp/tftp_logger.erl
+++ b/lib/inets/src/tftp/tftp_logger.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -84,8 +84,8 @@ info_msg(Format, Data) ->
%%-------------------------------------------------------------------
add_timestamp(Format, Data) ->
- Now = {_MegaSecs, _Secs, _MicroSecs} = erlang:now(),
- {{_Y, _Mo, _D}, {H, Mi, S}} = calendar:now_to_universal_time(Now),
+ Time = inets_time_compat:timestamp(),
+ {{_Y, _Mo, _D}, {H, Mi, S}} = calendar:now_to_universal_time(Time),
%% {"~p-~s-~sT~s:~s:~sZ,~6.6.0w tftp: " ++ Format ++ "\n",
%% [Y, t(Mo), t(D), t(H), t(Mi), t(S), MicroSecs | Data]}.
{"~s:~s:~s tftp: " ++ Format, [t(H), t(Mi), t(S) | Data]}.
diff --git a/lib/inets/src/tftp/tftp_sup.erl b/lib/inets/src/tftp/tftp_sup.erl
index 1cafcc1069..7a0dcffc90 100644
--- a/lib/inets/src/tftp/tftp_sup.erl
+++ b/lib/inets/src/tftp/tftp_sup.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -93,7 +93,7 @@ unique_name(Options) ->
{value, {_, Port}} when is_integer(Port), Port > 0 ->
{tftpd, Port};
_ ->
- {tftpd, erlang:now()}
+ {tftpd, inets_time_compat:unique_integer([positive])}
end.
default_kill_after() ->
diff --git a/lib/inets/test/erl_make_certs.erl b/lib/inets/test/erl_make_certs.erl
index 22dc951ac1..6c168a5704 100644
--- a/lib/inets/test/erl_make_certs.erl
+++ b/lib/inets/test/erl_make_certs.erl
@@ -204,7 +204,7 @@ issuer_der(Issuer) ->
Subject.
subject(undefined, IsRootCA) ->
- User = if IsRootCA -> "RootCA"; true -> user() end,
+ User = if IsRootCA -> "RootCA"; true -> os:getenv("USER", "test_user") end,
Opts = [{email, User ++ "@erlang.org"},
{name, User},
{city, "Stockholm"},
@@ -215,14 +215,6 @@ subject(undefined, IsRootCA) ->
subject(Opts, _) ->
subject(Opts).
-user() ->
- case os:getenv("USER") of
- false ->
- "test_user";
- User ->
- User
- end.
-
subject(SubjectOpts) when is_list(SubjectOpts) ->
Encode = fun(Opt) ->
{Type,Value} = subject_enc(Opt),
diff --git a/lib/inets/test/ftp_suite_lib.erl b/lib/inets/test/ftp_suite_lib.erl
index daee1bdcdc..b637832101 100644
--- a/lib/inets/test/ftp_suite_lib.erl
+++ b/lib/inets/test/ftp_suite_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1352,9 +1352,9 @@ do_delete(Pid, Config) ->
ok.
do_mkdir(Pid) ->
- {A, B, C} = erlang:now(),
- NewDir = "nisse_" ++ integer_to_list(A) ++ "_" ++
- integer_to_list(B) ++ "_" ++ integer_to_list(C),
+ NewDir = "earl_" ++
+ integer_to_list(inets_time_compat:unique_integer([positive])),
+
ok = ftp:cd(Pid, "incoming"),
{ok, CurrDir} = ftp:pwd(Pid),
{error, efnamena} = ftp:mkdir(Pid, NewDir++"\r\nCWD ."),
diff --git a/lib/inets/test/http_format_SUITE.erl b/lib/inets/test/http_format_SUITE.erl
index d4a3f28f38..5952e9fd6e 100644
--- a/lib/inets/test/http_format_SUITE.erl
+++ b/lib/inets/test/http_format_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -355,10 +355,12 @@ http_request(Config) when is_list(Config) ->
"http://www.erlang.org",
"HTTP/1.1",
{#http_request_h{host = "www.erlang.org", te = []},
- ["te: ","host:www.erlang.org"]}, <<>>} =
+ [{"te", []}, {"host", "www.erlang.org"}]}, <<>>} =
parse(httpd_request, parse, [[{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version, ?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]],
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]],
HttpHead),
HttpHead1 = ["GET http://www.erlang.org HTTP/1.1" ++
@@ -369,7 +371,9 @@ http_request(Config) when is_list(Config) ->
{#http_request_h{}, []}, <<>>} =
parse(httpd_request, parse, [[{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version, ?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]], HttpHead1),
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]], HttpHead1),
HttpHead2 = ["GET http://www.erlang.org HTTP/1.1" ++
@@ -380,7 +384,9 @@ http_request(Config) when is_list(Config) ->
{#http_request_h{}, []}, <<>>} =
parse(httpd_request, parse, [[{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version, ?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]], HttpHead2),
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH}
+ ]], HttpHead2),
%% Note the following body is not related to the headers above
HttpBody = ["<HTML>\n<HEAD>\n<TITLE> dummy </TITLE>\n</HEAD>\n<BODY>\n",
diff --git a/lib/inets/test/httpc_SUITE.erl b/lib/inets/test/httpc_SUITE.erl
index 63f8bc5bc6..ab7ffadf75 100644
--- a/lib/inets/test/httpc_SUITE.erl
+++ b/lib/inets/test/httpc_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -92,6 +92,7 @@ only_simulated() ->
cookie,
cookie_profile,
empty_set_cookie,
+ invalid_set_cookie,
trace,
stream_once,
stream_single_chunk,
@@ -570,6 +571,18 @@ empty_set_cookie(Config) when is_list(Config) ->
ok = httpc:set_options([{cookies, disabled}]).
%%-------------------------------------------------------------------------
+invalid_set_cookie(doc) ->
+ ["Test ignoring invalid Set-Cookie header"];
+invalid_set_cookie(Config) when is_list(Config) ->
+ ok = httpc:set_options([{cookies, enabled}]),
+
+ URL = url(group_name(Config), "/invalid_set_cookie.html", Config),
+ {ok, {{_,200,_}, [_|_], [_|_]}} =
+ httpc:request(get, {URL, []}, [], []),
+
+ ok = httpc:set_options([{cookies, disabled}]).
+
+%%-------------------------------------------------------------------------
headers_as_is(doc) ->
["Test the option headers_as_is"];
headers_as_is(Config) when is_list(Config) ->
@@ -1275,8 +1288,11 @@ dummy_server_init(Caller, ip_comm, Inet, _) ->
dummy_ipcomm_server_loop({httpd_request, parse, [[{max_uri, ?HTTP_MAX_URI_SIZE},
{max_header, ?HTTP_MAX_HEADER_SIZE},
{max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]]},
- [], ListenSocket);
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH},
+ {customize, httpd_custom}
+ ]]},
+ [], ListenSocket);
dummy_server_init(Caller, ssl, Inet, SSLOptions) ->
BaseOpts = [binary, {reuseaddr,true}, {active, false} |
@@ -1290,7 +1306,10 @@ dummy_ssl_server_init(Caller, BaseOpts, Inet) ->
dummy_ssl_server_loop({httpd_request, parse, [[{max_uri, ?HTTP_MAX_URI_SIZE},
{max_method, ?HTTP_MAX_METHOD_STRING},
{max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]]},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH},
+ {customize, httpd_custom}
+ ]]},
[], ListenSocket).
dummy_ipcomm_server_loop(MFA, Handlers, ListenSocket) ->
@@ -1367,16 +1386,22 @@ handle_request(Module, Function, Args, Socket) ->
stop ->
stop;
<<>> ->
- {httpd_request, parse, [[<<>>, [{max_uri, ?HTTP_MAX_URI_SIZE},
- {max_header, ?HTTP_MAX_HEADER_SIZE},
- {max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]]]};
+ {httpd_request, parse, [[{max_uri,?HTTP_MAX_URI_SIZE},
+ {max_header, ?HTTP_MAX_HEADER_SIZE},
+ {max_version,?HTTP_MAX_VERSION_STRING},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH},
+ {customize, httpd_custom}
+ ]]};
Data ->
handle_request(httpd_request, parse,
[Data, [{max_uri, ?HTTP_MAX_URI_SIZE},
{max_header, ?HTTP_MAX_HEADER_SIZE},
- {max_version,?HTTP_MAX_VERSION_STRING},
- {max_method, ?HTTP_MAX_METHOD_STRING}]], Socket)
+ {max_version,?HTTP_MAX_VERSION_STRING},
+ {max_method, ?HTTP_MAX_METHOD_STRING},
+ {max_content_length, ?HTTP_MAX_CONTENT_LENGTH},
+ {customize, httpd_custom}
+ ]], Socket)
end;
NewMFA ->
NewMFA
@@ -1466,7 +1491,7 @@ dummy_ssl_server_hang_loop(_) ->
ensure_host_header_with_port([]) ->
false;
-ensure_host_header_with_port(["host: " ++ Host| _]) ->
+ensure_host_header_with_port([{"host", Host}| _]) ->
case string:tokens(Host, [$:]) of
[_ActualHost, _Port] ->
true;
@@ -1478,7 +1503,7 @@ ensure_host_header_with_port([_|T]) ->
auth_header([]) ->
auth_header_not_found;
-auth_header(["authorization:" ++ Value | _]) ->
+auth_header([{"authorization", Value} | _]) ->
{ok, string:strip(Value)};
auth_header([_ | Tail]) ->
auth_header(Tail).
@@ -1495,7 +1520,7 @@ handle_auth("Basic " ++ UserInfo, Challange, DefaultResponse) ->
check_cookie([]) ->
ct:fail(no_cookie_header);
-check_cookie(["cookie:" ++ _Value | _]) ->
+check_cookie([{"cookie", _} | _]) ->
ok;
check_cookie([_Head | Tail]) ->
check_cookie(Tail).
@@ -1715,6 +1740,14 @@ handle_uri(_,"/empty_set_cookie.html",_,_,_,_) ->
"Content-Length:32\r\n\r\n"++
"<HTML><BODY>foobar</BODY></HTML>";
+handle_uri(_,"/invalid_set_cookie.html",_,_,_,_) ->
+ "HTTP/1.1 200 ok\r\n" ++
+ "set-cookie: =\r\n" ++
+ "set-cookie: name=\r\n" ++
+ "set-cookie: name-or-value\r\n" ++
+ "Content-Length:32\r\n\r\n"++
+ "<HTML><BODY>foobar</BODY></HTML>";
+
handle_uri(_,"/missing_crlf.html",_,_,_,_) ->
"HTTP/1.1 200 ok" ++
"Content-Length:32\r\n" ++
@@ -1905,12 +1938,13 @@ run_clients(NumClients, ServerPort, SeqNumServer) ->
wait4clients([], _Timeout) ->
ok;
wait4clients(Clients, Timeout) when Timeout > 0 ->
- Time = now_ms(),
+ Time = inets_time_compat:monotonic_time(),
+
receive
{'DOWN', _MRef, process, Pid, normal} ->
{value, {Id, _, _}} = lists:keysearch(Pid, 2, Clients),
NewClients = lists:keydelete(Id, 1, Clients),
- wait4clients(NewClients, Timeout - (now_ms() - Time));
+ wait4clients(NewClients, Timeout - inets_lib:millisec_passed(Time));
{'DOWN', _MRef, process, Pid, Reason} ->
{value, {Id, _, _}} = lists:keysearch(Pid, 2, Clients),
ct:fail({bad_client_termination, Id, Reason})
@@ -2003,14 +2037,10 @@ parse_connection_type(Request) ->
"keep-alive" -> keep_alive
end.
-%% Time in milli seconds
-now_ms() ->
- {A,B,C} = erlang:now(),
- A*1000000000+B*1000+(C div 1000).
-
set_random_seed() ->
- {_, _, Micros} = now(),
- A = erlang:phash2([make_ref(), self(), Micros]),
+ Unique = inets_time_compat:unique_integer(),
+
+ A = erlang:phash2([make_ref(), self(), Unique]),
random:seed(A, A, A).
diff --git a/lib/inets/test/httpd_SUITE.erl b/lib/inets/test/httpd_SUITE.erl
index 4010597657..c90887bcf3 100644
--- a/lib/inets/test/httpd_SUITE.erl
+++ b/lib/inets/test/httpd_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -53,6 +53,8 @@ all() ->
{group, https_basic},
{group, http_limit},
{group, https_limit},
+ {group, http_custom},
+ {group, https_custom},
{group, http_basic_auth},
{group, https_basic_auth},
{group, http_auth_api},
@@ -66,7 +68,8 @@ all() ->
{group, http_security},
{group, https_security},
{group, http_reload},
- {group, https_reload}
+ {group, https_reload},
+ {group, http_mime_types}
].
groups() ->
@@ -75,6 +78,8 @@ groups() ->
{https_basic, [], basic_groups()},
{http_limit, [], [{group, limit}]},
{https_limit, [], [{group, limit}]},
+ {http_custom, [], [{group, custom}]},
+ {https_custom, [], [{group, custom}]},
{http_basic_auth, [], [{group, basic_auth}]},
{https_basic_auth, [], [{group, basic_auth}]},
{http_auth_api, [], [{group, auth_api}]},
@@ -89,7 +94,9 @@ groups() ->
{https_security, [], [{group, security}]},
{http_reload, [], [{group, reload}]},
{https_reload, [], [{group, reload}]},
+ {http_mime_types, [], [alias_1_1, alias_1_0, alias_0_9]},
{limit, [], [max_clients_1_1, max_clients_1_0, max_clients_0_9]},
+ {custom, [], [customize]},
{reload, [], [non_disturbing_reconfiger_dies,
disturbing_reconfiger_dies,
non_disturbing_1_1,
@@ -127,11 +134,11 @@ http_get() ->
get,
%%actions, Add configuration so that this test mod_action
esi,
- ssi,
content_length,
bad_hex,
missing_CR,
max_header,
+ max_content_length,
ipv6
].
@@ -176,6 +183,7 @@ end_per_suite(_Config) ->
%%--------------------------------------------------------------------
init_per_group(Group, Config0) when Group == https_basic;
Group == https_limit;
+ Group == https_custom;
Group == https_basic_auth;
Group == https_auth_api;
Group == https_auth_api_dets;
@@ -186,12 +194,14 @@ init_per_group(Group, Config0) when Group == https_basic;
init_ssl(Group, Config0);
init_per_group(Group, Config0) when Group == http_basic;
Group == http_limit;
+ Group == http_custom;
Group == http_basic_auth;
Group == http_auth_api;
Group == http_auth_api_dets;
Group == http_auth_api_mnesia;
Group == http_security;
- Group == http_reload
+ Group == http_reload;
+ Group == http_mime_types
->
ok = start_apps(Group),
init_httpd(Group, [{type, ip_comm} | Config0]);
@@ -235,7 +245,8 @@ end_per_group(Group, _Config) when Group == http_basic;
Group == http_auth_api_mnesia;
Group == http_htaccess;
Group == http_security;
- Group == http_reload
+ Group == http_reload;
+ Group == http_mime_types
->
inets:stop();
end_per_group(Group, _Config) when Group == https_basic;
@@ -551,22 +562,6 @@ ipv6(Config) when is_list(Config) ->
end.
%%-------------------------------------------------------------------------
-ssi() ->
- [{doc, "HTTP GET server side include test"}].
-ssi(Config) when is_list(Config) ->
- Version = ?config(http_version, Config),
- Host = ?config(host, Config),
- Type = ?config(type, Config),
- ok = httpd_test_lib:verify_request(?config(type, Config), Host, ?config(port, Config),
- transport_opts(Type, Config),
- ?config(node, Config),
- http_request("GET /fsize.shtml ", Version, Host),
- [{statuscode, 200},
- {header, "Content-Type", "text/html"},
- {header, "Date"},
- {header, "Server"},
- {version, Version}]).
-%%-------------------------------------------------------------------------
htaccess_1_1(Config) when is_list(Config) ->
htaccess([{http_version, "HTTP/1.1"} | Config]).
@@ -856,6 +851,24 @@ cgi_chunked_encoding_test(Config) when is_list(Config) ->
?config(node, Config),
Requests).
%%-------------------------------------------------------------------------
+alias_1_1() ->
+ [{doc, "Test mod_alias"}].
+
+alias_1_1(Config) when is_list(Config) ->
+ alias([{http_version, "HTTP/1.1"} | Config]).
+
+alias_1_0() ->
+ [{doc, "Test mod_alias"}].
+
+alias_1_0(Config) when is_list(Config) ->
+ alias([{http_version, "HTTP/1.0"} | Config]).
+
+alias_0_9() ->
+ [{doc, "Test mod_alias"}].
+
+alias_0_9(Config) when is_list(Config) ->
+ alias([{http_version, "HTTP/0.9"} | Config]).
+
alias() ->
[{doc, "Test mod_alias"}].
@@ -914,7 +927,6 @@ trace(Config) when is_list(Config) ->
Cb = ?config(version_cb, Config),
Cb:trace(?config(type, Config), ?config(port, Config),
?config(host, Config), ?config(node, Config)).
-
%%-------------------------------------------------------------------------
light() ->
["Test light load"].
@@ -972,6 +984,30 @@ missing_CR(Config) ->
{version, Version}]).
%%-------------------------------------------------------------------------
+customize() ->
+ [{doc, "Test filtering of headers with custom callback"}].
+
+customize(Config) when is_list(Config) ->
+ Version = "HTTP/1.1",
+ Host = ?config(host, Config),
+ Type = ?config(type, Config),
+ ok = httpd_test_lib:verify_request(?config(type, Config), Host,
+ ?config(port, Config),
+ transport_opts(Type, Config),
+ ?config(node, Config),
+ http_request("GET /index.html ", Version, Host),
+ [{statuscode, 200},
+ {header, "Content-Type", "text/html"},
+ {header, "Date"},
+ {no_header, "Server"},
+ {version, Version}]).
+
+response_header({"server", _}) ->
+ false;
+response_header(Header) ->
+ {true, Header}.
+
+%%-------------------------------------------------------------------------
max_header() ->
["Denial Of Service (DOS) attack, prevented by max_header"].
max_header(Config) when is_list(Config) ->
@@ -979,13 +1015,22 @@ max_header(Config) when is_list(Config) ->
Host = ?config(host, Config),
case Version of
"HTTP/0.9" ->
- {skip, no_implemented};
+ {skip, not_implemented};
_ ->
dos_hostname(?config(type, Config), ?config(port, Config), Host,
?config(node, Config), Version, ?MAX_HEADER_SIZE)
end.
%%-------------------------------------------------------------------------
+max_content_length() ->
+ ["Denial Of Service (DOS) attack, prevented by max_content_length"].
+max_content_length(Config) when is_list(Config) ->
+ Version = ?config(http_version, Config),
+ Host = ?config(host, Config),
+ garbage_content_length(?config(type, Config), ?config(port, Config), Host,
+ ?config(node, Config), Version).
+
+%%-------------------------------------------------------------------------
security_1_1(Config) when is_list(Config) ->
security([{http_version, "HTTP/1.1"} | Config]).
@@ -1266,22 +1311,26 @@ setup_server_dirs(ServerRoot, DocRoot, DataDir) ->
CgiDir = filename:join(ServerRoot, "cgi-bin"),
AuthDir = filename:join(ServerRoot, "auth"),
PicsDir = filename:join(ServerRoot, "icons"),
+ ConfigDir = filename:join(ServerRoot, "config"),
ok = file:make_dir(ServerRoot),
ok = file:make_dir(DocRoot),
ok = file:make_dir(CgiDir),
ok = file:make_dir(AuthDir),
ok = file:make_dir(PicsDir),
+ ok = file:make_dir(ConfigDir),
DocSrc = filename:join(DataDir, "server_root/htdocs"),
AuthSrc = filename:join(DataDir, "server_root/auth"),
CgiSrc = filename:join(DataDir, "server_root/cgi-bin"),
PicsSrc = filename:join(DataDir, "server_root/icons"),
+ ConfigSrc = filename:join(DataDir, "server_root/config"),
inets_test_lib:copy_dirs(DocSrc, DocRoot),
inets_test_lib:copy_dirs(AuthSrc, AuthDir),
inets_test_lib:copy_dirs(CgiSrc, CgiDir),
inets_test_lib:copy_dirs(PicsSrc, PicsDir),
+ inets_test_lib:copy_dirs(ConfigSrc, ConfigDir),
Cgi = case test_server:os_type() of
{win32, _} ->
@@ -1302,24 +1351,27 @@ setup_server_dirs(ServerRoot, DocRoot, DataDir) ->
start_apps(Group) when Group == https_basic;
Group == https_limit;
+ Group == https_custom;
Group == https_basic_auth;
Group == https_auth_api;
Group == https_auth_api_dets;
Group == https_auth_api_mnesia;
- Group == http_htaccess;
- Group == http_security;
- Group == http_reload
+ Group == https_htaccess;
+ Group == https_security;
+ Group == https_reload
->
inets_test_lib:start_apps([inets, asn1, crypto, public_key, ssl]);
start_apps(Group) when Group == http_basic;
Group == http_limit;
+ Group == http_custom;
Group == http_basic_auth;
Group == http_auth_api;
Group == http_auth_api_dets;
Group == http_auth_api_mnesia;
- Group == https_htaccess;
- Group == https_security;
- Group == https_reload->
+ Group == http_htaccess;
+ Group == http_security;
+ Group == http_reload;
+ Group == http_mime_types->
inets_test_lib:start_apps([inets]).
server_start(_, HttpdConfig) ->
@@ -1368,7 +1420,13 @@ server_config(http_reload, Config) ->
server_config(https_reload, Config) ->
[{keep_alive_timeout, 2}] ++ server_config(https, Config);
server_config(http_limit, Config) ->
- [{max_clients, 1}] ++ server_config(http, Config);
+ [{max_clients, 1},
+ %% Make sure option checking code is run
+ {max_content_length, 100000002}] ++ server_config(http, Config);
+server_config(http_custom, Config) ->
+ [{custom, ?MODULE}] ++ server_config(http, Config);
+server_config(https_custom, Config) ->
+ [{custom, ?MODULE}] ++ server_config(https, Config);
server_config(https_limit, Config) ->
[{max_clients, 1}] ++ server_config(https, Config);
server_config(http_basic_auth, Config) ->
@@ -1405,6 +1463,11 @@ server_config(http_security, Config) ->
server_config(https_security, Config) ->
ServerRoot = ?config(server_root, Config),
tl(auth_conf(ServerRoot)) ++ security_conf(ServerRoot) ++ server_config(https, Config);
+server_config(http_mime_types, Config0) ->
+ Config1 = basic_conf() ++ server_config(http, Config0),
+ ServerRoot = ?config(server_root, Config0),
+ MimeTypesFile = filename:join([ServerRoot,"config", "mime.types"]),
+ [{mime_types, MimeTypesFile} | proplists:delete(mime_types, Config1)];
server_config(http, Config) ->
ServerRoot = ?config(server_root, Config),
@@ -1814,7 +1877,7 @@ dos_hostname(Type, Port, Host, Node, Version, Max) ->
ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
dos_hostname_request(TooLongHeader, Version),
- [{statuscode, dos_code(Version)},
+ [{statuscode, request_entity_too_large_code(Version)},
{version, Version}]).
dos_hostname_request(Host, Version) ->
dos_http_request("GET / ", Version, Host).
@@ -1824,11 +1887,32 @@ dos_http_request(Request, "HTTP/1.1" = Version, Host) ->
dos_http_request(Request, Version, Host) ->
Request ++ Version ++ "\r\nhost:" ++ Host ++ "\r\n\r\n".
-dos_code("HTTP/1.0") ->
+request_entity_too_large_code("HTTP/1.0") ->
403; %% 413 not defined in HTTP/1.0
-dos_code(_) ->
+request_entity_too_large_code(_) ->
413.
+length_required_code("HTTP/1.0") ->
+ 403; %% 411 not defined in HTTP/1.0
+length_required_code(_) ->
+ 411.
+
+garbage_content_length(Type, Port, Host, Node, Version) ->
+ ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
+ garbage_content_length_request("GET / ", Version, Host, "aaaa"),
+ [{statuscode, length_required_code(Version)},
+ {version, Version}]),
+ ok = httpd_test_lib:verify_request(Type, Host, Port, Node,
+ garbage_content_length_request("GET / ", Version, Host,
+ lists:duplicate(100, $a)),
+ [{statuscode, request_entity_too_large_code(Version)},
+ {version, Version}]).
+
+garbage_content_length_request(Request, Version, Host, Garbage) ->
+ http_request(Request, Version, Host,
+ {"content-length:" ++ Garbage, "Body with garbage content length indicator"}).
+
+
update_password(Node, ServerRoot, _Address, Port, AuthPrefix, Dir, Old, New)->
Directory = filename:join([ServerRoot, "htdocs", AuthPrefix ++ Dir]),
rpc:call(Node, mod_auth, update_password,
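For reference, a header-filtering callback module in the style of the response_header/1 clauses that the suite registers via {custom, ?MODULE} might look as follows; the module name is invented for this sketch, and only the response-header hook exercised by the customize test is shown:

    -module(example_httpd_custom).
    -export([response_header/1]).

    %% Drop the "Server" response header, keep everything else unchanged.
    response_header({"server", _Value}) ->
        false;
    response_header(Header) ->
        {true, Header}.

The {no_header, "Server"} assertion in the customize test above then verifies that the filtered header never reaches the client.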
diff --git a/lib/inets/test/httpd_SUITE_data/server_root/config/mime.types b/lib/inets/test/httpd_SUITE_data/server_root/config/mime.types
new file mode 100644
index 0000000000..b68cff21a6
--- /dev/null
+++ b/lib/inets/test/httpd_SUITE_data/server_root/config/mime.types
@@ -0,0 +1,4 @@
+text/html html
+text/html htm
+text/html shtml
+image/gif gif
diff --git a/lib/inets/test/httpd_time_test.erl b/lib/inets/test/httpd_time_test.erl
index 0bb457f9b9..7dd61a5517 100644
--- a/lib/inets/test/httpd_time_test.erl
+++ b/lib/inets/test/httpd_time_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2011. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -116,13 +116,14 @@ main(N, SocketType, Host, Port, Time)
loop(Pollers, Timeout) ->
d("loop -> entry when"
"~n Timeout: ~p", [Timeout]),
- Start = t(),
+ Start = inets_time_compat:monotonic_time(),
+
receive
{'EXIT', Pid, {poller_stat_failure, SocketType, Host, Port, Time, Reason}} ->
case is_poller(Pid, Pollers) of
true ->
error_msg("received unexpected exit from poller ~p~n"
- "befor completion of test "
+ "before completion of test "
"after ~p micro sec"
"~n SocketType: ~p"
"~n Host: ~p"
@@ -133,7 +134,7 @@ loop(Pollers, Timeout) ->
false ->
error_msg("received unexpected ~p from ~p"
"befor completion of test", [Reason, Pid]),
- loop(Pollers, to(Timeout, Start))
+ loop(Pollers, Timeout - inets_lib:millisec_passed(Start))
end;
{poller_stat_failure, Pid, {SocketType, Host, Port, Time, Reason}} ->
@@ -412,35 +413,6 @@ validate(ExpStatusCode, _SocketType, _Socket, Response) ->
end.
-trash_the_rest(Socket, N) ->
- receive
- {ssl, Socket, Trash} ->
- trash_the_rest(Socket, add(N,sz(Trash)));
- {ssl_closed, Socket} ->
- N;
- {ssl_error, Socket, Error} ->
- exit({connection_error, Error});
-
- {tcp, Socket, Trash} ->
- trash_the_rest(Socket, add(N,sz(Trash)));
- {tcp_closed, Socket} ->
- N;
- {tcp_error, Socket, Error} ->
- exit({connection_error, Error})
-
- after 10000 ->
- exit({connection_timed_out, N})
- end.
-
-
-add(N1,N2) when is_integer(N1) andalso is_integer(N2) ->
- N1 + N2;
-add(N1,_) when is_integer(N1) ->
- N1;
-add(_,N2) when is_integer(N2) ->
- N2.
-
-
sz(L) when is_list(L) ->
length(lists:flatten(L));
sz(B) when is_binary(B) ->
@@ -505,17 +477,6 @@ status_to_message(Code) -> io_lib:format("Unknown status code: ~p",[Code]).
%% ----------------------------------------------------------------
-to(To, Start) ->
- To - (t() - Start).
-
-%% Time in milli seconds
-t() ->
- {A,B,C} = erlang:now(),
- A*1000000000+B*1000+(C div 1000).
-
-
-%% ----------------------------------------------------------------
-
% close(Socket) ->
diff --git a/lib/inets/test/inets_SUITE.erl b/lib/inets/test/inets_SUITE.erl
index 6510c70d08..a07dc79c02 100644
--- a/lib/inets/test/inets_SUITE.erl
+++ b/lib/inets/test/inets_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -549,25 +549,12 @@ tsf(Reason) ->
tsp(F) ->
tsp(F, []).
tsp(F, A) ->
- Timestamp = formated_timestamp(),
+ Timestamp = inets_lib:formated_timestamp(),
test_server:format("** ~s ** ~p ~p:" ++ F ++ "~n", [Timestamp, self(), ?MODULE | A]).
i(F) ->
i(F, []).
i(F, A) ->
- Timestamp = formated_timestamp(),
+ Timestamp = inets_lib:formated_timestamp(),
io:format("*** ~s ~w:" ++ F ++ "~n", [Timestamp, ?MODULE | A]).
-
-formated_timestamp() ->
- format_timestamp( os:timestamp() ).
-
-format_timestamp({_N1, _N2, N3} = Now) ->
- {Date, Time} = calendar:now_to_datetime(Now),
- {YYYY,MM,DD} = Date,
- {Hour,Min,Sec} = Time,
- FormatDate =
- io_lib:format("~.4w:~.2.0w:~.2.0w ~.2.0w:~.2.0w:~.2.0w 4~w",
- [YYYY,MM,DD,Hour,Min,Sec,round(N3/1000)]),
- lists:flatten(FormatDate).
-
diff --git a/lib/inets/test/inets_app_test.erl b/lib/inets/test/inets_app_test.erl
index eabfa69f7c..22d6e25c87 100644
--- a/lib/inets/test/inets_app_test.erl
+++ b/lib/inets/test/inets_app_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -32,19 +32,6 @@
%% Test server callbacks
-init_per_testcase(undef_funcs, Config) ->
- NewConfig = lists:keydelete(watchdog, 1, Config),
- Dog = test_server:timetrap(inets_test_lib:minutes(10)),
-
- %% We need to check if there is a point to run this test.
- %% On some platforms, crypto will not build, which in turn
- %% causes ssl to not build (at this time, this will
- %% change in the future).
- %% So, we first check if we can start crypto, and if not,
- %% we skip this test case!
- ?ENSURE_STARTED(crypto),
-
- [{watchdog, Dog}| NewConfig];
init_per_testcase(_, Config) ->
Config.
@@ -54,7 +41,7 @@ end_per_testcase(_Case, Config) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
all() ->
- [fields, modules, exportall, app_depend, undef_funcs].
+ [fields, modules, exportall, app_depend].
groups() ->
[].
@@ -244,56 +231,6 @@ check_apps([App|Apps]) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-undef_funcs(suite) ->
- [];
-undef_funcs(doc) ->
- [];
-undef_funcs(Config) when is_list(Config) ->
- App = inets,
- AppFile = key1search(app_file, Config),
- Mods = key1search(modules, AppFile),
- Root = code:root_dir(),
- LibDir = code:lib_dir(App),
- EbinDir = filename:join([LibDir,"ebin"]),
- XRefTestName = undef_funcs_make_name(App, xref_test_name),
- {ok, XRef} = xref:start(XRefTestName),
- ok = xref:set_default(XRef,
- [{verbose,false},{warnings,false}]),
- XRefName = undef_funcs_make_name(App, xref_name),
- {ok, XRefName} = xref:add_release(XRef, Root, {name, XRefName}),
- {ok, App} = xref:replace_application(XRef, App, EbinDir),
- {ok, Undefs} = xref:analyze(XRef, undefined_function_calls),
- xref:stop(XRef),
- analyze_undefined_function_calls(Undefs, Mods, []).
-
-analyze_undefined_function_calls([], _, []) ->
- ok;
-analyze_undefined_function_calls([], _, AppUndefs) ->
- exit({suite_failed, {undefined_function_calls, AppUndefs}});
-analyze_undefined_function_calls([{{Mod, _F, _A}, _C} = AppUndef|Undefs],
- AppModules, AppUndefs) ->
- %% Check that this module is our's
- case lists:member(Mod,AppModules) of
- true ->
- {Calling,Called} = AppUndef,
- {Mod1,Func1,Ar1} = Calling,
- {Mod2,Func2,Ar2} = Called,
- io:format("undefined function call: "
- "~n ~w:~w/~w calls ~w:~w/~w~n",
- [Mod1,Func1,Ar1,Mod2,Func2,Ar2]),
- analyze_undefined_function_calls(Undefs, AppModules,
- [AppUndef|AppUndefs]);
- false ->
- io:format("dropping ~p~n", [Mod]),
- analyze_undefined_function_calls(Undefs, AppModules, AppUndefs)
- end.
-
-%% This function is used simply to avoid cut-and-paste errors later...
-undef_funcs_make_name(App, PostFix) ->
- list_to_atom(atom_to_list(App) ++ "_" ++ atom_to_list(PostFix)).
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
fail(Reason) ->
exit({suite_failed, Reason}).
diff --git a/lib/inets/test/inets_test_lib.erl b/lib/inets/test/inets_test_lib.erl
index 4be9d9c8b3..7485971d3e 100644
--- a/lib/inets/test/inets_test_lib.erl
+++ b/lib/inets/test/inets_test_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -498,13 +498,6 @@ close(essl,Socket) ->
close(ip_comm,Socket) ->
catch gen_tcp:close(Socket).
-millis() ->
- erlang:now().
-
-millis_diff(A,B) ->
- T1 = (element(1,A)*1000000) + element(2,A) + (element(3,A)/1000000),
- T2 = (element(1,B)*1000000) + element(2,B) + (element(3,B)/1000000),
- T1 - T2.
hours(N) -> trunc(N * 1000 * 60 * 60).
minutes(N) -> trunc(N * 1000 * 60).
@@ -546,7 +539,7 @@ flush() ->
tsp(F) ->
tsp(F, []).
tsp(F, A) ->
- Timestamp = formated_timestamp(),
+ Timestamp = inets_lib:formated_timestamp(),
ct:pal("*** ~s ~p ~p " ++ F ++ "~n",
[Timestamp, node(), self() | A]).
@@ -559,18 +552,6 @@ tss(Time) ->
timestamp() ->
http_util:timestamp().
-formated_timestamp() ->
- format_timestamp( os:timestamp() ).
-
-format_timestamp({_N1, _N2, N3} = Now) ->
- {Date, Time} = calendar:now_to_datetime(Now),
- {YYYY,MM,DD} = Date,
- {Hour,Min,Sec} = Time,
- FormatDate =
- io_lib:format("~.4w:~.2.0w:~.2.0w ~.2.0w:~.2.0w:~.2.0w 4~w",
- [YYYY,MM,DD,Hour,Min,Sec,round(N3/1000)]),
- lists:flatten(FormatDate).
-
start_apps(Apps) ->
lists:foreach(fun(App) ->
application:stop(App),
diff --git a/lib/inets/test/old_httpd_SUITE.erl b/lib/inets/test/old_httpd_SUITE.erl
index 74c11f71ba..39b0b08645 100644
--- a/lib/inets/test/old_httpd_SUITE.erl
+++ b/lib/inets/test/old_httpd_SUITE.erl
@@ -2072,13 +2072,13 @@ create_config(Config, Access, FileName) ->
"Modules mod_alias mod_htaccess mod_auth "
"mod_security "
"mod_responsecontrol mod_trace mod_esi "
- "mod_actions mod_cgi mod_include mod_dir "
+ "mod_actions mod_cgi mod_dir "
"mod_range mod_get "
"mod_head mod_log mod_disk_log";
_ ->
"Modules mod_alias mod_auth mod_security "
"mod_responsecontrol mod_trace mod_esi "
- "mod_actions mod_cgi mod_include mod_dir "
+ "mod_actions mod_cgi mod_dir "
"mod_range mod_get "
"mod_head mod_log mod_disk_log"
end,
@@ -2436,7 +2436,7 @@ create_ipv6_config(Config, FileName, Ipv6Address) ->
MaxHdrAct = io_lib:format("~p", [close]),
Mod_order = "Modules mod_alias mod_auth mod_esi mod_actions mod_cgi"
- " mod_include mod_dir mod_get mod_head"
+ " mod_dir mod_get mod_head"
" mod_log mod_disk_log mod_trace",
SSL =
diff --git a/lib/inets/test/uri_SUITE.erl b/lib/inets/test/uri_SUITE.erl
index 9ba09e1474..f75e347d0c 100644
--- a/lib/inets/test/uri_SUITE.erl
+++ b/lib/inets/test/uri_SUITE.erl
@@ -46,6 +46,7 @@ all() ->
userinfo,
scheme,
queries,
+ fragments,
escaped,
hexed_query
].
@@ -105,6 +106,42 @@ queries(Config) when is_list(Config) ->
{ok, {http,[],"localhost",8888,"/foobar.html","?foo=bar&foobar=42"}} =
http_uri:parse("http://localhost:8888/foobar.html?foo=bar&foobar=42").
+fragments(Config) when is_list(Config) ->
+ {ok, {http,[],"localhost",80,"/",""}} =
+ http_uri:parse("http://localhost#fragment"),
+ {ok, {http,[],"localhost",80,"/path",""}} =
+ http_uri:parse("http://localhost/path#fragment"),
+ {ok, {http,[],"localhost",80,"/","?query"}} =
+ http_uri:parse("http://localhost?query#fragment"),
+ {ok, {http,[],"localhost",80,"/path","?query"}} =
+ http_uri:parse("http://localhost/path?query#fragment"),
+ {ok, {http,[],"localhost",80,"/","","#fragment"}} =
+ http_uri:parse("http://localhost#fragment", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","","#fragment"}} =
+ http_uri:parse("http://localhost/path#fragment", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","?query","#fragment"}} =
+ http_uri:parse("http://localhost?query#fragment", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","?query","#fragment"}} =
+ http_uri:parse("http://localhost/path?query#fragment",
+ [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","",""}} =
+ http_uri:parse("http://localhost", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","",""}} =
+ http_uri:parse("http://localhost/path", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","?query",""}} =
+ http_uri:parse("http://localhost?query", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","?query",""}} =
+ http_uri:parse("http://localhost/path?query", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","","#"}} =
+ http_uri:parse("http://localhost#", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","","#"}} =
+ http_uri:parse("http://localhost/path#", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/","?query","#"}} =
+ http_uri:parse("http://localhost?query#", [{fragment,true}]),
+ {ok, {http,[],"localhost",80,"/path","?query","#"}} =
+ http_uri:parse("http://localhost/path?query#", [{fragment,true}]),
+ ok.
+
escaped(Config) when is_list(Config) ->
{ok, {http,[],"www.somedomain.com",80,"/%2Eabc",[]}} =
http_uri:parse("http://www.somedomain.com/%2Eabc"),
diff --git a/lib/inets/vsn.mk b/lib/inets/vsn.mk
index dbae5e4b3c..f52347e39e 100644
--- a/lib/inets/vsn.mk
+++ b/lib/inets/vsn.mk
@@ -2,7 +2,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2001-2014. All Rights Reserved.
+# Copyright Ericsson AB 2001-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -18,6 +18,6 @@
# %CopyrightEnd%
APPLICATION = inets
-INETS_VSN = 5.10.4
+INETS_VSN = 6.0
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(INETS_VSN)$(PRE_VSN)"
diff --git a/lib/jinterface/doc/src/jinterface_users_guide.xml b/lib/jinterface/doc/src/jinterface_users_guide.xml
index 5dfe5c0c6d..238f90ce38 100644
--- a/lib/jinterface/doc/src/jinterface_users_guide.xml
+++ b/lib/jinterface/doc/src/jinterface_users_guide.xml
@@ -223,6 +223,14 @@ OtpNode node = new OtpNode("gurka"); </code>
</section>
<section>
+ <title>Transport Factory</title>
+ <p>All necessary connections are made using methods of the
+ <seealso marker="java/com/ericsson/otp/erlang/OtpTransportFactory">OtpTransportFactory</seealso>
+ interface. The default OtpTransportFactory implementation is based on the standard Socket class.
+ Users may provide a custom transport factory as needed. See the Javadoc for details.</p>
+ </section>
+
+ <section>
<title>Sending and Receiving Messages</title>
<p>Messages sent with this package must be instances of
<seealso marker="java/com/ericsson/otp/erlang/OtpErlangObject">OtpErlangObject</seealso>
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java
index 1b0fe3e2e6..ab8fa06c1b 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java
@@ -20,7 +20,7 @@
package com.ericsson.otp.erlang;
import java.io.IOException;
-import java.net.Socket;
+import java.io.OutputStream;
import java.util.Random;
/**
@@ -84,7 +84,7 @@ public abstract class AbstractConnection extends Thread {
private volatile boolean done = false;
protected boolean connected = false; // connection status
- protected Socket socket; // communication channel
+ protected OtpTransport socket; // communication channel
protected OtpPeer peer; // who are we connected to
protected OtpLocalNode localNode; // this nodes id
String name; // local name of this connection
@@ -126,7 +126,7 @@ public abstract class AbstractConnection extends Thread {
* Accept an incoming connection from a remote node. Used by
* {@link OtpSelf#accept() OtpSelf.accept()} to create a connection based on
* data received when handshaking with the peer node, when the remote node
- * is the connection intitiator.
+ * is the connection initiator.
*
* @exception java.io.IOException
* if it was not possible to connect to the peer.
@@ -134,20 +134,17 @@ public abstract class AbstractConnection extends Thread {
* @exception OtpAuthException
* if handshake resulted in an authentication error
*/
- protected AbstractConnection(final OtpLocalNode self, final Socket s)
+ protected AbstractConnection(final OtpLocalNode self, final OtpTransport s)
throws IOException, OtpAuthException {
localNode = self;
- peer = new OtpPeer();
+ peer = new OtpPeer(self.transportFactory);
socket = s;
- socket.setTcpNoDelay(true);
-
traceLevel = defaultLevel;
setDaemon(true);
if (traceLevel >= handshakeThreshold) {
- System.out.println("<- ACCEPT FROM " + s.getInetAddress() + ":"
- + s.getPort());
+ System.out.println("<- ACCEPT FROM " + s);
}
// get his info
@@ -189,6 +186,8 @@ public abstract class AbstractConnection extends Thread {
// now get a connection between the two...
port = OtpEpmd.lookupPort(peer);
+ if (port == 0)
+ throw new IOException("No remote node found - cannot connect");
// now find highest common dist value
if (peer.proto != self.proto || self.distHigh < peer.distLow
@@ -523,7 +522,9 @@ public abstract class AbstractConnection extends Thread {
// received tick? send tock!
if (len == 0) {
synchronized (this) {
- socket.getOutputStream().write(tock);
+ OutputStream out = socket.getOutputStream();
+ out.write(tock);
+ out.flush();
}
}
@@ -837,8 +838,11 @@ public abstract class AbstractConnection extends Thread {
}
}
- header.writeTo(socket.getOutputStream());
- payload.writeTo(socket.getOutputStream());
+ // group the writes and flush once, in case the stream is buffered or tunneled (e.g. over ssh)
+ OutputStream out = socket.getOutputStream();
+ header.writeTo(out);
+ payload.writeTo(out);
+ out.flush();
} catch (final IOException e) {
close();
throw e;
@@ -859,7 +863,7 @@ public abstract class AbstractConnection extends Thread {
+ e);
}
}
- header.writeTo(socket.getOutputStream());
+ header.writeToAndFlush(socket.getOutputStream());
} catch (final IOException e) {
close();
throw e;
@@ -913,7 +917,8 @@ public abstract class AbstractConnection extends Thread {
}
/* this method now throws exception if we don't get full read */
- protected int readSock(final Socket s, final byte[] b) throws IOException {
+ protected int readSock(final OtpTransport s, final byte[] b)
+ throws IOException {
int got = 0;
final int len = b.length;
int i;
@@ -980,8 +985,7 @@ public abstract class AbstractConnection extends Thread {
protected void doConnect(final int port) throws IOException,
OtpAuthException {
try {
- socket = new Socket(peer.host(), port);
- socket.setTcpNoDelay(true);
+ socket = peer.createTransport(peer.host(), port);
if (traceLevel >= handshakeThreshold) {
System.out.println("-> MD5 CONNECT TO " + peer.host() + ":"
@@ -1077,7 +1081,7 @@ public abstract class AbstractConnection extends Thread {
obuf.write4BE(aflags);
obuf.write(str.getBytes());
- obuf.writeTo(socket.getOutputStream());
+ obuf.writeToAndFlush(socket.getOutputStream());
if (traceLevel >= handshakeThreshold) {
System.out.println("-> " + "HANDSHAKE sendName" + " flags="
@@ -1098,7 +1102,7 @@ public abstract class AbstractConnection extends Thread {
obuf.write4BE(challenge);
obuf.write(str.getBytes());
- obuf.writeTo(socket.getOutputStream());
+ obuf.writeToAndFlush(socket.getOutputStream());
if (traceLevel >= handshakeThreshold) {
System.out.println("-> " + "HANDSHAKE sendChallenge" + " flags="
@@ -1232,7 +1236,7 @@ public abstract class AbstractConnection extends Thread {
obuf.write1(ChallengeReply);
obuf.write4BE(challenge);
obuf.write(digest);
- obuf.writeTo(socket.getOutputStream());
+ obuf.writeToAndFlush(socket.getOutputStream());
if (traceLevel >= handshakeThreshold) {
System.out.println("-> " + "HANDSHAKE sendChallengeReply"
@@ -1294,7 +1298,7 @@ public abstract class AbstractConnection extends Thread {
obuf.write1(ChallengeAck);
obuf.write(digest);
- obuf.writeTo(socket.getOutputStream());
+ obuf.writeToAndFlush(socket.getOutputStream());
if (traceLevel >= handshakeThreshold) {
System.out.println("-> " + "HANDSHAKE sendChallengeAck"
@@ -1341,7 +1345,7 @@ public abstract class AbstractConnection extends Thread {
obuf.write1(ChallengeStatus);
obuf.write(status.getBytes());
- obuf.writeTo(socket.getOutputStream());
+ obuf.writeToAndFlush(socket.getOutputStream());
if (traceLevel >= handshakeThreshold) {
System.out.println("-> " + "HANDSHAKE sendStatus" + " status="
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractNode.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractNode.java
index 6f07d8171e..0a33984b31 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractNode.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractNode.java
@@ -64,13 +64,14 @@ import java.net.UnknownHostException;
* instead.
* </p>
*/
-public class AbstractNode {
+public class AbstractNode implements OtpTransportFactory {
static String localHost = null;
String node;
String host;
String alive;
String cookie;
static String defaultCookie = null;
+ final OtpTransportFactory transportFactory;
// Node types
static final int NTYPE_R6 = 110; // 'n' post-r5, all nodes
@@ -146,21 +147,41 @@ public class AbstractNode {
}
}
- protected AbstractNode() {
+ protected AbstractNode(final OtpTransportFactory transportFactory) {
+ this.transportFactory = transportFactory;
}
/**
- * Create a node with the given name and the default cookie.
+ * Create a node with the given name and default cookie and transport
+ * factory.
*/
protected AbstractNode(final String node) {
- this(node, defaultCookie);
+ this(node, defaultCookie, new OtpSocketTransportFactory());
}
/**
- * Create a node with the given name and cookie.
+ * Create a node with the given name, transport factory and the default
+ * cookie.
+ */
+ protected AbstractNode(final String node,
+ final OtpTransportFactory transportFactory) {
+ this(node, defaultCookie, transportFactory);
+ }
+
+ /**
+ * Create a node with the given name, cookie and default transport factory.
*/
protected AbstractNode(final String name, final String cookie) {
+ this(name, cookie, new OtpSocketTransportFactory());
+ }
+
+ /**
+ * Create a node with the given name, cookie and transport factory.
+ */
+ protected AbstractNode(final String name, final String cookie,
+ final OtpTransportFactory transportFactory) {
this.cookie = cookie;
+ this.transportFactory = transportFactory;
final int i = name.indexOf('@', 0);
if (i < 0) {
@@ -268,4 +289,19 @@ public class AbstractNode {
}
return home;
}
+
+ public OtpTransport createTransport(final String addr, final int port)
+ throws IOException {
+ return transportFactory.createTransport(addr, port);
+ }
+
+ public OtpTransport createTransport(final InetAddress addr, final int port)
+ throws IOException {
+ return transportFactory.createTransport(addr, port);
+ }
+
+ public OtpServerTransport createServerTransport(final int port)
+ throws IOException {
+ return transportFactory.createServerTransport(port);
+ }
}
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpConnection.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpConnection.java
index 2c9b7766bc..af0926f939 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpConnection.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpConnection.java
@@ -19,7 +19,6 @@
package com.ericsson.otp.erlang;
import java.io.IOException;
-import java.net.Socket;
/**
* Maintains a connection between a Java process and a remote Erlang, Java or C
@@ -63,8 +62,8 @@ public class OtpConnection extends AbstractConnection {
* error
*/
// package scope
- OtpConnection(final OtpSelf self, final Socket s) throws IOException,
- OtpAuthException {
+ OtpConnection(final OtpSelf self, final OtpTransport s)
+ throws IOException, OtpAuthException {
super(self, s);
this.self = self;
queue = new GenericQueue();
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpCookedConnection.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpCookedConnection.java
index 4d80f61d52..b0e3e81fca 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpCookedConnection.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpCookedConnection.java
@@ -19,7 +19,6 @@
package com.ericsson.otp.erlang;
import java.io.IOException;
-import java.net.Socket;
/**
* <p>
@@ -78,8 +77,8 @@ public class OtpCookedConnection extends AbstractConnection {
* error
*/
// package scope
- OtpCookedConnection(final OtpNode self, final Socket s) throws IOException,
- OtpAuthException {
+ OtpCookedConnection(final OtpNode self, final OtpTransport s)
+ throws IOException, OtpAuthException {
super(self, s);
this.self = self;
links = new Links(25);
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpEpmd.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpEpmd.java
index 796babee1b..6c7c8fe951 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpEpmd.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpEpmd.java
@@ -21,13 +21,12 @@ package com.ericsson.otp.erlang;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.InetAddress;
-import java.net.Socket;
/**
* Provides methods for registering, unregistering and looking up nodes with the
* Erlang portmapper daemon (Epmd). For each registered node, Epmd maintains
* information about the port on which incoming connections are accepted, as
- * well as which versions of the Erlang communication protocolt the node
+ * well as which versions of the Erlang communication protocol the node
* supports.
*
* <p>
@@ -136,7 +135,7 @@ public class OtpEpmd {
*/
public static boolean publishPort(final OtpLocalNode node)
throws IOException {
- Socket s = null;
+ OtpTransport s = null;
s = r4_publish(node);
@@ -156,16 +155,16 @@ public class OtpEpmd {
* This method does not report any failures.
*/
public static void unPublishPort(final OtpLocalNode node) {
- Socket s = null;
+ OtpTransport s = null;
try {
- s = new Socket((String) null, EpmdPort.get());
+ s = node.createTransport((String) null, EpmdPort.get());
@SuppressWarnings("resource")
final OtpOutputStream obuf = new OtpOutputStream();
obuf.write2BE(node.alive().length() + 1);
obuf.write1(stopReq);
obuf.writeN(node.alive().getBytes());
- obuf.writeTo(s.getOutputStream());
+ obuf.writeToAndFlush(s.getOutputStream());
// don't even wait for a response (is there one?)
if (traceLevel >= traceThreshold) {
System.out.println("-> UNPUBLISH " + node + " port="
@@ -187,12 +186,12 @@ public class OtpEpmd {
private static int r4_lookupPort(final AbstractNode node)
throws IOException {
int port = 0;
- Socket s = null;
+ OtpTransport s = null;
try {
@SuppressWarnings("resource")
final OtpOutputStream obuf = new OtpOutputStream();
- s = new Socket(node.host(), EpmdPort.get());
+ s = node.createTransport(node.host(), EpmdPort.get());
// build and send epmd request
// length[2], tag[1], alivename[n] (length = n+1)
@@ -201,7 +200,7 @@ public class OtpEpmd {
obuf.writeN(node.alive().getBytes());
// send request
- obuf.writeTo(s.getOutputStream());
+ obuf.writeToAndFlush(s.getOutputStream());
if (traceLevel >= traceThreshold) {
System.out.println("-> LOOKUP (r4) " + node);
@@ -242,7 +241,7 @@ public class OtpEpmd {
System.out.println("<- (no response)");
}
throw new IOException("Nameserver not responding on " + node.host()
- + " when looking up " + node.alive());
+ + " when looking up " + node.alive(), e);
} catch (final OtpErlangDecodeException e) {
if (traceLevel >= traceThreshold) {
System.out.println("<- (invalid response)");
@@ -276,14 +275,14 @@ public class OtpEpmd {
* fatal. If we manage to successfully communicate with an r4 epmd, we
* return either the socket, or null, depending on the result.
*/
- private static Socket r4_publish(final OtpLocalNode node)
+ private static OtpTransport r4_publish(final OtpLocalNode node)
throws IOException {
- Socket s = null;
+ OtpTransport s = null;
try {
@SuppressWarnings("resource")
final OtpOutputStream obuf = new OtpOutputStream();
- s = new Socket((String) null, EpmdPort.get());
+ s = node.createTransport((String) null, EpmdPort.get());
obuf.write2BE(node.alive().length() + 13);
@@ -301,7 +300,7 @@ public class OtpEpmd {
obuf.write2BE(0); // No extra
// send request
- obuf.writeTo(s.getOutputStream());
+ obuf.writeToAndFlush(s.getOutputStream());
if (traceLevel >= traceThreshold) {
System.out.println("-> PUBLISH (r4) " + node + " port="
@@ -356,23 +355,34 @@ public class OtpEpmd {
}
public static String[] lookupNames() throws IOException {
- return lookupNames(InetAddress.getByName(null));
+ return lookupNames(InetAddress.getByName(null),
+ new OtpSocketTransportFactory());
+ }
+
+ public static String[] lookupNames(
+ final OtpTransportFactory transportFactory) throws IOException {
+ return lookupNames(InetAddress.getByName(null), transportFactory);
}
public static String[] lookupNames(final InetAddress address)
throws IOException {
- Socket s = null;
+ return lookupNames(address, new OtpSocketTransportFactory());
+ }
+
+ public static String[] lookupNames(final InetAddress address,
+ final OtpTransportFactory transportFactory) throws IOException {
+ OtpTransport s = null;
try {
@SuppressWarnings("resource")
final OtpOutputStream obuf = new OtpOutputStream();
try {
- s = new Socket(address, EpmdPort.get());
+ s = transportFactory.createTransport(address, EpmdPort.get());
obuf.write2BE(1);
obuf.write1(names4req);
// send request
- obuf.writeTo(s.getOutputStream());
+ obuf.writeToAndFlush(s.getOutputStream());
if (traceLevel >= traceThreshold) {
System.out.println("-> NAMES (r4) ");
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangList.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangList.java
index 990e50ddcd..268261ec10 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangList.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangList.java
@@ -297,6 +297,54 @@ public class OtpErlangList extends OtpErlangObject implements
return getLastTail().equals(l.getLastTail());
}
+ @Override
+ public <T> boolean match(final OtpErlangObject term, final T bindings) {
+ if (!(term instanceof OtpErlangList)) {
+ return false;
+ }
+ final OtpErlangList that = (OtpErlangList) term;
+
+ final int thisArity = this.arity();
+ final int thatArity = that.arity();
+ final OtpErlangObject thisTail = this.getLastTail();
+ final OtpErlangObject thatTail = that.getLastTail();
+
+ if (thisTail == null) {
+ if (thisArity != thatArity || thatTail != null) {
+ return false;
+ }
+ } else {
+ if (thisArity > thatArity) {
+ return false;
+ }
+ }
+ for (int i = 0; i < thisArity; i++) {
+ if (!elementAt(i).match(that.elementAt(i), bindings)) {
+ return false;
+ }
+ }
+ if (thisTail == null) {
+ return true;
+ }
+ return thisTail.match(that.getNthTail(thisArity), bindings);
+ }
+
+ @Override
+ public <T> OtpErlangObject bind(final T binds) throws OtpErlangException {
+ final OtpErlangList list = (OtpErlangList) this.clone();
+
+ final int a = list.elems.length;
+ for (int i = 0; i < a; i++) {
+ list.elems[i] = list.elems[i].bind(binds);
+ }
+
+ if (list.lastTail != null) {
+ list.lastTail = list.lastTail.bind(binds);
+ }
+
+ return list;
+ }
+
public OtpErlangObject getLastTail() {
return lastTail;
}
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangMap.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangMap.java
index 7f2621923a..a8cd9d5392 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangMap.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangMap.java
@@ -18,6 +18,11 @@
*/
package com.ericsson.otp.erlang;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
/**
* Provides a Java representation of Erlang maps. Maps are created from one or
* more arbitrary Erlang terms.
@@ -31,10 +36,14 @@ public class OtpErlangMap extends OtpErlangObject {
// don't change this!
private static final long serialVersionUID = -6410770117696198497L;
- private static final OtpErlangObject[] NO_ELEMENTS = new OtpErlangObject[0];
+ private HashMap<OtpErlangObject, OtpErlangObject> map;
- private OtpErlangObject[] keys = NO_ELEMENTS;
- private OtpErlangObject[] values = NO_ELEMENTS;
+ /**
+ * Create an empty map.
+ */
+ public OtpErlangMap() {
+ map = new HashMap<OtpErlangObject, OtpErlangObject>();
+ }
/**
* Create a map from an array of keys and an array of values.
@@ -82,30 +91,20 @@ public class OtpErlangMap extends OtpErlangObject {
} else if (kcount != vcount) {
throw new java.lang.IllegalArgumentException(
"Map keys and values must have same arity");
- } else if (vcount < 1) {
- this.keys = NO_ELEMENTS;
- this.values = NO_ELEMENTS;
- } else {
- this.keys = new OtpErlangObject[vcount];
- for (int i = 0; i < vcount; i++) {
- if (keys[kstart + i] != null) {
- this.keys[i] = keys[kstart + i];
- } else {
- throw new java.lang.IllegalArgumentException(
- "Map key cannot be null (element" + (kstart + i)
- + ")");
- }
+ }
+ map = new HashMap<OtpErlangObject, OtpErlangObject>(vcount);
+ OtpErlangObject key, val;
+ for (int i = 0; i < vcount; i++) {
+ if ((key = keys[kstart + i]) == null) {
+ throw new java.lang.IllegalArgumentException(
+ "Map key cannot be null (element" + (kstart + i) + ")");
}
- this.values = new OtpErlangObject[vcount];
- for (int i = 0; i < vcount; i++) {
- if (values[vstart + i] != null) {
- this.values[i] = values[vstart + i];
- } else {
- throw new java.lang.IllegalArgumentException(
- "Map value cannot be null (element" + (vstart + i)
- + ")");
- }
+ if ((val = values[vstart + i]) == null) {
+ throw new java.lang.IllegalArgumentException(
+ "Map value cannot be null (element" + (vstart + i)
+ + ")");
}
+ put(key, val);
}
}
@@ -125,16 +124,15 @@ public class OtpErlangMap extends OtpErlangObject {
final int arity = buf.read_map_head();
if (arity > 0) {
- keys = new OtpErlangObject[arity];
- values = new OtpErlangObject[arity];
-
+ map = new HashMap<OtpErlangObject, OtpErlangObject>(arity);
for (int i = 0; i < arity; i++) {
- keys[i] = buf.read_any();
- values[i] = buf.read_any();
+ OtpErlangObject key, val;
+ key = buf.read_any();
+ val = buf.read_any();
+ put(key, val);
}
} else {
- keys = NO_ELEMENTS;
- values = NO_ELEMENTS;
+ map = new HashMap<OtpErlangObject, OtpErlangObject>();
}
}
@@ -144,7 +142,33 @@ public class OtpErlangMap extends OtpErlangObject {
* @return the number of elements contained in the map.
*/
public int arity() {
- return keys.length;
+ return map.size();
+ }
+
+ /**
+     * Put the given value into the map under the given key. For a detailed
+     * description of the behavior see {@link Map#put(Object, Object)}.
+ *
+ * @param key
+ * key to associate value with
+ * @param value
+ * value to associate with key
+ * @return previous value associated with key or null
+ */
+ public OtpErlangObject put(final OtpErlangObject key,
+ final OtpErlangObject value) {
+ return map.put(key, value);
+ }
+
+ /**
+     * Removes the mapping for the given key, if present.
+     *
+     * @param key
+     *            key for which the mapping is to be removed
+ * @return value associated with key or null
+ */
+ public OtpErlangObject remove(final OtpErlangObject key) {
+ return map.remove(key);
}
/**
@@ -156,15 +180,7 @@ public class OtpErlangMap extends OtpErlangObject {
* @return the requested value, of null if key is not a valid key.
*/
public OtpErlangObject get(final OtpErlangObject key) {
- if (key == null) {
- return null;
- }
- for (int i = 0; i < keys.length; i++) {
- if (key.equals(keys[i])) {
- return values[i];
- }
- }
- return null;
+ return map.get(key);
}
/**
@@ -173,9 +189,7 @@ public class OtpErlangMap extends OtpErlangObject {
* @return an array containing all of the map's keys.
*/
public OtpErlangObject[] keys() {
- final OtpErlangObject[] res = new OtpErlangObject[arity()];
- System.arraycopy(keys, 0, res, 0, res.length);
- return res;
+ return map.keySet().toArray(new OtpErlangObject[arity()]);
}
/**
@@ -184,9 +198,16 @@ public class OtpErlangMap extends OtpErlangObject {
* @return an array containing all of the map's values.
*/
public OtpErlangObject[] values() {
- final OtpErlangObject[] res = new OtpErlangObject[arity()];
- System.arraycopy(values, 0, res, 0, res.length);
- return res;
+ return map.values().toArray(new OtpErlangObject[arity()]);
+ }
+
+ /**
+     * Make a Set view of the map's key-value pairs.
+     *
+     * @return a set view of the key-value pairs contained in this map
+ */
+ public Set<Entry<OtpErlangObject, OtpErlangObject>> entrySet() {
+ return map.entrySet();
}
/**
@@ -196,19 +217,20 @@ public class OtpErlangMap extends OtpErlangObject {
*/
@Override
public String toString() {
- int i;
final StringBuffer s = new StringBuffer();
- final int arity = values.length;
s.append("#{");
- for (i = 0; i < arity; i++) {
- if (i > 0) {
+ boolean first = true;
+ for (final Map.Entry<OtpErlangObject, OtpErlangObject> e : entrySet()) {
+ if (first) {
+ first = false;
+ } else {
s.append(",");
}
- s.append(keys[i].toString());
+ s.append(e.getKey().toString());
s.append(" => ");
- s.append(values[i].toString());
+ s.append(e.getValue().toString());
}
s.append("}");
@@ -224,13 +246,13 @@ public class OtpErlangMap extends OtpErlangObject {
*/
@Override
public void encode(final OtpOutputStream buf) {
- final int arity = values.length;
+ final int arity = arity();
buf.write_map_head(arity);
- for (int i = 0; i < arity; i++) {
- buf.write_any(keys[i]);
- buf.write_any(values[i]);
+ for (final Map.Entry<OtpErlangObject, OtpErlangObject> e : entrySet()) {
+ buf.write_any(e.getKey());
+ buf.write_any(e.getValue());
}
}
@@ -256,15 +278,46 @@ public class OtpErlangMap extends OtpErlangObject {
if (a != t.arity()) {
return false;
}
+ if (a == 0) {
+ return true;
+ }
- for (int i = 0; i < a; i++) {
- if (!keys[i].equals(t.keys[i])) {
- return false; // early exit
+ OtpErlangObject key, val;
+ for (final Map.Entry<OtpErlangObject, OtpErlangObject> e : entrySet()) {
+ key = e.getKey();
+ val = e.getValue();
+ final OtpErlangObject v = t.get(key);
+ if (v == null || !val.equals(v)) {
+ return false;
}
}
- for (int i = 0; i < a; i++) {
- if (!values[i].equals(t.values[i])) {
- return false; // early exit
+
+ return true;
+ }
+
+ @Override
+ public <T> boolean match(final OtpErlangObject term, final T binds) {
+ if (!(term instanceof OtpErlangMap)) {
+ return false;
+ }
+
+ final OtpErlangMap t = (OtpErlangMap) term;
+ final int a = arity();
+
+ if (a > t.arity()) {
+ return false;
+ }
+ if (a == 0) {
+ return true;
+ }
+
+ OtpErlangObject key, val;
+ for (final Map.Entry<OtpErlangObject, OtpErlangObject> e : entrySet()) {
+ key = e.getKey();
+ val = e.getValue();
+ final OtpErlangObject v = t.get(key);
+ if (v == null || !val.match(v, binds)) {
+ return false;
}
}
@@ -272,23 +325,31 @@ public class OtpErlangMap extends OtpErlangObject {
}
@Override
+ public <T> OtpErlangObject bind(final T binds) throws OtpErlangException {
+ final OtpErlangMap ret = new OtpErlangMap();
+
+ OtpErlangObject key, val;
+ for (final Map.Entry<OtpErlangObject, OtpErlangObject> e : entrySet()) {
+ key = e.getKey();
+ val = e.getValue();
+ ret.put(key, val.bind(binds));
+ }
+
+ return ret;
+ }
+
+ @Override
protected int doHashCode() {
final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(9);
- final int a = arity();
- hash.combine(a);
- for (int i = 0; i < a; i++) {
- hash.combine(keys[i].hashCode());
- }
- for (int i = 0; i < a; i++) {
- hash.combine(values[i].hashCode());
- }
+ hash.combine(map.hashCode());
return hash.valueOf();
}
@Override
+ @SuppressWarnings("unchecked")
public Object clone() {
final OtpErlangMap newMap = (OtpErlangMap) super.clone();
- newMap.values = values.clone();
+ newMap.map = (HashMap<OtpErlangObject, OtpErlangObject>) map.clone();
return newMap;
}
}
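A short usage sketch of the reworked OtpErlangMap (not part of this patch; the keys and values are illustrative):

    import java.util.Map;

    import com.ericsson.otp.erlang.OtpErlangAtom;
    import com.ericsson.otp.erlang.OtpErlangLong;
    import com.ericsson.otp.erlang.OtpErlangMap;
    import com.ericsson.otp.erlang.OtpErlangObject;

    public class MapExample {
        public static void main(final String[] args) {
            final OtpErlangMap m = new OtpErlangMap(); // new no-argument constructor
            m.put(new OtpErlangAtom("port"), new OtpErlangLong(4369));
            m.put(new OtpErlangAtom("proto"), new OtpErlangAtom("tcp"));

            // Lookup is now a HashMap access instead of a linear scan.
            System.out.println(m.get(new OtpErlangAtom("port"))); // 4369

            // Iterate over the entries (iteration order is unspecified).
            for (final Map.Entry<OtpErlangObject, OtpErlangObject> e : m.entrySet()) {
                System.out.println(e.getKey() + " => " + e.getValue());
            }
        }
    }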
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangObject.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangObject.java
index 7ab160bcdd..9339d3749b 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangObject.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangObject.java
@@ -80,6 +80,32 @@ public abstract class OtpErlangObject implements Serializable, Cloneable {
@Override
public abstract boolean equals(Object o);
+ /**
+     * Perform a match operation against the given term.
+ *
+ * @param term
+ * the object to match
+ * @param binds
+ * variable bindings
+ * @return true if match succeeded
+ */
+ public <T> boolean match(final OtpErlangObject term, final T binds) {
+ return equals(term);
+ }
+
+ /**
+     * Make a new Erlang term, replacing variables with their respective values
+     * from the bindings argument.
+ *
+ * @param binds
+ * variable bindings
+ * @return new term
+ * @throws OtpErlangException
+ */
+ public <T> OtpErlangObject bind(final T binds) throws OtpErlangException {
+ return this;
+ }
+
@Override
public int hashCode() {
if (hashCodeValue == 0) {
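To illustrate the new match()/bind() hooks (a sketch, not part of this patch): the base implementation falls back to equals(), container terms recurse over their elements, and a user-defined term can override match() to act as a wildcard, much like the Any helper defined by the CoreMatchBind test added further down in this patch:

    import com.ericsson.otp.erlang.OtpErlangAtom;
    import com.ericsson.otp.erlang.OtpErlangException;
    import com.ericsson.otp.erlang.OtpErlangLong;
    import com.ericsson.otp.erlang.OtpErlangObject;
    import com.ericsson.otp.erlang.OtpErlangTuple;
    import com.ericsson.otp.erlang.OtpOutputStream;

    public class MatchExample {
        // Illustrative wildcard term: matches any term, never needs encoding.
        @SuppressWarnings("serial")
        static class Any extends OtpErlangObject {
            @Override
            public String toString() { return "_"; }
            @Override
            public void encode(final OtpOutputStream buf) { /* never encoded */ }
            @Override
            public boolean equals(final Object o) { return o instanceof Any; }
            @Override
            public <T> boolean match(final OtpErlangObject term, final T binds) {
                return true;
            }
        }

        public static void main(final String[] args) throws OtpErlangException {
            final OtpErlangTuple pattern = new OtpErlangTuple(new OtpErlangObject[] {
                    new OtpErlangAtom("ok"), new Any() });
            final OtpErlangTuple term = new OtpErlangTuple(new OtpErlangObject[] {
                    new OtpErlangAtom("ok"), new OtpErlangLong(42) });

            // Tuples match element by element; plain terms fall back to equals().
            System.out.println(pattern.match(term, null)); // true

            // bind() returns the term unchanged unless a subclass substitutes bindings.
            System.out.println(pattern.bind(null)); // {ok,_}
        }
    }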
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangTuple.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangTuple.java
index af2559e62e..ef0a453de1 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangTuple.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpErlangTuple.java
@@ -236,6 +236,35 @@ public class OtpErlangTuple extends OtpErlangObject {
}
@Override
+ public <T> boolean match(final OtpErlangObject term, final T bindings) {
+ if (!(term instanceof OtpErlangTuple)) {
+ return false;
+ }
+ final OtpErlangTuple t = (OtpErlangTuple) term;
+ final int a = elems.length;
+ if (a != t.elems.length) {
+ return false;
+ }
+ for (int i = 0; i < a; i++) {
+ if (!elems[i].match(t.elems[i], bindings)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ public <T> OtpErlangObject bind(final T binds) throws OtpErlangException {
+ final OtpErlangTuple tuple = (OtpErlangTuple) this.clone();
+ final int a = tuple.elems.length;
+ for (int i = 0; i < a; i++) {
+ final OtpErlangObject e = tuple.elems[i];
+ tuple.elems[i] = e.bind(binds);
+ }
+ return tuple;
+ }
+
+ @Override
protected int doHashCode() {
final OtpErlangObject.Hash hash = new OtpErlangObject.Hash(9);
final int a = arity();
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpLocalNode.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpLocalNode.java
index b996ba6f6c..dd1d299297 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpLocalNode.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpLocalNode.java
@@ -29,12 +29,7 @@ public class OtpLocalNode extends AbstractNode {
private int refId[];
protected int port;
- protected java.net.Socket epmd;
-
- protected OtpLocalNode() {
- super();
- init();
- }
+ protected OtpTransport epmd;
/**
* Create a node with the given name and the default cookie.
@@ -45,6 +40,16 @@ public class OtpLocalNode extends AbstractNode {
}
/**
+ * Create a node with the given name, transport factory and the default
+ * cookie.
+ */
+ protected OtpLocalNode(final String node,
+ final OtpTransportFactory transportFactory) {
+ super(node, transportFactory);
+ init();
+ }
+
+ /**
* Create a node with the given name and cookie.
*/
protected OtpLocalNode(final String node, final String cookie) {
@@ -52,6 +57,15 @@ public class OtpLocalNode extends AbstractNode {
init();
}
+ /**
+ * Create a node with the given name, cookie and transport factory.
+ */
+ protected OtpLocalNode(final String node, final String cookie,
+ final OtpTransportFactory transportFactory) {
+ super(node, cookie, transportFactory);
+ init();
+ }
+
private void init() {
serial = 0;
pidCount = 1;
@@ -77,7 +91,7 @@ public class OtpLocalNode extends AbstractNode {
* @param s
* The socket connecting this node to Epmd.
*/
- protected void setEpmd(final java.net.Socket s) {
+ protected void setEpmd(final OtpTransport s) {
epmd = s;
}
@@ -86,7 +100,7 @@ public class OtpLocalNode extends AbstractNode {
*
* @return The socket connecting this node to Epmd.
*/
- protected java.net.Socket getEpmd() {
+ protected OtpTransport getEpmd() {
return epmd;
}
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNode.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNode.java
index d5edd135cf..7512d34c21 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNode.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpNode.java
@@ -20,8 +20,6 @@ package com.ericsson.otp.erlang;
import java.io.IOException;
import java.lang.ref.WeakReference;
-import java.net.ServerSocket;
-import java.net.Socket;
import java.util.Collection;
import java.util.Enumeration;
import java.util.Hashtable;
@@ -97,7 +95,39 @@ public class OtpNode extends OtpLocalNode {
*
*/
public OtpNode(final String node) throws IOException {
- this(node, defaultCookie, 0);
+ super(node);
+
+ init(0);
+ }
+
+ /**
+ * <p>
+ * Create a node using the default cookie. The default cookie is found by
+ * reading the first line of the .erlang.cookie file in the user's home
+ * directory. The home directory is obtained from the System property
+ * "user.home".
+ * </p>
+ *
+ * <p>
+ * If the file does not exist, an empty string is used. This method makes no
+ * attempt to create the file.
+ * </p>
+ *
+ * @param node
+ * the name of this node.
+ *
+ * @param transportFactory
+ * the transport factory to use when creating connections.
+ *
+ * @exception IOException
+ * if communication could not be initialized.
+ *
+ */
+ public OtpNode(final String node,
+ final OtpTransportFactory transportFactory) throws IOException {
+ super(node, transportFactory);
+
+ init(0);
}
/**
@@ -128,6 +158,28 @@ public class OtpNode extends OtpLocalNode {
* the authorization cookie that will be used by this node when
* it communicates with other nodes.
*
+ * @param transportFactory
+ * the transport factory to use when creating connections.
+ *
+ * @exception IOException
+ * if communication could not be initialized.
+ *
+ */
+ public OtpNode(final String node, final String cookie,
+ final OtpTransportFactory transportFactory) throws IOException {
+ this(node, cookie, 0, transportFactory);
+ }
+
+ /**
+ * Create a node.
+ *
+ * @param node
+ * the name of this node.
+ *
+ * @param cookie
+ * the authorization cookie that will be used by this node when
+ * it communicates with other nodes.
+ *
* @param port
* the port number you wish to use for incoming connections.
* Specifying 0 lets the system choose an available port.
@@ -143,6 +195,34 @@ public class OtpNode extends OtpLocalNode {
init(port);
}
+ /**
+ * Create a node.
+ *
+ * @param node
+ * the name of this node.
+ *
+ * @param cookie
+ * the authorization cookie that will be used by this node when
+ * it communicates with other nodes.
+ *
+ * @param port
+ * the port number you wish to use for incoming connections.
+ * Specifying 0 lets the system choose an available port.
+ *
+ * @param transportFactory
+ * the transport factory to use when creating connections.
+ *
+ * @exception IOException
+ * if communication could not be initialized.
+ *
+ */
+ public OtpNode(final String node, final String cookie, final int port,
+ final OtpTransportFactory transportFactory) throws IOException {
+ super(node, cookie, transportFactory);
+
+ init(port);
+ }
+
private synchronized void init(final int aport) throws IOException {
if (!initDone) {
connections = new Hashtable<String, OtpCookedConnection>(17,
@@ -681,12 +761,12 @@ public class OtpNode extends OtpLocalNode {
* this thread simply listens for incoming connections
*/
public class Acceptor extends Thread {
- private final ServerSocket sock;
+ private final OtpServerTransport sock;
private final int acceptorPort;
private volatile boolean done = false;
Acceptor(final int port) throws IOException {
- sock = new ServerSocket(port);
+ sock = createServerTransport(port);
acceptorPort = sock.getLocalPort();
OtpNode.this.port = acceptorPort;
@@ -720,7 +800,7 @@ public class OtpNode extends OtpLocalNode {
localStatus(node, false, null);
}
- private void closeSock(final ServerSocket s) {
+ private void closeSock(final OtpServerTransport s) {
try {
if (s != null) {
s.close();
@@ -729,7 +809,7 @@ public class OtpNode extends OtpLocalNode {
}
}
- private void closeSock(final Socket s) {
+ private void closeSock(final OtpTransport s) {
try {
if (s != null) {
s.close();
@@ -744,7 +824,7 @@ public class OtpNode extends OtpLocalNode {
@Override
public void run() {
- Socket newsock = null;
+ OtpTransport newsock = null;
OtpCookedConnection conn = null;
localStatus(node, true, null);
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpOutputStream.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpOutputStream.java
index b8493b57ff..2ec583ff5c 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpOutputStream.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpOutputStream.java
@@ -21,6 +21,7 @@ package com.ericsson.otp.erlang;
// import java.io.OutputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
+import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.math.BigInteger;
@@ -202,6 +203,16 @@ public class OtpOutputStream extends ByteArrayOutputStream {
super.count += len;
}
+ @Override
+ public synchronized void writeTo(OutputStream out) throws IOException {
+ super.writeTo(out);
+ }
+
+ public synchronized void writeToAndFlush(OutputStream out) throws IOException {
+ super.writeTo(out);
+ out.flush();
+ }
+
/**
* Write the low byte of a value to the stream.
*
@@ -887,7 +898,7 @@ public class OtpOutputStream extends ByteArrayOutputStream {
if (oos.size() < 5) {
// fast path for small terms
try {
- oos.writeTo(this);
+ oos.writeToAndFlush(this);
// if the term is written as a compressed term, the output
// stream is closed, so we do this here, too
close();
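The new writeToAndFlush() used above simply writes the buffer to the target stream and then flushes it; a minimal sketch of its use (not part of this patch):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    import com.ericsson.otp.erlang.OtpErlangAtom;
    import com.ericsson.otp.erlang.OtpOutputStream;

    public class FlushExample {
        public static void main(final String[] args) throws IOException {
            final OtpOutputStream obuf = new OtpOutputStream();
            obuf.write_any(new OtpErlangAtom("hello"));

            final ByteArrayOutputStream sink = new ByteArrayOutputStream();
            // Same as writeTo(), but also flushes the target, which matters for
            // buffered or tunneled transport streams.
            obuf.writeToAndFlush(sink);
            System.out.println(sink.size() + " bytes written");
        }
    }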
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpPeer.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpPeer.java
index 2c79c04247..cb09b40f47 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpPeer.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpPeer.java
@@ -32,8 +32,8 @@ public class OtpPeer extends AbstractNode {
* common protocol version we both support
*/
- OtpPeer() {
- super();
+ OtpPeer(final OtpTransportFactory transportFactory) {
+ super(transportFactory);
}
/**
@@ -47,6 +47,19 @@ public class OtpPeer extends AbstractNode {
}
/**
+     * Create a peer node with a custom transport factory.
+ *
+ * @param node
+ * the name of the node.
+ * @param transportFactory
+ * custom transport factory
+ */
+ public OtpPeer(final String node, final OtpTransportFactory
+ transportFactory) {
+ super(node, transportFactory);
+ }
+
+ /**
* Create a connection to a remote node.
*
* @param self
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSelf.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSelf.java
index 166dac5701..74afbbcca6 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSelf.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSelf.java
@@ -19,8 +19,6 @@
package com.ericsson.otp.erlang;
import java.io.IOException;
-import java.net.ServerSocket;
-import java.net.Socket;
import java.net.UnknownHostException;
/**
@@ -48,7 +46,7 @@ import java.net.UnknownHostException;
*
*/
public class OtpSelf extends OtpLocalNode {
- private final ServerSocket sock;
+ private final OtpServerTransport sock;
private final OtpErlangPid pid;
/**
@@ -67,12 +65,43 @@ public class OtpSelf extends OtpLocalNode {
* @param node
* the name of this node.
*
+ * @exception IOException
+ * in case of server transport failure
+ *
*/
public OtpSelf(final String node) throws IOException {
this(node, defaultCookie, 0);
}
/**
+ * <p>
+ * Create a self node using the default cookie and custom transport factory.
+ * The default cookie is found by reading the first line of the
+ * .erlang.cookie file in the user's home directory. The home directory is
+ * obtained from the System property "user.home".
+ * </p>
+ *
+ * <p>
+ * If the file does not exist, an empty string is used. This method makes no
+ * attempt to create the file.
+ * </p>
+ *
+ * @param node
+ * the name of this node.
+ *
+ * @param transportFactory
+ * the transport factory to use when creating connections.
+ *
+ * @exception IOException
+ * in case of server transport failure
+ *
+ */
+ public OtpSelf(final String node,
+ final OtpTransportFactory transportFactory) throws IOException {
+ this(node, defaultCookie, 0, transportFactory);
+ }
+
+ /**
* Create a self node.
*
* @param node
@@ -81,16 +110,92 @@ public class OtpSelf extends OtpLocalNode {
* @param cookie
* the authorization cookie that will be used by this node when
* it communicates with other nodes.
+ *
+ * @exception IOException
+ * in case of server transport failure
*/
public OtpSelf(final String node, final String cookie) throws IOException {
this(node, cookie, 0);
}
+ /**
+ * Create a self node.
+ *
+ * @param node
+ * the name of this node.
+ *
+ * @param cookie
+ * the authorization cookie that will be used by this node when
+ * it communicates with other nodes.
+ *
+ * @param transportFactory
+ * the transport factory to use when creating connections.
+ *
+ * @exception IOException
+ * in case of server transport failure
+ */
+ public OtpSelf(final String node, final String cookie,
+ final OtpTransportFactory transportFactory) throws IOException {
+ this(node, cookie, 0, transportFactory);
+ }
+
+ /**
+ * Create a self node.
+ *
+ * @param node
+ * the name of this node.
+ *
+ * @param cookie
+ * the authorization cookie that will be used by this node when
+ * it communicates with other nodes.
+ *
+ * @param port
+ * the port number you wish to use for incoming connections.
+ * Specifying 0 lets the system choose an available port.
+ *
+ * @exception IOException
+ * in case of server transport failure
+ */
public OtpSelf(final String node, final String cookie, final int port)
throws IOException {
super(node, cookie);
- sock = new ServerSocket(port);
+ sock = createServerTransport(port);
+
+ if (port != 0) {
+ this.port = port;
+ } else {
+ this.port = sock.getLocalPort();
+ }
+
+ pid = createPid();
+ }
+
+ /**
+ * Create a self node.
+ *
+ * @param node
+ * the name of this node.
+ *
+ * @param cookie
+ * the authorization cookie that will be used by this node when
+ * it communicates with other nodes.
+ *
+ * @param port
+ * the port number you wish to use for incoming connections.
+ * Specifying 0 lets the system choose an available port.
+ *
+ * @param transportFactory
+ * the transport factory to use when creating connections.
+ *
+ * @exception IOException
+ * in case of server transport failure
+ */
+ public OtpSelf(final String node, final String cookie, final int port,
+ final OtpTransportFactory transportFactory) throws IOException {
+ super(node, cookie, transportFactory);
+
+ sock = createServerTransport(port);
if (port != 0) {
this.port = port;
@@ -179,7 +284,7 @@ public class OtpSelf extends OtpLocalNode {
* authorized to connect.
*/
public OtpConnection accept() throws IOException, OtpAuthException {
- Socket newsock = null;
+ OtpTransport newsock = null;
while (true) {
try {
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServerSocketTransport.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServerSocketTransport.java
new file mode 100644
index 0000000000..0e25b6bfb7
--- /dev/null
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServerSocketTransport.java
@@ -0,0 +1,68 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2015. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+package com.ericsson.otp.erlang;
+
+import java.io.IOException;
+import java.net.ServerSocket;
+import java.net.Socket;
+
+/**
+ * Default socket-based server transport
+ *
+ * @author Dmitriy Kargapolov
+ */
+public class OtpServerSocketTransport implements OtpServerTransport {
+
+ /**
+ * Underlying server socket
+ */
+ private final ServerSocket socket;
+
+ /**
+ * @see ServerSocket#ServerSocket(int)
+ */
+ public OtpServerSocketTransport(final int port) throws IOException {
+ socket = new ServerSocket(port);
+ }
+
+ /**
+ * @see ServerSocket#getLocalPort()
+ */
+ public int getLocalPort() {
+ return socket.getLocalPort();
+ }
+
+ /**
+ * @see ServerSocket#accept()
+ */
+ public OtpTransport accept() throws IOException {
+ final Socket sock = socket.accept();
+ sock.setTcpNoDelay(true);
+ return new OtpSocketTransport(sock);
+ }
+
+ /**
+ * @see ServerSocket#close()
+ */
+ public void close() throws IOException {
+ socket.close();
+ }
+
+}
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServerTransport.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServerTransport.java
new file mode 100644
index 0000000000..4d31380bee
--- /dev/null
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpServerTransport.java
@@ -0,0 +1,46 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2015. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+package com.ericsson.otp.erlang;
+
+import java.io.IOException;
+import java.net.ServerSocket;
+
+/**
+ * Server-side connection-oriented transport interface.
+ *
+ * @author Dmitriy Kargapolov
+ */
+public interface OtpServerTransport {
+
+ /**
+ * @see ServerSocket#getLocalPort()
+ */
+ int getLocalPort();
+
+ /**
+ * @see ServerSocket#accept()
+ */
+ OtpTransport accept() throws IOException;
+
+ /**
+ * @see ServerSocket#close()
+ */
+ void close() throws IOException;
+}
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSocketTransport.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSocketTransport.java
new file mode 100644
index 0000000000..f690ab59ed
--- /dev/null
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSocketTransport.java
@@ -0,0 +1,89 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2015. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+package com.ericsson.otp.erlang;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.InetAddress;
+import java.net.Socket;
+import java.net.UnknownHostException;
+
+/**
+ * Default socket-based client transport
+ *
+ * @author Dmitriy Kargapolov
+ */
+public class OtpSocketTransport implements OtpTransport {
+
+ /**
+ * Underlying socket
+ */
+ private final Socket socket;
+
+ /**
+ * @see Socket#Socket(String, int)
+ */
+ public OtpSocketTransport(final String addr, final int port)
+ throws UnknownHostException, IOException {
+ socket = new Socket(addr, port);
+ socket.setTcpNoDelay(true);
+ }
+
+ /**
+ * @see Socket#Socket(InetAddress, int)
+ */
+ public OtpSocketTransport(final InetAddress addr, final int port)
+ throws UnknownHostException, IOException {
+ socket = new Socket(addr, port);
+ socket.setTcpNoDelay(true);
+ }
+
+ /**
+ * Socket wrapping constructor
+ *
+ * @param s
+ * socket to wrap
+ */
+ public OtpSocketTransport(final Socket s) {
+ socket = s;
+ }
+
+ /**
+ * @see Socket#getInputStream()
+ */
+ public InputStream getInputStream() throws IOException {
+ return socket.getInputStream();
+ }
+
+ /**
+ * @see Socket#getOutputStream()
+ */
+ public OutputStream getOutputStream() throws IOException {
+ return socket.getOutputStream();
+ }
+
+ /**
+ * @see Socket#close()
+ */
+ public void close() throws IOException {
+ socket.close();
+ }
+}
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSocketTransportFactory.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSocketTransportFactory.java
new file mode 100644
index 0000000000..f6b5bfc86d
--- /dev/null
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpSocketTransportFactory.java
@@ -0,0 +1,56 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2015. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+package com.ericsson.otp.erlang;
+
+import java.io.IOException;
+import java.net.InetAddress;
+
+/**
+ * Default socket-based transport factory
+ *
+ * @author Dmitriy Kargapolov
+ */
+public class OtpSocketTransportFactory implements OtpTransportFactory {
+
+ /**
+ * @see OtpTransportFactory#createTransport(String, int)
+ */
+ public OtpTransport createTransport(final String addr, final int port)
+ throws IOException {
+ return new OtpSocketTransport(addr, port);
+ }
+
+ /**
+ * @see OtpTransportFactory#createTransport(InetAddress, int)
+ */
+ public OtpTransport createTransport(final InetAddress addr, final int port)
+ throws IOException {
+ return new OtpSocketTransport(addr, port);
+ }
+
+ /**
+ * @see OtpTransportFactory#createServerTransport(int)
+ */
+ public OtpServerTransport createServerTransport(final int port)
+ throws IOException {
+ return new OtpServerSocketTransport(port);
+ }
+
+}
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpTransport.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpTransport.java
new file mode 100644
index 0000000000..51c62d9ef0
--- /dev/null
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpTransport.java
@@ -0,0 +1,49 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2015. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+package com.ericsson.otp.erlang;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.Socket;
+
+/**
+ * Client-side connection-oriented transport interface.
+ *
+ * @author Dmitriy Kargapolov
+ */
+public interface OtpTransport {
+
+ /**
+ * @see Socket#getInputStream()
+ */
+ public abstract InputStream getInputStream() throws IOException;
+
+ /**
+ * @see Socket#getOutputStream()
+ */
+ public abstract OutputStream getOutputStream() throws IOException;
+
+ /**
+ * @see Socket#close()
+ */
+ public abstract void close() throws IOException;
+
+}
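As a sketch of an alternative OtpTransport implementation (not part of this patch; the class name is illustrative), the interface can wrap a socket in buffered streams, which is the kind of transport the explicit flush calls added earlier in this patch are intended to accommodate:

    import java.io.BufferedInputStream;
    import java.io.BufferedOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.net.Socket;

    import com.ericsson.otp.erlang.OtpTransport;

    public class BufferedSocketTransport implements OtpTransport {
        private final Socket socket;
        private final InputStream in;
        private final OutputStream out;

        public BufferedSocketTransport(final String addr, final int port)
                throws IOException {
            socket = new Socket(addr, port);
            socket.setTcpNoDelay(true);
            // Buffer both directions; callers must flush after each message.
            in = new BufferedInputStream(socket.getInputStream());
            out = new BufferedOutputStream(socket.getOutputStream());
        }

        public InputStream getInputStream() {
            return in;
        }

        public OutputStream getOutputStream() {
            return out;
        }

        public void close() throws IOException {
            socket.close();
        }
    }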
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpTransportFactory.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpTransportFactory.java
new file mode 100644
index 0000000000..bd404daea5
--- /dev/null
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpTransportFactory.java
@@ -0,0 +1,124 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2015. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+package com.ericsson.otp.erlang;
+
+import java.io.IOException;
+import java.net.InetAddress;
+
+/**
+ * Factory class used to create client- and server-side transport instances. One
+ * static instance of a class implementing this interface is created when the
+ * program is loaded. The default implementation used is
+ * {@link OtpSocketTransportFactory}. A JInterface user can specify a custom
+ * transport factory implementing this interface in the following ways:
+ * <dl>
+ * <dt>defining a static class internal to the class holding the main() method</dt>
+ * <dd>On systems where the main class can be retrieved with
+ * <code>System.getProperty("sun.java.command")</code>, the user can define a static
+ * class <b>OtpErlangSystemTuner</b> internal to the main class, providing at
+ * least one static method named <b>getOtpTransportFactory</b>, taking no
+ * parameters and returning an object of a class implementing
+ * <b>OtpTransportFactory</b>, for example:
+ *
+ * <pre>
+ *
+ * public class MyMainClass {
+ *
+ * public static class OtpErlangSystemTuner {
+ * ...
+ * public static OtpTransportFactory getOtpTransportFactory() {
+ * return new MyTransportFactory();
+ * }
+ * }
+ *
+ * public static class MyTransportFactory implements OtpTransportFactory {
+ * ...
+ * }
+ *
+ * public static void main(String[] args) {
+ * ...
+ * }
+ * }
+ *
+ *
+ * </pre>
+ *
+ * </dd>
+ *
+ * <dt>specifying the factory class in the system properties</dt>
+ * <dd>A user-defined transport factory class may be specified via the system property
+ * <b>OtpTransportFactory</b>, for example:
+ *
+ * <pre>
+ *
+ * package com.my.company;
+ *
+ * public static class MyTransportFactory implements OtpTransportFactory {
+ * ...
+ * }
+ * </pre>
+ *
+ * In that case the program may be run with
+ * -DOtpTransportFactory=com.my.company.MyTransportFactory, or any other way of
+ * setting the system property <i>before execution of static initializers</i> may be
+ * used.</dd>
+ * </dl>
+ *
+ * @author Dmitriy Kargapolov
+ */
+public interface OtpTransportFactory {
+
+ /**
+     * Create an instance of {@link OtpTransport}.
+     *
+     * @param addr
+     *            host name or IP address string
+     * @param port
+     *            port number
+     * @return a new transport instance
+ * @throws IOException
+ */
+ public abstract OtpTransport createTransport(String addr, int port)
+ throws IOException;
+
+ /**
+     * Create an instance of {@link OtpTransport}.
+     *
+     * @param addr
+     *            peer address
+     * @param port
+     *            port number
+     * @return a new transport instance
+ * @throws IOException
+ */
+ public abstract OtpTransport createTransport(InetAddress addr, int port)
+ throws IOException;
+
+ /**
+     * Create an instance of {@link OtpServerTransport}.
+     *
+     * @param port
+     *            port number to listen on
+     * @return a new server transport instance
+ * @throws IOException
+ */
+ public OtpServerTransport createServerTransport(int port)
+ throws IOException;
+}
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/java_files b/lib/jinterface/java_src/com/ericsson/otp/erlang/java_files
index 62fa7f990e..a0f19bc1aa 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/java_files
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/java_files
@@ -53,7 +53,13 @@ COMM = \
OtpOutputStream \
OtpPeer \
OtpSelf \
- OtpServer
+ OtpServer \
+ OtpServerSocketTransport \
+ OtpServerTransport \
+ OtpSocketTransport \
+ OtpSocketTransportFactory \
+ OtpTransport \
+ OtpTransportFactory
ERL = \
OtpErlangAtom \
diff --git a/lib/jinterface/test/jinterface_SUITE.erl b/lib/jinterface/test/jinterface_SUITE.erl
index 00abc97ff5..73bab98559 100644
--- a/lib/jinterface/test/jinterface_SUITE.erl
+++ b/lib/jinterface/test/jinterface_SUITE.erl
@@ -22,7 +22,8 @@
init_per_suite/1, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2]).
--export([nodename/1, register_and_whereis/1, get_names/1, boolean_atom/1,
+-export([transport_factory/1,
+ nodename/1, register_and_whereis/1, get_names/1, boolean_atom/1,
node_ping/1, mbox_ping/1,
java_erlang_send_receive/1,
java_internal_send_receive_same_node/1,
@@ -39,7 +40,8 @@
status_handler_localStatus/1, status_handler_remoteStatus/1,
status_handler_connAttempt/1,
maps/1,
- fun_equals/1
+ fun_equals/1,
+ core_match_bind/1
]).
-include_lib("common_test/include/ct.hrl").
@@ -103,12 +105,14 @@ end_per_group(_GroupName, Config) ->
fundamental() ->
[
+ transport_factory, % TransportFactoryTest.java
nodename, % Nodename.java
register_and_whereis, % RegisterAndWhereis.java
get_names, % GetNames.java
boolean_atom, % BooleanAtom.java
maps, % Maps.java
- fun_equals % FunEquals.java
+ fun_equals, % FunEquals.java
+ core_match_bind % CoreMatchBind.java
].
ping() ->
@@ -201,6 +205,16 @@ end_per_testcase(_Case,Config) ->
%%%-----------------------------------------------------------------
%%% TEST CASES
%%%-----------------------------------------------------------------
+transport_factory(doc) ->
+ ["TransportFactoryTest.java: Test custom OTP Transport Factory"];
+transport_factory(suite) ->
+ [];
+transport_factory(Config) when is_list(Config) ->
+ ok = jitu:java(?config(java, Config),
+ ?config(data_dir, Config),
+ "TransportFactoryTest").
+
+%%%-----------------------------------------------------------------
nodename(doc) ->
["Nodename.java: "
"Test OtpNode.node(), OtpNode.alive() and OtpNode.host()"];
@@ -705,6 +719,18 @@ fun_equals(Config) when is_list(Config) ->
[]).
%%%-----------------------------------------------------------------
+core_match_bind(doc) ->
+ ["CoreMatchBind.java: "
+ "Test OtpErlangObject.match() and bind()"];
+core_match_bind(suite) ->
+ [];
+core_match_bind(Config) when is_list(Config) ->
+ ok = jitu:java(?config(java, Config),
+ ?config(data_dir, Config),
+ "CoreMatchBind",
+ []).
+
+%%%-----------------------------------------------------------------
%%% INTERNAL FUNCTIONS
%%%-----------------------------------------------------------------
send_receive(TestCaseTag,Fun,Config) ->
diff --git a/lib/jinterface/test/jinterface_SUITE_data/CoreMatchBind.java b/lib/jinterface/test/jinterface_SUITE_data/CoreMatchBind.java
new file mode 100644
index 0000000000..a78a63093e
--- /dev/null
+++ b/lib/jinterface/test/jinterface_SUITE_data/CoreMatchBind.java
@@ -0,0 +1,584 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2000-2015. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+import com.ericsson.otp.erlang.OtpErlangException;
+import com.ericsson.otp.erlang.OtpErlangInt;
+import com.ericsson.otp.erlang.OtpErlangList;
+import com.ericsson.otp.erlang.OtpErlangMap;
+import com.ericsson.otp.erlang.OtpErlangObject;
+import com.ericsson.otp.erlang.OtpErlangTuple;
+import com.ericsson.otp.erlang.OtpOutputStream;
+
+public class CoreMatchBind {
+
+ @SuppressWarnings("serial")
+ private static class DumbObject extends OtpErlangObject {
+
+ @Override
+ public String toString() {
+ return this.getClass().getSimpleName();
+ }
+
+ @Override
+ public void encode(final OtpOutputStream buf) {
+ fail("unexpected encode() call");
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ fail("unexpected equals() call");
+ return false;
+ }
+
+ }
+
+ @SuppressWarnings("serial")
+ private static class BoundObject extends OtpErlangObject {
+
+ @Override
+ public String toString() {
+ return this.getClass().getSimpleName();
+ }
+
+ @Override
+ public void encode(final OtpOutputStream buf) {
+ fail("unexpected encode() call");
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ fail("unexpected equals() call");
+ return false;
+ }
+
+ }
+
+ @SuppressWarnings("serial")
+ private static class TestObject extends OtpErlangObject {
+
+ private final Binder binder;
+ private DumbObject dumb;
+ private boolean flag;
+ private BoundObject obj;
+
+ public TestObject(final boolean flag, final Binder binder,
+ final DumbObject dumb) {
+ this.flag = flag;
+ this.binder = binder;
+ this.dumb = dumb;
+ }
+
+ public TestObject(final Binder binder, final BoundObject obj) {
+ this.binder = binder;
+ this.obj = obj;
+ }
+
+ public DumbObject getDumb() {
+ return dumb;
+ }
+
+ @Override
+ public String toString() {
+ return flag ? "T" : "F";
+ }
+
+ @Override
+ public void encode(final OtpOutputStream buf) {
+ fail("unexpected encode() call");
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ if (obj == null) {
+ fail("unexpected equals() call");
+ }
+ return o == obj;
+ }
+
+ @Override
+ public <T> boolean match(final OtpErlangObject term, final T binds) {
+ if (binds != binder) {
+ fail("invalid binder");
+ }
+ if (term != dumb) {
+ fail("invalid object");
+ }
+ return flag;
+ }
+
+ @Override
+ public <T> OtpErlangObject bind(final T binds)
+ throws OtpErlangException {
+ if (binds != binder) {
+ fail("invalid binder");
+ }
+ return obj;
+ }
+
+ }
+
+ /*
+ * "always matched" object
+ */
+ @SuppressWarnings("serial")
+ private static class Any extends OtpErlangObject {
+
+ @Override
+ public String toString() {
+ return "any";
+ }
+
+ @Override
+ public void encode(final OtpOutputStream buf) {
+ fail("unexpected encode() call");
+ }
+
+ @Override
+ public boolean equals(final Object o) {
+ fail("unexpected equals() call");
+ return false;
+ }
+
+ @Override
+ public <T> boolean match(final OtpErlangObject term, final T binds) {
+ return true;
+ }
+ }
+
+ private static class Binder {
+ // make object pair for match() testing
+ TestObject makeTest(final boolean flag) {
+ return new TestObject(flag, this, new DumbObject());
+ }
+
+ // make object pair for bind() testing
+ TestObject makeTest() {
+ return new TestObject(this, new BoundObject());
+ }
+ }
+
+ private static void isNotNull(final Object o) throws Exception {
+ if (o == null) {
+ throw new Exception("not null expected");
+ }
+ }
+
+ private static void fail(final String string) {
+ System.err.println(string);
+ new Throwable().printStackTrace(System.err);
+ System.exit(1);
+ }
+
+ private static void isT(final boolean b) throws Exception {
+ if (!b) {
+ throw new Exception("true expected");
+ }
+ }
+
+ private static void isF(final boolean b) throws Exception {
+ if (b) {
+ throw new Exception("false expected");
+ }
+ }
+
+ private static void equals(final OtpErlangObject a, final OtpErlangObject b)
+ throws Exception {
+ if (!a.equals(b)) {
+ throw new Exception(a + " != " + b);
+ }
+ }
+
+ /*
+ * scalar match test - match a particular test object (producing a given result)
+ * against a particular dumb object, passing a particular bindings object; ensure
+ * all participants are used as expected by the match logic and check the result.
+ */
+ private static void scalar_match_test() throws Exception {
+ final Binder bind = new Binder();
+
+ final TestObject t = bind.makeTest(true);
+ isT(t.match(t.getDumb(), bind));
+
+ final TestObject f = bind.makeTest(false);
+ isF(f.match(f.getDumb(), bind));
+ }
+
+ /*
+ * scalar bind test - ensure the right object is generated from the bindings
+ */
+ private static void scalar_bind_test() throws Exception {
+ final Binder bind = new Binder();
+ final TestObject t = bind.makeTest();
+ final OtpErlangObject o = t.bind(bind);
+ isNotNull(o);
+ equals(t, o);
+ }
+
+ /*
+ * used by tuple_arity_match_test()
+ */
+ private static OtpErlangObject mkTuplePattern(final int n) {
+ final Any a[] = new Any[n];
+ for (int i = 0; i < n; i++) {
+ a[i] = new Any();
+ }
+ return new OtpErlangTuple(a);
+ }
+
+ /*
+ * used by tuple_arity_match_test()
+ */
+ private static OtpErlangObject mkTupleObject(final int n) {
+ final DumbObject a[] = new DumbObject[n];
+ for (int i = 0; i < n; i++) {
+ a[i] = new DumbObject();
+ }
+ return new OtpErlangTuple(a);
+ }
+
+ /*
+ * ensure only tuples of the same arity can match
+ */
+ private static void tuple_arity_match_test(final int m, final int n)
+ throws Exception {
+ final Binder bind = new Binder();
+ for (int i = m; i < n; i++) {
+ for (int j = m; j < n; j++) {
+ final OtpErlangObject p = mkTuplePattern(i);
+ final OtpErlangObject o = mkTupleObject(j);
+ if (i == j) {
+ isT(p.match(o, bind));
+ } else {
+ isF(p.match(o, bind));
+ }
+ }
+ }
+ }
+
+ /*
+ * tuple match test - ensure elements of tuple are matched to corresponding
+ * elements of tested object and result is logical "and" over all elements.
+ */
+ private static void tuple_match_test(final int n) throws Exception {
+ final Binder bind = new Binder();
+ final int max = 1 << n;
+ final TestObject a[] = new TestObject[n];
+ final DumbObject d[] = new DumbObject[n];
+ for (int k = 0; k < max; k++) {
+ for (int m = 1, i = 0; m < max; m = m << 1, i++) {
+ d[i] = new DumbObject();
+ a[i] = new TestObject((k & m) != 0, bind, d[i]);
+ }
+ final OtpErlangObject tpl = new OtpErlangTuple(a);
+ final OtpErlangObject obj = new OtpErlangTuple(d);
+ if (k + 1 < max) {
+ isF(tpl.match(obj, bind));
+ } else {
+ isT(tpl.match(obj, bind));
+ }
+ }
+ }
+
+ /*
+ * tuple bind test - ensure result is a tuple where each element is a result
+ * of binding of corresponding pattern element using provided bindings.
+ */
+ private static void tuple_bind_test(final int n) throws Exception {
+ final Binder bind = new Binder();
+ final TestObject a[] = new TestObject[n];
+ final OtpErlangObject b[] = new OtpErlangObject[n];
+ for (int i = 0; i < n; i++) {
+ a[i] = bind.makeTest();
+ b[i] = a[i].obj;
+ }
+ final OtpErlangObject t = new OtpErlangTuple(a);
+ final OtpErlangObject o = t.bind(bind);
+ isNotNull(o);
+ equals(t, o);
+ }
+
+ private static OtpErlangObject mkListPattern(final int n, final boolean tail)
+ throws OtpErlangException {
+ final Any a[] = new Any[n];
+ for (int i = 0; i < n; i++) {
+ a[i] = new Any();
+ }
+ return tail ? new OtpErlangList(a, new Any()) : new OtpErlangList(a);
+ }
+
+ private static OtpErlangObject mkListObject(final int n, final boolean tail)
+ throws OtpErlangException {
+ final DumbObject a[] = new DumbObject[n];
+ for (int i = 0; i < n; i++) {
+ a[i] = new DumbObject();
+ }
+ return tail ? new OtpErlangList(a, new DumbObject())
+ : new OtpErlangList(a);
+ }
+
+ /*
+ * ensure only lists of the same arity and same tail presence can match
+ */
+ private static void list_arity_match_test(final int m, final int n)
+ throws Exception {
+ final Binder bind = new Binder();
+ for (int i = m; i < n; i++) {
+ for (int j = m; j < n; j++) {
+ for (int k = 0; k < 2; k++) {
+ if (i == 0 && k == 1) {
+ continue;
+ }
+ for (int l = 0; l < 2; l++) {
+ if (j == 0 && l == 1) {
+ continue;
+ }
+ final OtpErlangObject p = mkListPattern(i, k == 1);
+ final OtpErlangObject o = mkListObject(j, l == 1);
+ if (i == j && k == l || k == 1 && i <= j) {
+ isT(p.match(o, bind));
+ } else {
+ isF(p.match(o, bind));
+ }
+ }
+ }
+ }
+ }
+ }
+
+ /*
+ * lists match test - ensure elements of lists are matched to corresponding
+ * elements of tested object and result is logical "and" over all elements,
+ * count tails as well
+ */
+ private static void list_match_test(final int n) throws Exception {
+ final Binder bind = new Binder();
+ final int max = 1 << n;
+ final TestObject a[] = new TestObject[n];
+ final DumbObject d[] = new DumbObject[n];
+ final DumbObject e[] = new DumbObject[n + 1];
+ for (int k = 0; k < max; k++) {
+ for (int m = 1, i = 0; m < max; m = m << 1, i++) {
+ d[i] = new DumbObject();
+ e[i] = d[i];
+ a[i] = new TestObject((k & m) != 0, bind, d[i]);
+ }
+ for (int i = n; i < n + 1; i++) {
+ e[i] = new DumbObject();
+ }
+ final OtpErlangObject lst = new OtpErlangList(a);
+ final OtpErlangObject obj = new OtpErlangList(d);
+ final OtpErlangObject ext = new OtpErlangList(e);
+ final OtpErlangObject eTl = new OtpErlangList(e, new DumbObject());
+
+ if (n > 0) {
+ final DumbObject dTail = new DumbObject();
+ final TestObject tTail = new TestObject(true, bind, dTail);
+ final TestObject fTail = new TestObject(false, bind, dTail);
+ final OtpErlangObject fTailLst = new OtpErlangList(a, fTail);
+ final OtpErlangObject tTailLst = new OtpErlangList(a, tTail);
+ final OtpErlangObject tailObj = new OtpErlangList(d, dTail);
+
+ // matching lists whose tails do not match always fails
+ isF(fTailLst.match(tailObj, bind));
+
+ // matching a list with no tail against a list with a tail always fails
+ isF(lst.match(tailObj, bind));
+
+ // matching lists with matching tails
+ if (k + 1 < max) {
+ isF(tTailLst.match(tailObj, bind));
+ } else {
+ isT(tTailLst.match(tailObj, bind));
+ }
+
+ // matching a shorter pattern with a tail against a longer list,
+ // with or without an extra tail; also matching a list pattern
+ // with a tail against a same-length list with no tail.
+ final Any aTail = new Any();
+ final OtpErlangObject shortLst = new OtpErlangList(a, aTail);
+ if (k + 1 < max) {
+ isF(shortLst.match(obj, bind)); // same arity
+ isF(shortLst.match(ext, bind)); // pattern arity is less
+ isF(shortLst.match(eTl, bind)); //
+ } else {
+ isT(shortLst.match(obj, bind)); // same arity
+ isT(shortLst.match(ext, bind)); // pattern arity is less
+ isT(shortLst.match(eTl, bind)); //
+ }
+ }
+
+ // matching lists with no tails
+ if (k + 1 < max) {
+ isF(lst.match(obj, bind));
+ } else {
+ isT(lst.match(obj, bind));
+ }
+
+ // extra-length object, no tail in "pattern"
+ isF(lst.match(ext, bind));
+ }
+ }
+
+ /*
+ * list bind test - ensure result is a list where each element is a result
+ * of binding of corresponding pattern element using provided bindings.
+ */
+ private static void list_bind_test(final int n) throws Exception {
+ final Binder bind = new Binder();
+ final TestObject a[] = new TestObject[n];
+ final OtpErlangObject b[] = new OtpErlangObject[n];
+ for (int i = 0; i < n; i++) {
+ a[i] = bind.makeTest();
+ b[i] = a[i].obj;
+ }
+ OtpErlangObject t = new OtpErlangList(a);
+ OtpErlangObject o = t.bind(bind);
+ isNotNull(o);
+ equals(t, o);
+ if (n > 0) {
+ // improper list case
+ t = new OtpErlangList(a, bind.makeTest());
+ o = t.bind(bind);
+ isNotNull(o);
+ equals(t, o);
+ }
+ }
+
+ /*
+ * map match test - object may have more keys than pattern
+ */
+ private static void map_match_test(final int m, final int n)
+ throws Exception {
+ final Binder bind = new Binder();
+
+ // pattern side - m elements
+ final OtpErlangObject k1[] = new OtpErlangObject[m];
+ final TestObject a[] = new TestObject[m];
+
+ // object side - n elements
+ final OtpErlangObject k2[] = new OtpErlangObject[n];
+ final DumbObject d[] = new DumbObject[n];
+
+ final int max = Math.max(m, n);
+ final int mskHi = 1 << max;
+ final int full = (1 << m) - 1;
+ for (int k = 0; k < mskHi; k++) {
+ for (int msk = 1, i = 0; msk < mskHi; msk = msk << 1, i++) {
+ if (i < n) {
+ k2[i] = new OtpErlangInt(i);
+ d[i] = new DumbObject();
+ }
+ if (i < m) {
+ k1[i] = new OtpErlangInt(i);
+ a[i] = new TestObject((k & msk) != 0, bind, i < n ? d[i]
+ : new DumbObject());
+ }
+ }
+ final OtpErlangObject map = new OtpErlangMap(k1, a); // m items
+ final OtpErlangObject obj = new OtpErlangMap(k2, d); // n items
+ if ((k & full) == full && m <= n) {
+ isT(map.match(obj, bind));
+ } else {
+ isF(map.match(obj, bind));
+ }
+ }
+ }
+
+ /*
+ * map bind test - ensure result is a map where each element is a result of
+ * binding of corresponding pattern element using provided bindings.
+ */
+ private static void map_bind_test(final int n) throws Exception {
+ final Binder bind = new Binder();
+ final TestObject a[] = new TestObject[n];
+ final OtpErlangObject b[] = new OtpErlangObject[n];
+ final OtpErlangObject k[] = new OtpErlangObject[n];
+ for (int i = 0; i < n; i++) {
+ a[i] = bind.makeTest();
+ b[i] = a[i].obj;
+ k[i] = new OtpErlangInt(i);
+ }
+ final OtpErlangObject t = new OtpErlangMap(k, a);
+ final OtpErlangObject o = t.bind(bind);
+ isNotNull(o);
+ equals(t, o);
+ }
+
+ public static void main(final String[] args) {
+ try {
+ scalar_match_test();
+ System.out.println("scalar_match_test() passed");
+
+ scalar_bind_test();
+ System.out.println("scalar_bind_test() passed");
+
+ for (int m = 0; m < 16; m++) {
+ for (int n = 0; n < 16; n++) {
+ tuple_arity_match_test(m, n);
+ }
+ }
+ System.out.println("tuple_arity_match_test() passed");
+
+ for (int n = 0; n < 16; n++) {
+ tuple_match_test(n);
+ }
+ System.out.println("tuple_match_test() passed");
+
+ for (int n = 0; n < 16; n++) {
+ tuple_bind_test(n);
+ }
+ System.out.println("tuple_bind_test() passed");
+
+ for (int m = 0; m < 16; m++) {
+ for (int n = 0; n < 16; n++) {
+ list_arity_match_test(m, n);
+ }
+ }
+ System.out.println("list_arity_match_test() passed");
+
+ for (int n = 0; n < 16; n++) {
+ list_match_test(n);
+ }
+ System.out.println("list_match_test() passed");
+
+ for (int n = 0; n < 16; n++) {
+ list_bind_test(n);
+ }
+ System.out.println("list_bind_test() passed");
+
+ for (int m = 0; m < 12; m++) {
+ for (int n = 0; n < 12; n++) {
+ map_match_test(m, n);
+ }
+ }
+ System.out.println("map_match_test() passed");
+
+ for (int n = 0; n < 16; n++) {
+ map_bind_test(n);
+ }
+ System.out.println("map_bind_test() passed");
+
+ } catch (final Exception e) {
+ e.printStackTrace();
+ System.exit(1);
+ }
+
+ System.out.println("ok");
+ }
+}
diff --git a/lib/jinterface/test/jinterface_SUITE_data/Makefile.src b/lib/jinterface/test/jinterface_SUITE_data/Makefile.src
index cd68f1ead5..a4a69000c6 100644
--- a/lib/jinterface/test/jinterface_SUITE_data/Makefile.src
+++ b/lib/jinterface/test/jinterface_SUITE_data/Makefile.src
@@ -38,6 +38,7 @@ JINTERFACE_CLASSPATH = @jinterface_classpath@
CLASSPATH = .@PS@$(JINTERFACE_CLASSPATH)@PS@
JAVA_FILES = \
+ TransportFactoryTest.java \
Nodename.java \
RegisterAndWhereis.java \
GetNames.java \
@@ -48,7 +49,8 @@ JAVA_FILES = \
MboxLinkUnlink.java \
NodeStatusHandler.java \
Maps.java \
- FunEquals.java
+ FunEquals.java \
+ CoreMatchBind.java
CLASS_FILES = $(JAVA_FILES:.java=.class)
diff --git a/lib/jinterface/test/jinterface_SUITE_data/TransportFactoryTest.java b/lib/jinterface/test/jinterface_SUITE_data/TransportFactoryTest.java
new file mode 100644
index 0000000000..367e28a512
--- /dev/null
+++ b/lib/jinterface/test/jinterface_SUITE_data/TransportFactoryTest.java
@@ -0,0 +1,90 @@
+/*
+ * %CopyrightBegin%
+ *
+ * Copyright Ericsson AB 2015. All Rights Reserved.
+ *
+ * The contents of this file are subject to the Erlang Public License,
+ * Version 1.1, (the "License"); you may not use this file except in
+ * compliance with the License. You should have received a copy of the
+ * Erlang Public License along with this software. If not, it can be
+ * retrieved online at http://www.erlang.org/.
+ *
+ * Software distributed under the License is distributed on an "AS IS"
+ * basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ * the License for the specific language governing rights and limitations
+ * under the License.
+ *
+ * %CopyrightEnd%
+ */
+
+import java.io.IOException;
+import java.net.InetAddress;
+
+import com.ericsson.otp.erlang.OtpSelf;
+import com.ericsson.otp.erlang.OtpServerTransport;
+import com.ericsson.otp.erlang.OtpSocketTransportFactory;
+import com.ericsson.otp.erlang.OtpTransport;
+import com.ericsson.otp.erlang.OtpTransportFactory;
+
+/**
+ * @author Dmitriy Kargapolov
+ */
+public class TransportFactoryTest {
+
+ /**
+ * Example of a custom transport factory wrapping the default one.
+ */
+ public static class TransportFactory implements OtpTransportFactory {
+
+ OtpSocketTransportFactory tf = new OtpSocketTransportFactory();
+
+ public OtpTransport createTransport(final String addr, final int port)
+ throws IOException {
+ clientOk = true;
+ System.out.println("creating transport to " + addr + ", " + port);
+ return tf.createTransport(addr, port);
+ }
+
+ public OtpTransport createTransport(final InetAddress addr,
+ final int port) throws IOException {
+ clientOk = true;
+ System.out.println("creating transport to " + addr + ", " + port);
+ return tf.createTransport(addr, port);
+ }
+
+ public OtpServerTransport createServerTransport(final int port)
+ throws IOException {
+ serverOk = true;
+ System.out.println("creating server transport to " + port);
+ return tf.createServerTransport(port);
+ }
+
+ }
+
+ static boolean serverOk = false;
+ static boolean clientOk = false;
+
+ public static void main(final String[] args) throws IOException {
+
+ // check server transport
+ final OtpSelf self = new OtpSelf("local", new TransportFactory());
+ if (!serverOk) {
+ fail("custom server transport was not created");
+ }
+ System.out.println("accepting connections on " + self.port());
+
+ // check client transport
+ try {
+ self.publishPort();
+ } catch (final Exception e) {
+ }
+ if (!clientOk) {
+ fail("custom client transport was not created");
+ }
+ }
+
+ private static void fail(final String string) {
+ System.err.println(string);
+ System.exit(1);
+ }
+}
diff --git a/lib/jinterface/test/jitu.erl b/lib/jinterface/test/jitu.erl
index 46b8cb3ac2..8097237af6 100644
--- a/lib/jinterface/test/jitu.erl
+++ b/lib/jinterface/test/jitu.erl
@@ -117,10 +117,7 @@ classpath(Dir) ->
end,
es(Dir++PS++
filename:join([code:lib_dir(jinterface),"priv","OtpErlang.jar"])++PS++
- case os:getenv("CLASSPATH") of
- false -> "";
- Classpath -> Classpath
- end,
+ os:getenv("CLASSPATH", ""),
Quote,
EscSpace).
diff --git a/lib/kernel/doc/src/error_logger.xml b/lib/kernel/doc/src/error_logger.xml
index df2f0b01ee..f49d63b5a6 100644
--- a/lib/kernel/doc/src/error_logger.xml
+++ b/lib/kernel/doc/src/error_logger.xml
@@ -58,12 +58,11 @@
specific events. (<c>add_report_handler/1,2</c>). Also, there is
a useful event handler in STDLIB for multi-file logging of events,
see <c>log_mf_h(3)</c>.</p>
- <p>Warning events were introduced in Erlang/OTP R9C. To retain
- backwards compatibility, these are by default tagged as errors,
- thus showing up as error reports in the logs. By using
- the command line flag <c><![CDATA[+W <w | i>]]></c>, they can instead
- be tagged as warnings or info. Tagging them as warnings may
- require rewriting existing user defined event handlers.</p>
+ <p>Warning events were introduced in Erlang/OTP R9C and are enabled
+ by default as of Erlang/OTP 18.0. To retain backwards compatibility with
+ existing user-defined event handlers, these events can be tagged as errors
+ or info using the command line flag <c><![CDATA[+W <e | i | w>]]></c>, thus
+ showing up as error or info reports in the logs.</p>
</description>
<datatypes>
<datatype>
@@ -132,7 +131,7 @@ ok</pre>
<desc>
<p>Returns the current mapping for warning events. Events sent
using <c>warning_msg/1,2</c> or <c>warning_report/1,2</c>
- are tagged as errors (default), warnings or info, depending
+ are tagged as errors, warnings (default) or info, depending
on the value of the command line flag <c>+W</c>.</p>
<pre>
os$ <input>erl</input>
@@ -140,25 +139,25 @@ Erlang (BEAM) emulator version 5.4.8 [hipe] [threads:0] [kernel-poll]
Eshell V5.4.8 (abort with ^G)
1> <input>error_logger:warning_map().</input>
-error
-2> <input>error_logger:warning_msg("Warnings tagged as: ~p~n", [error]).</input>
+warning
+2> <input>error_logger:warning_msg("Warnings tagged as: ~p~n", [warning]).</input>
-=ERROR REPORT==== 11-Aug-2005::15:31:23 ===
-Warnings tagged as: error
+=WARNING REPORT==== 11-Aug-2005::15:31:55 ===
+Warnings tagged as: warning
ok
3>
User switch command
--> q
-os$ <input>erl +W w</input>
+os$ <input>erl +W e</input>
Erlang (BEAM) emulator version 5.4.8 [hipe] [threads:0] [kernel-poll]
Eshell V5.4.8 (abort with ^G)
1> <input>error_logger:warning_map().</input>
-warning
-2> <input>error_logger:warning_msg("Warnings tagged as: ~p~n", [warning]).</input>
+error
+2> <input>error_logger:warning_msg("Warnings tagged as: ~p~n", [error]).</input>
-=WARNING REPORT==== 11-Aug-2005::15:31:55 ===
-Warnings tagged as: warning
+=ERROR REPORT==== 11-Aug-2005::15:31:23 ===
+Warnings tagged as: error
ok</pre>
</desc>
</func>
diff --git a/lib/kernel/doc/src/gen_sctp.xml b/lib/kernel/doc/src/gen_sctp.xml
index dc9e4766a9..ee8cd441d4 100644
--- a/lib/kernel/doc/src/gen_sctp.xml
+++ b/lib/kernel/doc/src/gen_sctp.xml
@@ -961,7 +961,7 @@
<pre> #sctp_paddrinfo{
assoc_id = assoc_id(),
address = {IP, Port},
- state = inactive | active,
+ state = inactive | active | unconfirmed,
cwnd = integer(),
srtt = integer(),
rto = integer(),
diff --git a/lib/kernel/doc/src/gen_tcp.xml b/lib/kernel/doc/src/gen_tcp.xml
index 820ecd1e30..71ef5cd48f 100644
--- a/lib/kernel/doc/src/gen_tcp.xml
+++ b/lib/kernel/doc/src/gen_tcp.xml
@@ -347,11 +347,22 @@ do_recv(Sock, Bs) ->
</func>
<func>
<name name="shutdown" arity="2"/>
- <fsummary>Immediately close a socket</fsummary>
+ <fsummary>Asynchronously close a socket</fsummary>
<desc>
- <p>Immediately close a socket in one or two directions.</p>
+ <p>Close a socket in one or two directions.</p>
<p><c><anno>How</anno> == write</c> means closing the socket for writing,
reading from it is still possible.</p>
+ <p>If <c><anno>How</anno> == read</c>, or there is no outgoing
+ data buffered in the <c><anno>Socket</anno></c> port,
+ then the socket is shut down immediately and any error encountered
+ is returned in <c><anno>Reason</anno></c>.</p>
+ <p>If there is data buffered in the socket port, the attempt
+ to shut down the socket is postponed until that data has been written to the
+ kernel socket send buffer. Any errors encountered will result
+ in the socket being closed and <c>{error, closed}</c> being returned
+ on the next
+ <seealso marker="gen_tcp#recv/2">recv/2</seealso> or
+ <seealso marker="gen_tcp#send/2">send/2</seealso>.</p>
<p>To be able to handle that the peer has done a shutdown on
the write side, the <c>{exit_on_close, false}</c> option
is useful.</p>
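A minimal Erlang sketch of the half-close behaviour described above, for a socket opened in passive mode ({active, false}); the function names and the 5000 ms receive timeout are illustrative only, not part of the patch:

    half_close(Sock) ->
        %% stop sending; reading from Sock is still possible
        ok = gen_tcp:shutdown(Sock, write),
        drain(Sock).

    drain(Sock) ->
        case gen_tcp:recv(Sock, 0, 5000) of
            {ok, _Data}     -> drain(Sock);
            {error, closed} -> ok;  %% peer closed, or a deferred write error surfaced here
            {error, Reason} -> {error, Reason}
        end.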
diff --git a/lib/kernel/doc/src/heart.xml b/lib/kernel/doc/src/heart.xml
index 3ec33d2f18..a424d2978e 100644
--- a/lib/kernel/doc/src/heart.xml
+++ b/lib/kernel/doc/src/heart.xml
@@ -78,6 +78,16 @@
<pre>
% <input>erl -heart -env ERL_CRASH_DUMP_SECONDS 10 ...</input></pre>
+
+ <p>If a regular core dump is wanted, let heart know by setting the kill signal to abort
+ using the environment variable <c><![CDATA[HEART_KILL_SIGNAL=SIGABRT]]></c>.
+ If this variable is unset, or set to anything other than <c><![CDATA[SIGABRT]]></c>,
+ the default behaviour is a kill signal using <c><![CDATA[SIGKILL]]></c>.
+ </p>
+
+ <pre>
+% <input>erl -heart -env HEART_KILL_SIGNAL SIGABRT ...</input></pre>
+
<p>
Furthermore, <c><![CDATA[ERL_CRASH_DUMP_SECONDS]]></c> has the following behaviour on
<c>heart</c>:
diff --git a/lib/kernel/doc/src/inet.xml b/lib/kernel/doc/src/inet.xml
index 8dd311e5cd..77a8caaaf6 100644
--- a/lib/kernel/doc/src/inet.xml
+++ b/lib/kernel/doc/src/inet.xml
@@ -332,23 +332,23 @@ fe80::204:acff:fe17:bf38
<taglist>
<tag><c>recv_avg</c></tag>
<item>
- <p>Average size of packets in bytes received to the socket.</p>
+ <p>Average size of packets in bytes received by the socket.</p>
</item>
<tag><c>recv_cnt</c></tag>
<item>
- <p>Number of packets received to the socket.</p>
+ <p>Number of packets received by the socket.</p>
</item>
<tag><c>recv_dvi</c></tag>
<item>
- <p>Average packet size deviation in bytes received to the socket.</p>
+ <p>Average packet size deviation in bytes received by the socket.</p>
</item>
<tag><c>recv_max</c></tag>
<item>
- <p>The size of the largest packet in bytes received to the socket.</p>
+ <p>The size of the largest packet in bytes received by the socket.</p>
</item>
<tag><c>recv_oct</c></tag>
<item>
- <p>Number of bytes received to the socket.</p>
+ <p>Number of bytes received by the socket.</p>
</item>
<tag><c>send_avg</c></tag>
diff --git a/lib/kernel/doc/src/kernel_app.xml b/lib/kernel/doc/src/kernel_app.xml
index 00c6bc33d6..96e3651140 100644
--- a/lib/kernel/doc/src/kernel_app.xml
+++ b/lib/kernel/doc/src/kernel_app.xml
@@ -4,7 +4,7 @@
<appref>
<header>
<copyright>
- <year>1996</year><year>2014</year>
+ <year>1996</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -188,6 +188,18 @@
<p>Define the <c>First..Last</c> port range for the listener
socket of a distributed Erlang node.</p>
</item>
+ <tag><c>{inet_dist_listen_options, Opts}</c></tag>
+ <item>
+ <p>Define a list of extra socket options to be used when opening the
+ listening socket for a distributed Erlang node.
+ See <seealso marker="gen_tcp#listen/2">gen_tcp:listen/2</seealso>.</p>
+ </item>
+ <tag><c>{inet_dist_connect_options, Opts}</c></tag>
+ <item>
+ <p>Define a list of extra socket options to be used when connecting to
+ other distributed Erlang nodes.
+ See <seealso marker="gen_tcp#connect/4">gen_tcp:connect/4</seealso>.</p>
+ </item>
<tag><c>inet_parse_error_log = silent</c></tag>
<item>
<p>If this configuration parameter is set, no
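A hedged sys.config sketch for the two new parameters; the option values below are examples only, not recommendations from the patch:

    %% sys.config (sketch)
    [{kernel,
      [{inet_dist_listen_options,  [{reuseaddr, true}, {nodelay, true}]},
       {inet_dist_connect_options, [{nodelay, true}]}]}].

As the inet_tcp_dist.erl changes later in this diff show, the listen options are merged into the gen_tcp:listen/2 option list and the connect options into the gen_tcp:connect/4 option list.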
diff --git a/lib/kernel/doc/src/notes.xml b/lib/kernel/doc/src/notes.xml
index 1ef106e17a..6f7f18a8e7 100644
--- a/lib/kernel/doc/src/notes.xml
+++ b/lib/kernel/doc/src/notes.xml
@@ -30,6 +30,54 @@
</header>
<p>This document describes the changes made to the Kernel application.</p>
+<section><title>Kernel 3.2</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ A bug causing an infinite loop in hostname resolving has
+ been corrected. To trigger this bug you would have to
+ enter a bogus search method from a configuration file,
+ e.g. .inetrc.</p>
+ <p>
+ Bug pinpointed by Emil Holmström</p>
+ <p>
+ Own Id: OTP-12133</p>
+ </item>
+ <item>
+ <p>
+ The standard_error process now handles the getopts I/O
+ protocol request correctly and stores its encoding in the
+ same way as standard_io.</p>
+ <p>
+ Also, io:put_chars(standard_error, [oops]) could
+ previously crash the standard_error process. This is now
+ corrected.</p>
+ <p>
+ Own Id: OTP-12424</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Configuration parameters for the Kernel application that
+ allow setting socket options for the distribution
+ sockets have been added. See the application Kernel
+ documentation; parameters 'inet_dist_listen_options' and
+ 'inet_dist_connect_options'.</p>
+ <p>
+ Own Id: OTP-12476 Aux Id: OTP-12476 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Kernel 3.1</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/kernel/doc/src/os.xml b/lib/kernel/doc/src/os.xml
index 8b85f24455..b9dbede0d3 100644
--- a/lib/kernel/doc/src/os.xml
+++ b/lib/kernel/doc/src/os.xml
@@ -142,14 +142,49 @@ DirOut = os:cmd("dir"), % on Win32 platform</code>
</desc>
</func>
<func>
+ <name name="system_time" arity="0"/>
+ <fsummary>Current OS system time</fsummary>
+ <desc>
+ <p>Returns current
+ <seealso marker="erts:time_correction#OS_System_Time">OS system time</seealso>
+ in <c>native</c>
+ <seealso marker="erts:erlang#type_time_unit">time unit</seealso>.</p>
+
+ <note><p>This time is <em>not</em> a monotonically increasing time.</p></note>
+ </desc>
+ </func>
+ <func>
+ <name name="system_time" arity="1"/>
+ <fsummary>Current OS system time</fsummary>
+ <desc>
+ <p>Returns current
+ <seealso marker="erts:time_correction#OS_System_Time">OS system time</seealso>
+ converted into the <c><anno>Unit</anno></c> passed as argument.</p>
+
+ <p>Calling <c>os:system_time(<anno>Unit</anno>)</c> is equivalent to:
+ <seealso marker="erts:erlang#convert_time_unit/3"><c>erlang:convert_time_unit</c></seealso><c>(</c><seealso marker="#system_time/0"><c>os:system_time()</c></seealso><c>,
+ native, <anno>Unit</anno>)</c>.</p>
+
+ <note><p>This time is <em>not</em> a monotonically increasing time.</p></note>
+ </desc>
+ </func>
+ <func>
<name name="timestamp" arity="0"/>
<type_desc variable="Timestamp">Timestamp = {MegaSecs, Secs, MicroSecs}</type_desc>
- <fsummary>Returna a timestamp from the OS in the erlang:now/0 format</fsummary>
+ <fsummary>Current OS system time in the erlang:timestamp/0 format</fsummary>
<desc>
- <p>Returns a tuple in the same format as <seealso marker="erts:erlang#now/0">erlang:now/0</seealso>. The difference is that this function returns what the operating system thinks (a.k.a. the wall clock time) without any attempts at time correction. The result of two different calls to this function is <em>not</em> guaranteed to be different.</p>
- <p>The most obvious use for this function is logging. The tuple can be used together with the function <seealso marker="stdlib:calendar#now_to_universal_time/1">calendar:now_to_universal_time/1</seealso>
-or <seealso marker="stdlib:calendar#now_to_local_time/1">calendar:now_to_local_time/1</seealso> to get calendar time. Using the calendar time together with the <c>MicroSecs</c> part of the return tuple from this function allows you to log timestamps in high resolution and consistent with the time in the rest of the operating system.</p>
- <p>Example of code formatting a string in the format &quot;DD Mon YYYY HH:MM:SS.mmmmmm&quot;, where DD is the day of month, Mon is the textual month name, YYYY is the year, HH:MM:SS is the time and mmmmmm is the microseconds in six positions:</p>
+ <p>Returns current
+ <seealso marker="erts:time_correction#OS_System_Time">OS system time</seealso>
+ in the same format as <seealso marker="erts:erlang#timestamp/0">erlang:timestamp/0</seealso>.
+ The tuple can be used together with the function
+ <seealso marker="stdlib:calendar#now_to_universal_time/1">calendar:now_to_universal_time/1</seealso>
+ or <seealso marker="stdlib:calendar#now_to_local_time/1">calendar:now_to_local_time/1</seealso> to
+ get calendar time. Using the calendar time together with the <c>MicroSecs</c> part of the return
+ tuple from this function allows you to log timestamps in high resolution and consistent with the
+ time in the rest of the operating system.</p>
+ <p>Example of code formatting a string in the format &quot;DD Mon YYYY HH:MM:SS.mmmmmm&quot;, where
+ DD is the day of month, Mon is the textual month name, YYYY is the year, HH:MM:SS is the time and
+ mmmmmm is the microseconds in six positions:</p>
<code>
-module(print_time).
-export([format_utc_timestamp/0]).
@@ -168,6 +203,9 @@ format_utc_timestamp() ->
1> <input>io:format("~s~n",[print_time:format_utc_timestamp()]).</input>
29 Apr 2009 9:55:30.051711
</pre>
+ <p>OS system time can also be retrieved by
+ <seealso marker="#system_time/0"><c>os:system_time/0</c></seealso>
+ and <seealso marker="#system_time/1"><c>os:system_time/1</c></seealso>.</p>
</desc>
</func>
<func>
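An illustrative shell session for the new functions; milli_seconds is just one possible time unit argument:

    1> Native = os:system_time().                               %% native time unit, not monotonic
    2> os:system_time(milli_seconds).                           %% same clock, converted by the BIF
    3> erlang:convert_time_unit(Native, native, milli_seconds). %% the documented equivalence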
diff --git a/lib/kernel/src/application_controller.erl b/lib/kernel/src/application_controller.erl
index daad45b6c2..a1a99a4e18 100644
--- a/lib/kernel/src/application_controller.erl
+++ b/lib/kernel/src/application_controller.erl
@@ -490,7 +490,8 @@ init(Init, Kernel) ->
%% called during start-up of any app.
case check_conf_data(ConfData) of
ok ->
- _ = ets:new(ac_tab, [set, public, named_table]),
+ _ = ets:new(ac_tab, [set, public, named_table,
+ {read_concurrency,true}]),
S = #state{conf_data = ConfData},
{ok, KAppl} = make_appl(Kernel),
case catch load(S, KAppl) of
@@ -1615,7 +1616,6 @@ conv([Key, Val | T]) ->
[{make_term(Key), make_term(Val)} | conv(T)];
conv(_) -> [].
-%%% Fix some day: eliminate the duplicated code here
make_term(Str) ->
case erl_scan:string(Str) of
{ok, Tokens, _} ->
@@ -1623,16 +1623,17 @@ make_term(Str) ->
{ok, Term} ->
Term;
{error, {_,M,Reason}} ->
- error_logger:format("application_controller: ~ts: ~ts~n",
- [M:format_error(Reason), Str]),
- throw({error, {bad_environment_value, Str}})
+ handle_make_term_error(M, Reason, Str)
end;
{error, {_,M,Reason}, _} ->
- error_logger:format("application_controller: ~ts: ~ts~n",
- [M:format_error(Reason), Str]),
- throw({error, {bad_environment_value, Str}})
+ handle_make_term_error(M, Reason, Str)
end.
+handle_make_term_error(Mod, Reason, Str) ->
+ error_logger:format("application_controller: ~ts: ~ts~n",
+ [Mod:format_error(Reason), Str]),
+ throw({error, {bad_environment_value, Str}}).
+
get_env_i(Name, #state{conf_data = ConfData}) when is_list(ConfData) ->
case lists:keyfind(Name, 1, ConfData) of
{_Name, Env} -> Env;
diff --git a/lib/kernel/src/auth.erl b/lib/kernel/src/auth.erl
index eda35147d3..dbc486bee1 100644
--- a/lib/kernel/src/auth.erl
+++ b/lib/kernel/src/auth.erl
@@ -370,8 +370,8 @@ check_cookie1([], Result) ->
%% Creates a new, random cookie.
create_cookie(Name) ->
- {_, S1, S2} = now(),
- Seed = S2*10000+S1,
+ Seed = abs(erlang:monotonic_time()
+ bxor erlang:unique_integer()),
Cookie = random_cookie(20, Seed, []),
case file:open(Name, [write, raw]) of
{ok, File} ->
diff --git a/lib/kernel/src/code.erl b/lib/kernel/src/code.erl
index 0eda558ed5..65045666ec 100644
--- a/lib/kernel/src/code.erl
+++ b/lib/kernel/src/code.erl
@@ -107,7 +107,7 @@ is_module_native(_) ->
-spec make_stub_module(Module, Beam, Info) -> Module when
Module :: module(),
Beam :: binary(),
- Info :: {list(), list()}.
+ Info :: {list(), list(), binary()}.
make_stub_module(_, _, _) ->
erlang:nif_error(undef).
@@ -339,7 +339,7 @@ do_start(Flags) ->
ok
end,
%% Quietly load native code for all modules loaded so far
- catch load_native_code_for_all_loaded(),
+ load_native_code_for_all_loaded(),
Ok2;
Other ->
Other
@@ -550,18 +550,43 @@ has_ext(Ext, Extlen, File) ->
_ -> false
end.
+%%%
+%%% Silently load native code for all modules loaded so far.
+%%%
+
-spec load_native_code_for_all_loaded() -> ok.
load_native_code_for_all_loaded() ->
Architecture = erlang:system_info(hipe_architecture),
- ChunkName = hipe_unified_loader:chunk_name(Architecture),
- lists:foreach(fun({Module, BeamFilename}) ->
- case code:is_module_native(Module) of
- false ->
- case beam_lib:chunks(BeamFilename, [ChunkName]) of
- {ok,{_,[{_,Bin}]}} when is_binary(Bin) ->
- load_native_partial(Module, Bin);
- {error, beam_lib, _} -> ok
- end;
- true -> ok
- end
- end, all_loaded()).
+ try hipe_unified_loader:chunk_name(Architecture) of
+ ChunkTag ->
+ Loaded = all_loaded(),
+ _ = spawn(fun() -> load_all_native(Loaded, ChunkTag) end),
+ ok
+ catch
+ _:_ ->
+ ok
+ end.
+
+load_all_native(Loaded, ChunkTag) ->
+ catch load_all_native_1(Loaded, ChunkTag).
+
+load_all_native_1([{_,preloaded}|T], ChunkTag) ->
+ load_all_native_1(T, ChunkTag);
+load_all_native_1([{Mod,BeamFilename}|T], ChunkTag) ->
+ case code:is_module_native(Mod) of
+ false ->
+ %% prim_file is faster than file and the file server may
+ %% not be started yet.
+ {ok,Beam} = prim_file:read_file(BeamFilename),
+ case code:get_chunk(Beam, ChunkTag) of
+ undefined ->
+ ok;
+ NativeCode when is_binary(NativeCode) ->
+ _ = load_native_partial(Mod, NativeCode),
+ ok
+ end;
+ true -> ok
+ end,
+ load_all_native_1(T, ChunkTag);
+load_all_native_1([], _) ->
+ ok.
diff --git a/lib/kernel/src/dist_util.erl b/lib/kernel/src/dist_util.erl
index b127fe2e33..6b510bd0c3 100644
--- a/lib/kernel/src/dist_util.erl
+++ b/lib/kernel/src/dist_util.erl
@@ -298,7 +298,7 @@ shutdown(_Module, _Line, _Data, Reason) ->
exit(Reason).
%% Use this line to debug connection.
%% Set net_kernel verbose = 1 as well.
-%% exit({Reason, ?MODULE, _Line, _Data, erlang:now()}).
+%% exit({Reason, ?MODULE, _Line, _Data, erlang:timestamp()}).
flush_down() ->
@@ -373,7 +373,9 @@ gen_digest(Challenge, Cookie) when is_integer(Challenge), is_atom(Cookie) ->
%% gen_challenge() returns a "random" number
%% ---------------------------------------------------------------
gen_challenge() ->
- {A,B,C} = erlang:now(),
+ A = erlang:phash2([erlang:node()]),
+ B = erlang:monotonic_time(),
+ C = erlang:unique_integer(),
{D,_} = erlang:statistics(reductions),
{E,_} = erlang:statistics(runtime),
{F,_} = erlang:statistics(wall_clock),
diff --git a/lib/kernel/src/erl_distribution.erl b/lib/kernel/src/erl_distribution.erl
index 25ad34357a..3c4429129e 100644
--- a/lib/kernel/src/erl_distribution.erl
+++ b/lib/kernel/src/erl_distribution.erl
@@ -22,7 +22,6 @@
-export([start_link/0,start_link/1,init/1,start/1,stop/0]).
-%-define(DBG,io:format("~p:~p~n",[?MODULE,?LINE])).
-define(DBG,erlang:display([?MODULE,?LINE])).
start_link() ->
diff --git a/lib/kernel/src/erts_debug.erl b/lib/kernel/src/erts_debug.erl
index ef605d0bfe..8f81fcf825 100644
--- a/lib/kernel/src/erts_debug.erl
+++ b/lib/kernel/src/erts_debug.erl
@@ -20,7 +20,7 @@
%% Low-level debugging support. EXPERIMENTAL!
--export([size/1,df/1,df/2,df/3]).
+-export([size/1,df/1,df/2,df/3,ic/1]).
%% This module contains the following *experimental* BIFs:
%% disassemble/1
@@ -33,7 +33,7 @@
-export([breakpoint/2, disassemble/1, display/1, dist_ext_to_term/2,
dump_monitors/1, dump_links/1, flat_size/1,
get_internal_state/1, instructions/0, lock_counters/1,
- same/2, set_internal_state/2]).
+ map_info/1, same/2, set_internal_state/2]).
-spec breakpoint(MFA, Flag) -> non_neg_integer() when
MFA :: {Module :: module(),
@@ -114,6 +114,19 @@ get_internal_state(_) ->
instructions() ->
erlang:nif_error(undef).
+-spec ic(F) -> Result when
+ F :: function(),
+ Result :: term().
+
+ic(F) when is_function(F) ->
+ Is0 = erlang:system_info(instruction_counts),
+ R = F(),
+ Is1 = erlang:system_info(instruction_counts),
+ Is = lists:keysort(2,[{I,C1 - C0}||{{I,C1},{I,C0}} <- lists:zip(Is1,Is0)]),
+ _ = [io:format("~12w ~w~n", [C,I])||{I,C}<-Is],
+ io:format("Total: ~w~n",[lists:sum([C||{_I,C}<-Is])]),
+ R.
+
-spec lock_counters(info) -> term();
(clear) -> ok;
({copy_save, boolean()}) -> boolean();
@@ -164,8 +177,10 @@ set_internal_state(_, _) ->
-spec size(term()) -> non_neg_integer().
+-record(s, {seen, maps}).
+
size(Term) ->
- {Sum,_} = size(Term, gb_trees:empty(), 0),
+ {Sum,_} = size(Term, #s{seen=gb_trees:empty(),maps=[]}, 0),
Sum.
size([H|T]=Term, Seen0, Sum0) ->
@@ -209,10 +224,24 @@ tuple_size(I, Sz, Tuple, Seen0, Sum0) ->
tuple_size(I+1, Sz, Tuple, Seen, Sum).
map_size(Map,Seen0,Sum0) ->
- Kt = erts_internal:map_to_tuple_keys(Map),
- Vs = maps:values(Map),
- {Sum1,Seen1} = size(Kt,Seen0,Sum0),
- fold_size(Vs,Seen1,Sum1+length(Vs)+3).
+ %% Danger:
+ %% The internal nodes from erts_internal:map_hashmap_children/1
+ %% are not allowed to leak anywhere. They are only allowed in
+ %% containers (cons cells and tuples, not maps), in gc and
+ %% in erts_debug:same/2
+ case erts_internal:map_type(Map) of
+ flatmap ->
+ Kt = erts_internal:map_to_tuple_keys(Map),
+ Vs = maps:values(Map),
+ {Sum1,Seen1} = size(Kt,Seen0,Sum0),
+ fold_size(Vs,Seen1,Sum1+length(Vs)+3);
+ hashmap ->
+ Cs = erts_internal:map_hashmap_children(Map),
+ fold_size(Cs,Seen0,Sum0+length(Cs)+2);
+ hashmap_node ->
+ Cs = erts_internal:map_hashmap_children(Map),
+ fold_size(Cs,Seen0,Sum0+length(Cs)+1)
+ end.
fun_size(Fun, Seen, Sum) ->
case erlang:fun_info(Fun, type) of
@@ -229,13 +258,18 @@ fold_size([H|T], Seen0, Sum0) ->
fold_size(T, Seen, Sum);
fold_size([], Seen, Sum) -> {Sum,Seen}.
-remember_term(Term, Seen) ->
- case gb_trees:lookup(Term, Seen) of
- none -> gb_trees:insert(Term, [Term], Seen);
+remember_term(Term, #s{maps=Ms}=S) when is_map(Term) ->
+ case is_term_seen(Term, Ms) of
+ false -> S#s{maps=[Term|Ms]};
+ true -> seen
+ end;
+remember_term(Term, #s{seen=T}=S) ->
+ case gb_trees:lookup(Term,T) of
+ none -> S#s{seen=gb_trees:insert(Term,[Term],T)};
{value,Terms} ->
case is_term_seen(Term, Terms) of
- false -> gb_trees:update(Term, [Term|Terms], Seen);
- true -> seen
+ false -> S#s{seen=gb_trees:update(Term,[Term|Terms],T)};
+ true -> seen
end
end.
@@ -313,3 +347,9 @@ cont_dis(File, {Addr,Str,MFA}, MFA) ->
io:put_chars(File, binary_to_list(Str)),
cont_dis(File, erts_debug:disassemble(Addr), MFA);
cont_dis(_, {_,_,_}, _) -> ok.
+
+-spec map_info(Map) -> list() when
+ Map :: map().
+
+map_info(_) ->
+ erlang:nif_error(undef).
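A usage sketch for the new erts_debug:ic/1 helper; the measured fun is arbitrary, and erlang:system_info(instruction_counts) is assumed to be supported by the running emulator build:

    %% prints one line per executed instruction kind plus a total,
    %% then returns the result of the fun
    erts_debug:ic(fun() -> lists:sort([3,1,2]) end).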
diff --git a/lib/kernel/src/file_io_server.erl b/lib/kernel/src/file_io_server.erl
index 0e9ff5bc0f..7d30e7e1d8 100644
--- a/lib/kernel/src/file_io_server.erl
+++ b/lib/kernel/src/file_io_server.erl
@@ -307,18 +307,18 @@ io_request({get_chars,Enc,_Prompt,N},
#state{}=State) ->
get_chars(N, Enc, State);
-%%
-%% This optimization gives almost nothing - needs more working...
-%% Disabled for now. /PaN
-%%
-%% io_request({get_line,Enc,_Prompt},
-%% #state{unic=latin1}=State) ->
-%% get_line(Enc,State);
-
-io_request({get_line,Enc,_Prompt},
- #state{}=State) ->
- get_chars(io_lib, collect_line, [], Enc, State);
-
+io_request({get_line,OutEnc,_Prompt}, #state{buf=Buf, read_mode=Mode, unic=InEnc} = State0) ->
+ try
+ %% Minimize the encoding conversions
+ WorkEnc = case InEnc of
+ {_,_} -> OutEnc; %% utf16 or utf32
+ _ -> InEnc %% Byte oriented utf8 or latin1
+ end,
+ {Res, State} = get_line(start, convert_enc(Buf, InEnc, WorkEnc), WorkEnc, State0),
+ {reply, cast(Res, Mode, WorkEnc, OutEnc), State}
+ catch exit:ExError ->
+ {stop,ExError,{error,ExError},State0#state{buf= <<>>}}
+ end;
io_request({setopts, Opts},
#state{}=State) when is_list(Opts) ->
@@ -386,56 +386,40 @@ put_chars(Chars, InEncoding, #state{handle=Handle, unic=OutEncoding}=State) ->
{stop,normal,{error,{no_translation, InEncoding, OutEncoding}},State}
end.
-%%
-%% Process the I/O request get_line for latin1 encoding of file specially
-%% Unfortunately this function gives almost nothing, it needs more work
-%% I disable it for now /PaN
-%%
-%% srch(<<>>,_,_) ->
-%% nomatch;
-%% srch(<<X:8,_/binary>>,X,N) ->
-%% {match,N};
-%% srch(<<_:8,T/binary>>,X,N) ->
-%% srch(T,X,N+1).
-%% get_line(OutEnc, #state{handle=Handle,buf = <<>>,unic=latin1}=State) ->
-%% case ?PRIM_FILE:read(Handle,?READ_SIZE_BINARY) of
-%% {ok, B} ->
-%% get_line(OutEnc, State#state{buf = B});
-%% eof ->
-%% {reply,eof,State};
-%% {error,Reason}=Error ->
-%% {stop,Reason,Error,State}
-%% end;
-%% get_line(OutEnc, #state{handle=Handle,buf=Buf,read_mode=ReadMode,unic=latin1}=State) ->
-%% case srch(Buf,$\n,0) of
-%% nomatch ->
-%% case ?PRIM_FILE:read(Handle,?READ_SIZE_BINARY) of
-%% {ok, B} ->
-%% get_line(OutEnc,State#state{buf = <<Buf/binary,B/binary>>});
-%% eof ->
-%% std_reply(cast(Buf, ReadMode,latin1,OutEnc), State);
-%% {error,Reason}=Error ->
-%% {stop,Reason,Error,State#state{buf= <<>>}}
-%% end;
-%% {match,Pos} when Pos >= 1->
-%% PosP1 = Pos + 1,
-%% <<Res0:PosP1/binary,NewBuf/binary>> = Buf,
-%% PosM1 = Pos - 1,
-%% Res = case Res0 of
-%% <<Chomped:PosM1/binary,$\r:8,$\n:8>> ->
-%% cat(Chomped, <<"\n">>, ReadMode,latin1,OutEnc);
-%% _Other ->
-%% cast(Res0, ReadMode,latin1,OutEnc)
-%% end,
-%% {reply,Res,State#state{buf=NewBuf}};
-%% {match,Pos} ->
-%% PosP1 = Pos + 1,
-%% <<Res:PosP1/binary,NewBuf/binary>> = Buf,
-%% {reply,Res,State#state{buf=NewBuf}}
-%% end;
-%% get_line(_, #state{}=State) ->
-%% {error,{error,get_line},State}.
-
+get_line(S, {<<>>, Cont}, OutEnc,
+ #state{handle=Handle, read_mode=Mode, unic=InEnc}=State) ->
+ case ?PRIM_FILE:read(Handle, read_size(Mode)) of
+ {ok,Bin} ->
+ get_line(S, convert_enc([Cont, Bin], InEnc, OutEnc), OutEnc, State);
+ eof ->
+ get_line(S, {eof, Cont}, OutEnc, State);
+ {error,Reason}=Error ->
+ {stop,Reason,Error,State}
+ end;
+get_line(S0, {Buf, BCont}, OutEnc, #state{unic=InEnc}=State) ->
+ case io_lib:collect_line(S0, Buf, OutEnc, []) of
+ {stop, Result, Cont0} ->
+ %% Convert both buffers back to file InEnc encoding
+ {Cont, <<>>} = convert_enc(Cont0, OutEnc, InEnc),
+ {Result, State#state{buf=cast_binary([Cont, BCont])}};
+ S ->
+ get_line(S, {<<>>, BCont}, OutEnc, State)
+ end.
+
+convert_enc(Bins, Enc, Enc) ->
+ {cast_binary(Bins), <<>>};
+convert_enc(eof, _, _) ->
+ {<<>>, <<>>};
+convert_enc(Bin, InEnc, OutEnc) ->
+ case unicode:characters_to_binary(Bin, InEnc, OutEnc) of
+ Res when is_binary(Res) ->
+ {Res, <<>>};
+ {incomplete, Res, Cont} ->
+ {Res, Cont};
+ {error, _, _} ->
+ exit({no_translation, InEnc, OutEnc})
+ end.
+
%%
%% Process the I/O request get_chars
%%
@@ -640,8 +624,6 @@ invalid_unicode_error(Mod, Func, XtraArg, S) ->
%% Convert error code to make it look as before
err_func(io_lib, get_until, {_,F,_}) ->
- F;
-err_func(_, F, _) ->
F.
@@ -713,6 +695,8 @@ cat(B1, B2, list, latin1,_) ->
binary_to_list(B1)++binary_to_list(B2).
%% Cast binary to list or binary
+cast(eof, _, _, _) ->
+ eof;
cast(B, binary, latin1, latin1) ->
B;
cast(B, binary, InEncoding, OutEncoding) ->
@@ -736,6 +720,8 @@ cast(B, list, InEncoding, OutEncoding) ->
%% Convert buffer to binary
cast_binary(Binary) when is_binary(Binary) ->
Binary;
+cast_binary([<<>>|List]) ->
+ cast_binary(List);
cast_binary(List) when is_list(List) ->
list_to_binary(List);
cast_binary(_EOF) ->
diff --git a/lib/kernel/src/gen_udp.erl b/lib/kernel/src/gen_udp.erl
index 70dceb3679..860eec10a0 100644
--- a/lib/kernel/src/gen_udp.erl
+++ b/lib/kernel/src/gen_udp.erl
@@ -78,7 +78,7 @@
ipv6_v6only.
-type socket() :: port().
--export_type([option/0, option_name/0]).
+-export_type([option/0, option_name/0, socket/0]).
-spec open(Port) -> {ok, Socket} | {error, Reason} when
Port :: inet:port_number(),
diff --git a/lib/kernel/src/global.erl b/lib/kernel/src/global.erl
index 0a4edea452..6c36d417a2 100644
--- a/lib/kernel/src/global.erl
+++ b/lib/kernel/src/global.erl
@@ -881,11 +881,12 @@ handle_info({nodeup, Node}, S0) when S0#state.connect_all ->
false ->
resend_pre_connect(Node),
- %% now() is used as a tag to separate different synch sessions
+ %% erlang:unique_integer([monotonic]) is used as a tag to
+ %% separate different synch sessions
%% from each others. Global could be confused at bursty nodeups
%% because it couldn't separate the messages between the different
%% synch sessions started by a nodeup.
- MyTag = now(),
+ MyTag = erlang:unique_integer([monotonic]),
put({sync_tag_my, Node}, MyTag),
?trace({sending_nodeup_to_locker, {node,Node},{mytag,MyTag}}),
S1#state.the_locker ! {nodeup, Node, MyTag},
@@ -1772,8 +1773,8 @@ update_locker_known(Upd, S) ->
S#multi{known = Known, the_boss = TheBoss}.
random_element(L) ->
- {A,B,C} = now(),
- E = (A+B+C) rem length(L),
+ E = abs(erlang:monotonic_time()
+ bxor erlang:unique_integer()) rem length(L),
lists:nth(E+1, L).
exclude_known(Others, Known) ->
@@ -2072,9 +2073,10 @@ random_sleep(Times) ->
end,
case get(random_seed) of
undefined ->
- {A1, A2, A3} = now(),
- _ = random:seed(A1, A2, A3 + erlang:phash(node(), 100000)),
- ok;
+ _ = random:seed(erlang:phash2([erlang:node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
+ ok;
_ -> ok
end,
%% First time 1/4 seconds, then doubling each time up to 8 seconds max.
@@ -2106,7 +2108,7 @@ trace_message(S, M, X) ->
S#state{trace = [trace_message(M, X) | S#state.trace]}.
trace_message(M, X) ->
- {node(), now(), M, nodes(), X}.
+ {node(), erlang:timestamp(), M, nodes(), X}.
%%-----------------------------------------------------------------
%% Each sync process corresponds to one call to sync. Each such
diff --git a/lib/kernel/src/heart.erl b/lib/kernel/src/heart.erl
index daed6dd488..77cd5433de 100644
--- a/lib/kernel/src/heart.erl
+++ b/lib/kernel/src/heart.erl
@@ -25,7 +25,7 @@
%%%--------------------------------------------------------------------
%%% This is a rewrite of pre_heart from BS.3.
%%%
-%%% The purpose of this process-module is to act as an supervisor
+%%% The purpose of this process-module is to act as a supervisor
%%% of the entire erlang-system. This 'heart' beats with a frequence
%%% satisfying an external port program *not* reboot the entire
%%% system. If however the erlang-emulator would hang, a reboot is
diff --git a/lib/kernel/src/hipe_unified_loader.erl b/lib/kernel/src/hipe_unified_loader.erl
index e5928c7b63..49d4a8fe54 100644
--- a/lib/kernel/src/hipe_unified_loader.erl
+++ b/lib/kernel/src/hipe_unified_loader.erl
@@ -194,6 +194,7 @@ load_common(Mod, Bin, Beam, OldReferencesToPatch) ->
CodeSize, CodeBinary, Refs,
0,[] % ColdSize, CRrefs
] = binary_to_term(Bin),
+ MD5 = erlang:md5(Bin), % use md5 of actual running code for module_info
?debug_msg("***** ErLLVM *****~nVersion: ~s~nCheckSum: ~w~nConstAlign: ~w~n" ++
"ConstSize: ~w~nConstMap: ~w~nLabelMap: ~w~nExportMap ~w~nRefs ~w~n",
[Version, CheckSum, ConstAlign, ConstSize, ConstMap, LabelMap, ExportMap,
@@ -254,7 +255,8 @@ load_common(Mod, Bin, Beam, OldReferencesToPatch) ->
AddressesOfClosuresToPatch =
calculate_addresses(ClosurePatches, CodeAddress, Addresses),
export_funs(Addresses),
- export_funs(Mod, BeamBinary, Addresses, AddressesOfClosuresToPatch)
+ export_funs(Mod, MD5, BeamBinary,
+ Addresses, AddressesOfClosuresToPatch)
end,
%% Redirect references to the old module to the new module's BEAM stub.
patch_to_emu_step2(OldReferencesToPatch),
@@ -430,9 +432,9 @@ export_funs([FunDef | Addresses]) ->
export_funs([]) ->
ok.
-export_funs(Mod, Beam, Addresses, ClosuresToPatch) ->
+export_funs(Mod, MD5, Beam, Addresses, ClosuresToPatch) ->
Fs = [{F,A,Address} || #fundef{address=Address, mfa={_M,F,A}} <- Addresses],
- Mod = code:make_stub_module(Mod, Beam, {Fs,ClosuresToPatch}),
+ Mod = code:make_stub_module(Mod, Beam, {Fs,ClosuresToPatch,MD5}),
ok.
%%========================================================================
@@ -827,7 +829,6 @@ patch_to_emu_step1(Mod) ->
%% were added as the result of dynamic apply calls. We must
%% purge them too, but we have no explicit record of them.
%% Therefore invalidate all native addresses for the module.
- %% emu_make_stubs/1 will repair the ones for compiled static calls.
hipe_bifs:invalidate_funinfo_native_addresses(MFAs),
%% Find all call sites that call these MFAs. As a side-effect,
%% create native stubs for any MFAs that are referred.
@@ -841,7 +842,6 @@ patch_to_emu_step1(Mod) ->
%% Step 2 must occur after the new BEAM stub module is created.
patch_to_emu_step2(ReferencesToPatch) ->
- emu_make_stubs(ReferencesToPatch),
redirect(ReferencesToPatch).
-spec is_loaded(Module::atom()) -> boolean().
@@ -852,21 +852,6 @@ is_loaded(M) when is_atom(M) ->
catch _:_ -> false
end.
--ifdef(notdef).
-emu_make_stubs([{MFA,_Refs}|Rest]) ->
- make_stub(MFA),
- emu_make_stubs(Rest);
-emu_make_stubs([]) ->
- [].
-
-make_stub({_,_,A} = MFA) ->
- EmuAddress = hipe_bifs:get_emu_address(MFA),
- StubAddress = hipe_bifs:make_native_stub(EmuAddress, A),
- hipe_bifs:set_funinfo_native_address(MFA, StubAddress).
--else.
-emu_make_stubs(_) -> [].
--endif.
-
%%--------------------------------------------------------------------
%% Given a list of MFAs, tag them with their referred_from references.
%% The resulting {MFA,Refs} list is later passed to redirect/1, once
diff --git a/lib/kernel/src/inet.erl b/lib/kernel/src/inet.erl
index 43bab8bcf0..d668738109 100644
--- a/lib/kernel/src/inet.erl
+++ b/lib/kernel/src/inet.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1070,7 +1070,7 @@ gethostbyname_tm(Name, Type, Timer, [wins|_]=Opts) ->
gethostbyname_tm_native(Name, Type, Timer, Opts);
gethostbyname_tm(Name, Type, Timer, [native|_]=Opts) ->
gethostbyname_tm_native(Name, Type, Timer, Opts);
-gethostbyname_tm(Name, Type, Timer, [_|_]=Opts) ->
+gethostbyname_tm(Name, Type, Timer, [_|Opts]) ->
gethostbyname_tm(Name, Type, Timer, Opts);
%% Make sure we always can look up our own hostname.
gethostbyname_tm(Name, Type, Timer, []) ->
@@ -1527,26 +1527,28 @@ tcp_controlling_process(S, NewOwner) when is_port(S), is_pid(NewOwner) ->
_ ->
case prim_inet:getopt(S, active) of
{ok, A0} ->
- case A0 of
- false -> ok;
- _ -> ok = prim_inet:setopt(S, active, false)
- end,
- case tcp_sync_input(S, NewOwner, false) of
- true -> %% socket already closed,
+ SetOptRes =
+ case A0 of
+ false -> ok;
+ _ -> prim_inet:setopt(S, active, false)
+ end,
+ case {tcp_sync_input(S, NewOwner, false), SetOptRes} of
+ {true, _} -> %% socket already closed
ok;
- false ->
+ {false, ok} ->
try erlang:port_connect(S, NewOwner) of
true ->
unlink(S), %% unlink from port
case A0 of
false -> ok;
- _ -> ok = prim_inet:setopt(S, active, A0)
- end,
- ok
+ _ -> prim_inet:setopt(S, active, A0)
+ end
catch
error:Reason ->
{error, Reason}
- end
+ end;
+ {false, Error} ->
+ Error
end;
Error ->
Error
diff --git a/lib/kernel/src/inet_config.erl b/lib/kernel/src/inet_config.erl
index fdc244f959..187bfbdab0 100644
--- a/lib/kernel/src/inet_config.erl
+++ b/lib/kernel/src/inet_config.erl
@@ -113,13 +113,7 @@ init() ->
{unix,_} ->
%% The Etc variable enables us to run tests with other
%% configuration files than the normal ones
- Etc =
- case os:getenv("ERL_INET_ETC_DIR") of
- false ->
- ?DEFAULT_ETC;
- _EtcDir ->
- _EtcDir
- end,
+ Etc = os:getenv("ERL_INET_ETC_DIR", ?DEFAULT_ETC),
case inet_db:res_option(resolv_conf) of
undefined ->
inet_db:res_option(
@@ -152,11 +146,7 @@ erl_dist_mode() ->
do_load_resolv({unix,Type}, longnames) ->
%% The Etc variable enables us to run tests with other
%% configuration files than the normal ones
- Etc = case os:getenv("ERL_INET_ETC_DIR") of
- false -> ?DEFAULT_ETC;
- _EtcDir ->
- _EtcDir
- end,
+ Etc = os:getenv("ERL_INET_ETC_DIR", ?DEFAULT_ETC),
load_resolv(filename:join(Etc, ?DEFAULT_RESOLV), resolv),
case Type of
freebsd -> %% we may have to check version (2.2.2)
@@ -307,10 +297,7 @@ load_hosts(File,Os) ->
win32_load_from_registry(Type) ->
%% The TcpReg variable enables us to run tests with other registry configurations than
%% the normal ones
- TcpReg = case os:getenv("ERL_INET_ETC_DIR") of
- false -> [];
- _TReg -> _TReg
- end,
+ TcpReg = os:getenv("ERL_INET_ETC_DIR", ""),
{ok, Reg} = win32reg:open([read]),
{TcpIp,HFileKey} =
case Type of
diff --git a/lib/kernel/src/inet_db.erl b/lib/kernel/src/inet_db.erl
index 2ebdc0f554..abe207295f 100644
--- a/lib/kernel/src/inet_db.erl
+++ b/lib/kernel/src/inet_db.erl
@@ -1372,8 +1372,7 @@ cache_rr(_Db, Cache, RR) ->
ets:insert(Cache, RR).
times() ->
- {Mega,Secs,_} = erlang:now(),
- Mega*1000000 + Secs.
+ erlang:monotonic_time(1).
%% lookup and remove old entries
diff --git a/lib/kernel/src/inet_parse.erl b/lib/kernel/src/inet_parse.erl
index a88c94a453..a694642b19 100644
--- a/lib/kernel/src/inet_parse.erl
+++ b/lib/kernel/src/inet_parse.erl
@@ -675,28 +675,22 @@ ipv6_addr_done(Ar, Br, N) ->
ipv6_addr_done(Ar) ->
list_to_tuple(lists:reverse(Ar)).
-%% Collect Hex digits
-hex(Cs) -> hex(Cs, []).
-%%
-hex([C|Cs], R) when C >= $0, C =< $9 ->
- hex(Cs, [C|R]);
-hex([C|Cs], R) when C >= $a, C =< $f ->
- hex(Cs, [C|R]);
-hex([C|Cs], R) when C >= $A, C =< $F ->
- hex(Cs, [C|R]);
-hex(Cs, [_|_]=R) when is_list(Cs) ->
+%% Collect 1-4 Hex digits
+hex(Cs) -> hex(Cs, [], 4).
+%%
+hex([C|Cs], R, N) when C >= $0, C =< $9, N > 0 ->
+ hex(Cs, [C|R], N-1);
+hex([C|Cs], R, N) when C >= $a, C =< $f, N > 0 ->
+ hex(Cs, [C|R], N-1);
+hex([C|Cs], R, N) when C >= $A, C =< $F, N > 0 ->
+ hex(Cs, [C|R], N-1);
+hex(Cs, [_|_]=R, _) when is_list(Cs) ->
{lists:reverse(R),Cs};
-hex(_, _) ->
+hex(_, _, _) ->
erlang:error(badarg).
%% Hex string to integer
-hex_to_int(Cs0) ->
- case strip0(Cs0) of
- Cs when length(Cs) =< 4 ->
- erlang:list_to_integer("0"++Cs, 16);
- _ ->
- erlang:error(badarg)
- end.
+hex_to_int(Cs) -> erlang:list_to_integer(Cs, 16).
%% Dup onto head of existing list
dup(0, _, L) ->
diff --git a/lib/kernel/src/inet_res.erl b/lib/kernel/src/inet_res.erl
index 6037da1d22..410128a16a 100644
--- a/lib/kernel/src/inet_res.erl
+++ b/lib/kernel/src/inet_res.erl
@@ -715,10 +715,10 @@ udp_send(#sock{inet=I}, {A,B,C,D}=IP, Port, Buffer)
udp_recv(#sock{inet6=I}, {A,B,C,D,E,F,G,H}=IP, Port, Timeout, Decode)
when ?ip6(A,B,C,D,E,F,G,H), ?port(Port) ->
- do_udp_recv(I, IP, Port, Timeout, Decode, erlang:now(), Timeout);
+ do_udp_recv(I, IP, Port, Timeout, Decode, time_now(), Timeout);
udp_recv(#sock{inet=I}, {A,B,C,D}=IP, Port, Timeout, Decode)
when ?ip(A,B,C,D), ?port(Port) ->
- do_udp_recv(I, IP, Port, Timeout, Decode, erlang:now(), Timeout).
+ do_udp_recv(I, IP, Port, Timeout, Decode, time_now(), Timeout).
do_udp_recv(_I, _IP, _Port, 0, _Decode, _Start, _T) ->
timeout;
@@ -742,7 +742,7 @@ do_udp_recv(I, IP, Port, Timeout, Decode, Start, T) ->
NewTimeout = erlang:max(0, Timeout - 50),
do_udp_recv(I, IP, Port, NewTimeout, Decode, Start, T);
false ->
- Now = erlang:now(),
+ Now = time_now(),
NewT = erlang:max(0, Timeout - now_ms(Now, Start)),
do_udp_recv(I, IP, Port, Timeout, Decode, Start, NewT);
Result ->
@@ -1057,5 +1057,9 @@ dns_msg(Msg) ->
end.
-compile({inline, [now_ms/2]}).
-now_ms({Meg1,Sec1,Mic1}, {Meg0,Sec0,Mic0}) ->
- ((Meg1-Meg0)*1000000 + (Sec1-Sec0))*1000 + ((Mic1-Mic0) div 1000).
+now_ms(Int1, Int0) ->
+ Int1 - Int0.
+
+-compile({inline, [time_now/0]}).
+time_now() ->
+ erlang:monotonic_time(1000).
diff --git a/lib/kernel/src/inet_sctp.erl b/lib/kernel/src/inet_sctp.erl
index 93528d305d..f0f13c8d4a 100644
--- a/lib/kernel/src/inet_sctp.erl
+++ b/lib/kernel/src/inet_sctp.erl
@@ -133,15 +133,18 @@ connect_get_assoc(S, Addr, Port, Active, Timer) ->
Timeout = inet:timeout(Timer),
receive
{sctp,S,Addr,Port,{_,#sctp_assoc_change{state=St}=Ev}} ->
- case Active of
- once ->
- ok = prim_inet:setopt(S, active, once);
- _ -> ok
- end,
- if St =:= comm_up ->
+ SetOptRes =
+ case Active of
+ once -> prim_inet:setopt(S, active, once);
+ _ -> ok
+ end,
+ case {St, SetOptRes} of
+ {comm_up, ok} ->
{ok,Ev};
- true ->
- {error,Ev}
+ {_, ok} ->
+ {error,Ev};
+ {_, Error} ->
+ Error
end
after Timeout ->
{error,timeout}
diff --git a/lib/kernel/src/inet_tcp_dist.erl b/lib/kernel/src/inet_tcp_dist.erl
index 63f236b069..fb60a14afb 100644
--- a/lib/kernel/src/inet_tcp_dist.erl
+++ b/lib/kernel/src/inet_tcp_dist.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -77,7 +77,7 @@ listen(Name) ->
Error
end.
-do_listen(Options0) ->
+do_listen(Options) ->
{First,Last} = case application:get_env(kernel,inet_dist_listen_min) of
{ok,N} when is_integer(N) ->
case application:get_env(kernel,
@@ -90,13 +90,7 @@ do_listen(Options0) ->
_ ->
{0,0}
end,
- Options = case application:get_env(kernel, inet_dist_use_interface) of
- {ok, Ip} ->
- [{ip, Ip} | Options0];
- _ ->
- Options0
- end,
- do_listen(First, Last, [{backlog,128}|Options]).
+ do_listen(First, Last, listen_options([{backlog,128}|Options])).
do_listen(First,Last,_) when First > Last ->
{error,eaddrinuse};
@@ -108,6 +102,22 @@ do_listen(First,Last,Options) ->
Other
end.
+listen_options(Opts0) ->
+ Opts1 =
+ case application:get_env(kernel, inet_dist_use_interface) of
+ {ok, Ip} ->
+ [{ip, Ip} | Opts0];
+ _ ->
+ Opts0
+ end,
+ case application:get_env(kernel, inet_dist_listen_options) of
+ {ok,ListenOpts} ->
+ ListenOpts ++ Opts1;
+ _ ->
+ Opts1
+ end.
+
+
%% ------------------------------------------------------------
%% Accepts new connection attempts from other Erlang nodes.
%% ------------------------------------------------------------
@@ -219,7 +229,7 @@ nodelay() ->
_ ->
{nodelay, true}
end.
-
+
%% ------------------------------------------------------------
%% Get remote information about a Socket.
@@ -260,9 +270,11 @@ do_setup(Kernel, Node, Type, MyNode, LongOrShortNames,SetupTime) ->
?trace("port_please(~p) -> version ~p~n",
[Node,Version]),
dist_util:reset_timer(Timer),
- case inet_tcp:connect(Ip, TcpPort,
- [{active, false},
- {packet,2}]) of
+ case
+ inet_tcp:connect(
+ Ip, TcpPort,
+ connect_options([{active, false}, {packet, 2}]))
+ of
{ok, Socket} ->
HSData = #hs_data{
kernel_pid = Kernel,
@@ -324,6 +336,14 @@ do_setup(Kernel, Node, Type, MyNode, LongOrShortNames,SetupTime) ->
?shutdown(Node)
end.
+connect_options(Opts) ->
+ case application:get_env(kernel, inet_dist_connect_options) of
+ {ok,ConnectOpts} ->
+ ConnectOpts ++ Opts;
+ _ ->
+ Opts
+ end.
+
%%
%% Close a socket.
%%
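The two new kernel parameters are fetched with application:get_env/2 and concatenated in front of the built-in socket options. A hedged example of supplying them; the option values are illustrative only (priority is what the test suite further below checks):

    %% On the command line:
    %%   erl -kernel inet_dist_listen_options '[{priority,1}]' \
    %%       -kernel inet_dist_connect_options '[{priority,1}]'
    %%
    %% or equivalently in sys.config:
    [{kernel,
      [{inet_dist_listen_options,  [{priority,1}]},
       {inet_dist_connect_options, [{priority,1}]}]}].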
diff --git a/lib/kernel/src/kernel.app.src b/lib/kernel/src/kernel.app.src
index 9f6c0f4624..9787dca162 100644
--- a/lib/kernel/src/kernel.app.src
+++ b/lib/kernel/src/kernel.app.src
@@ -115,6 +115,6 @@
{applications, []},
{env, [{error_logger, tty}]},
{mod, {kernel, []}},
- {runtime_dependencies, ["erts-6.1.2", "stdlib-2.0", "sasl-2.4"]}
+ {runtime_dependencies, ["erts-7.0", "stdlib-2.5", "sasl-2.4"]}
]
}.
diff --git a/lib/kernel/src/kernel.appup.src b/lib/kernel/src/kernel.appup.src
index 1bae762bed..5d3836bad7 100644
--- a/lib/kernel/src/kernel.appup.src
+++ b/lib/kernel/src/kernel.appup.src
@@ -1,7 +1,7 @@
%% -*- erlang -*-
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -17,7 +17,7 @@
%% %CopyrightEnd%
{"%VSN%",
%% Up from - max one major revision back
- [{<<"3\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-17
+ [{<<"3\\.[0-2](\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-17
%% Down to - max one major revision back
- [{<<"3\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-17
+ [{<<"3\\.[0-2](\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-17
}.
diff --git a/lib/kernel/src/kernel.erl b/lib/kernel/src/kernel.erl
index ecdb32424a..cc5683ba06 100644
--- a/lib/kernel/src/kernel.erl
+++ b/lib/kernel/src/kernel.erl
@@ -117,7 +117,7 @@ init([]) ->
[{local, kernel_safe_sup}, ?MODULE, safe]},
permanent, infinity, supervisor, [?MODULE]},
{ok, {SupFlags,
- [File, Code, StdError, User,
+ [Code, File, StdError, User,
Config, SafeSupervisor]}};
_ ->
Rpc = {rex, {rpc, start_link, []},
@@ -139,8 +139,8 @@ init([]) ->
[{local, kernel_safe_sup}, ?MODULE, safe]},
permanent, infinity, supervisor, [?MODULE]},
{ok, {SupFlags,
- [Rpc, Global, InetDb | DistAC] ++
- [NetSup, Glo_grp, File, Code,
+ [Code, Rpc, Global, InetDb | DistAC] ++
+ [NetSup, Glo_grp, File,
StdError, User, Config, SafeSupervisor] ++ Timer}}
end;
init(safe) ->
diff --git a/lib/kernel/src/os.erl b/lib/kernel/src/os.erl
index 8aaf13b3fd..3647545777 100644
--- a/lib/kernel/src/os.erl
+++ b/lib/kernel/src/os.erl
@@ -26,7 +26,8 @@
%%% BIFs
--export([getenv/0, getenv/1, getenv/2, getpid/0, putenv/2, timestamp/0, unsetenv/1]).
+-export([getenv/0, getenv/1, getenv/2, getpid/0, putenv/2, system_time/0, system_time/1,
+ timestamp/0, unsetenv/1]).
-spec getenv() -> [string()].
@@ -65,6 +66,17 @@ getpid() ->
putenv(_, _) ->
erlang:nif_error(undef).
+-spec system_time() -> integer().
+
+system_time() ->
+ erlang:nif_error(undef).
+
+-spec system_time(Unit) -> integer() when
+ Unit :: erlang:time_unit().
+
+system_time(_Unit) ->
+ erlang:nif_error(undef).
+
-spec timestamp() -> Timestamp when
Timestamp :: erlang:timestamp().
@@ -98,10 +110,7 @@ version() ->
Name :: string(),
Filename :: string().
find_executable(Name) ->
- case os:getenv("PATH") of
- false -> find_executable(Name, []);
- Path -> find_executable(Name, Path)
- end.
+ find_executable(Name, os:getenv("PATH", "")).
-spec find_executable(Name, Path) -> Filename | 'false' when
Name :: string(),
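os:system_time/0 and os:system_time/1 are new functions (the Erlang stubs above are placeholders for BIFs, as the %%% BIFs comment notes); they return the OS view of system time, in native units or converted to a given erlang:time_unit(). A usage sketch with a hypothetical helper:

    %% OS system time as an integer, in native units and in seconds.
    os_times() ->
        {os:system_time(), os:system_time(seconds)}.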
diff --git a/lib/kernel/src/pg2.erl b/lib/kernel/src/pg2.erl
index b562d4ffd2..70d7a75671 100644
--- a/lib/kernel/src/pg2.erl
+++ b/lib/kernel/src/pg2.erl
@@ -140,19 +140,22 @@ get_closest_pid(Name) ->
[Pid] ->
Pid;
[] ->
- {_,_,X} = erlang:now(),
case get_members(Name) of
[] -> {error, {no_process, Name}};
Members ->
- lists:nth((X rem length(Members))+1, Members)
+ random_element(Members)
end;
Members when is_list(Members) ->
- {_,_,X} = erlang:now(),
- lists:nth((X rem length(Members))+1, Members);
+ random_element(Members);
Else ->
Else
end.
+random_element(List) ->
+ X = abs(erlang:monotonic_time()
+ bxor erlang:unique_integer()),
+ lists:nth((X rem length(List)) + 1, List).
+
%%%
%%% Callback functions from gen_server
%%%
diff --git a/lib/kernel/src/standard_error.erl b/lib/kernel/src/standard_error.erl
index 10cf77e0d4..1c43063937 100644
--- a/lib/kernel/src/standard_error.erl
+++ b/lib/kernel/src/standard_error.erl
@@ -63,7 +63,7 @@ server(PortName,PortSettings) ->
run(Port).
run(P) ->
- put(unicode,false),
+ put(encoding, latin1),
server_loop(P).
server_loop(Port) ->
@@ -95,25 +95,47 @@ do_io_request(Req, From, ReplyAs, Port) ->
io_reply(From, ReplyAs, Reply).
%% New in R13B
-% Wide characters (Unicode)
-io_request({put_chars,Encoding,Chars}, Port) -> % Binary new in R9C
- put_chars(wrap_characters_to_binary(Chars,Encoding,
- case get(unicode) of
- true -> unicode;
- _ -> latin1
- end), Port);
-io_request({put_chars,Encoding,Mod,Func,Args}, Port) ->
- Result = case catch apply(Mod,Func,Args) of
- Data when is_list(Data); is_binary(Data) ->
- wrap_characters_to_binary(Data,Encoding,
- case get(unicode) of
- true -> unicode;
- _ -> latin1
- end);
- Undef ->
- Undef
- end,
- put_chars(Result, Port);
+%% Encoding option (unicode/latin1)
+io_request({put_chars,unicode,Chars}, Port) ->
+ case wrap_characters_to_binary(Chars, unicode, get(encoding)) of
+ error ->
+ {error,{error,put_chars}};
+ Bin ->
+ put_chars(Bin, Port)
+ end;
+io_request({put_chars,unicode,Mod,Func,Args}, Port) ->
+ case catch apply(Mod, Func, Args) of
+ Data when is_list(Data); is_binary(Data) ->
+ case wrap_characters_to_binary(Data, unicode, get(encoding)) of
+ Bin when is_binary(Bin) ->
+ put_chars(Bin, Port);
+ error ->
+ {error,{error,put_chars}}
+ end;
+ _ ->
+ {error,{error,put_chars}}
+ end;
+io_request({put_chars,latin1,Chars}, Port) ->
+ case catch unicode:characters_to_binary(Chars, latin1, get(encoding)) of
+ Data when is_binary(Data) ->
+ put_chars(Data, Port);
+ _ ->
+ {error,{error,put_chars}}
+ end;
+io_request({put_chars,latin1,Mod,Func,Args}, Port) ->
+ case catch apply(Mod, Func, Args) of
+ Data when is_list(Data); is_binary(Data) ->
+ case
+ catch unicode:characters_to_binary(Data, latin1, get(encoding))
+ of
+ Bin when is_binary(Bin) ->
+ put_chars(Bin, Port);
+ _ ->
+ {error,{error,put_chars}}
+ end;
+ _ ->
+ {error,{error,put_chars}}
+ end;
%% BC if called from pre-R13 node
io_request({put_chars,Chars}, Port) ->
io_request({put_chars,latin1,Chars}, Port);
@@ -134,10 +156,10 @@ io_request({get_geometry,rows},Port) ->
_ ->
{error,{error,enotsup}}
end;
-io_request({getopts,[]}, Port) ->
- getopts(Port);
-io_request({setopts,Opts}, Port) when is_list(Opts) ->
- setopts(Opts, Port);
+io_request(getopts, _Port) ->
+ getopts();
+io_request({setopts,Opts}, _Port) when is_list(Opts) ->
+ setopts(Opts);
io_request({requests,Reqs}, Port) ->
io_requests(Reqs, {ok,ok}, Port);
io_request(R, _Port) -> %Unknown request
@@ -176,47 +198,48 @@ io_reply(From, ReplyAs, Reply) ->
%% put_chars
put_chars(Chars, Port) when is_binary(Chars) ->
_ = put_port(Chars, Port),
- {ok,ok};
-put_chars(Chars, Port) ->
- case catch list_to_binary(Chars) of
- Binary when is_binary(Binary) ->
- put_chars(Binary, Port);
- _ ->
- {error,{error,put_chars}}
- end.
+ {ok,ok}.
%% setopts
-setopts(Opts0,Port) ->
- Opts = proplists:unfold(
- proplists:substitute_negations(
- [{latin1,unicode}],
- Opts0)),
+setopts(Opts0) ->
+ Opts = expand_encoding(Opts0),
case check_valid_opts(Opts) of
- true ->
- do_setopts(Opts,Port);
- false ->
- {error,{error,enotsup}}
+ true ->
+ do_setopts(Opts);
+ false ->
+ {error,{error,enotsup}}
end.
+
check_valid_opts([]) ->
true;
-check_valid_opts([{unicode,Valid}|T]) when Valid =:= true; Valid =:= utf8; Valid =:= false ->
+check_valid_opts([{encoding,Valid}|T]) when Valid =:= unicode;
+ Valid =:= utf8; Valid =:= latin1 ->
check_valid_opts(T);
check_valid_opts(_) ->
false.
-do_setopts(Opts, _Port) ->
- case proplists:get_value(unicode,Opts) of
- Valid when Valid =:= true; Valid =:= utf8 ->
- put(unicode,true);
- false ->
- put(unicode,false);
- undefined ->
- ok
+expand_encoding([]) ->
+ [];
+expand_encoding([latin1 | T]) ->
+ [{encoding,latin1} | expand_encoding(T)];
+expand_encoding([unicode | T]) ->
+ [{encoding,unicode} | expand_encoding(T)];
+expand_encoding([H|T]) ->
+ [H|expand_encoding(T)].
+
+do_setopts(Opts) ->
+ case proplists:get_value(encoding, Opts) of
+ Valid when Valid =:= unicode; Valid =:= utf8 ->
+ put(encoding, unicode);
+ latin1 ->
+ put(encoding, latin1);
+ undefined ->
+ ok
end,
{ok,ok}.
-getopts(_Port) ->
- Uni = {unicode, get(unicode) =:= true},
+getopts() ->
+ Uni = {encoding,get(encoding)},
{ok,[Uni]}.
wrap_characters_to_binary(Chars,From,To) ->
@@ -227,17 +250,17 @@ wrap_characters_to_binary(Chars,From,To) ->
_Else ->
16#10ffff
end,
- unicode:characters_to_binary(
- [ case X of
- $\n ->
- if
- TrNl ->
- "\r\n";
- true ->
- $\n
- end;
- High when High > Limit ->
- ["\\x{",erlang:integer_to_list(X, 16),$}];
- Ordinary ->
- Ordinary
- end || X <- unicode:characters_to_list(Chars,From) ],unicode,To).
+ case catch unicode:characters_to_list(Chars, From) of
+ L when is_list(L) ->
+ unicode:characters_to_binary(
+ [ case X of
+ $\n when TrNl ->
+ "\r\n";
+ High when High > Limit ->
+ ["\\x{",erlang:integer_to_list(X, 16),$}];
+ Low ->
+ Low
+ end || X <- L ], unicode, To);
+ _ ->
+ error
+ end.
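standard_error now understands the same {encoding, latin1 | unicode | utf8} option as other I/O devices instead of the old private {unicode, boolean()} flag, and conversion failures are turned into {error,{error,put_chars}} replies. A usage sketch through the normal io API:

    %% Switch standard_error to UTF-8 output and check the option:
    ok = io:setopts(standard_error, [{encoding, unicode}]),
    io:format(standard_error, "~ts~n", ["åäö"]),
    unicode = proplists:get_value(encoding, io:getopts(standard_error)).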
diff --git a/lib/kernel/src/user_drv.erl b/lib/kernel/src/user_drv.erl
index e6ce85c379..d3deca3a20 100644
--- a/lib/kernel/src/user_drv.erl
+++ b/lib/kernel/src/user_drv.erl
@@ -133,8 +133,9 @@ server1(Iport, Oport, Shell) ->
flatten(io_lib:format("~ts\n",
[erlang:system_info(system_version)]))},
Iport, Oport),
+
%% Enter the server loop.
- server_loop(Iport, Oport, Curr, User, Gr, queue:new()).
+ server_loop(Iport, Oport, Curr, User, Gr, {false, queue:new()}).
rem_sh_opts(Node) ->
[{expand_fun,fun(B)-> rpc:call(Node,edlin_expand,expand,[B]) end}].
@@ -164,7 +165,7 @@ server_loop(Iport, Oport, User, Gr, IOQueue) ->
put(current_group, Curr),
server_loop(Iport, Oport, Curr, User, Gr, IOQueue).
-server_loop(Iport, Oport, Curr, User, Gr, IOQueue) ->
+server_loop(Iport, Oport, Curr, User, Gr, {Resp, IOQ} = IOQueue) ->
receive
{Iport,{data,Bs}} ->
BsBin = list_to_binary(Bs),
@@ -181,9 +182,9 @@ server_loop(Iport, Oport, Curr, User, Gr, IOQueue) ->
{Oport,ok} ->
%% We get this ok from the port, in io_request we store
%% info about where to send reply at head of queue
- {{value,{Origin,Reply}},ReplyQ} = queue:out(IOQueue),
+ {Origin,Reply} = Resp,
Origin ! {reply,Reply},
- NewQ = handle_req(next, Iport, Oport, ReplyQ),
+ NewQ = handle_req(next, Iport, Oport, {false, IOQ}),
server_loop(Iport, Oport, Curr, User, Gr, NewQ);
{'EXIT',Iport,_R} ->
server_loop(Iport, Oport, Curr, User, Gr, IOQueue);
@@ -237,28 +238,30 @@ handle_req({Curr,get_unicode_state},Iport,_Oport,IOQueue) ->
handle_req({Curr,set_unicode_state, Bool},Iport,_Oport,IOQueue) ->
Curr ! {self(),set_unicode_state,set_unicode_state(Iport,Bool)},
IOQueue;
-handle_req(next,Iport,Oport,IOQueue) ->
- case queue:out(IOQueue) of
- {{value,Next},ExecQ} ->
- NewQ = handle_req(Next,Iport,Oport,queue:new()),
- queue:join(NewQ,ExecQ);
+handle_req(next,Iport,Oport,{false,IOQ}=IOQueue) ->
+ case queue:out(IOQ) of
{empty,_} ->
- IOQueue
- end;
-handle_req(Msg,Iport,Oport,IOQueue) ->
- case queue:peek(IOQueue) of
- empty ->
- {Origin,Req} = Msg,
+ IOQueue;
+ {{value,{Origin,Req}},ExecQ} ->
case io_request(Req, Iport, Oport) of
- ok -> IOQueue;
+ ok ->
+ handle_req(next,Iport,Oport,{false,ExecQ});
Reply ->
- %% Push reply info to front of queue
- queue:in_r({Origin,Reply},IOQueue)
- end;
- _Else ->
- %% All requests are queued when we have outstanding sync put_chars
- queue:in(Msg,IOQueue)
- end.
+ {{Origin,Reply}, ExecQ}
+ end
+ end;
+handle_req(Msg,Iport,Oport,{false,IOQ}=IOQueue) ->
+ empty = queue:peek(IOQ),
+ {Origin,Req} = Msg,
+ case io_request(Req, Iport, Oport) of
+ ok ->
+ IOQueue;
+ Reply ->
+ {{Origin,Reply}, IOQ}
+ end;
+handle_req(Msg,_Iport,_Oport,{Resp, IOQ}) ->
+ %% All requests are queued when we have outstanding sync put_chars
+ {Resp, queue:in(Msg,IOQ)}.
%% port_bytes(Bytes, InPort, OutPort, CurrentProcess, UserProcess, Group)
%% Check the Bytes from the port to see if it contains a ^G. If so,
@@ -315,6 +318,9 @@ handle_escape(Iport, Oport, User, Gr, IOQueue) ->
_ -> % {ok,jcl} | undefined
io_request({put_chars,unicode,"\nUser switch command\n"}, Iport, Oport),
+ %% init edlin used by switch command and have it copy the
+ %% text buffer from current group process
+ edlin:init(gr_cur_pid(Gr)),
server_loop(Iport, Oport, User, switch_loop(Iport, Oport, Gr), IOQueue)
end.
diff --git a/lib/kernel/test/Makefile b/lib/kernel/test/Makefile
index f1b8a105ed..ef351a25fb 100644
--- a/lib/kernel/test/Makefile
+++ b/lib/kernel/test/Makefile
@@ -77,7 +77,8 @@ MODULES= \
ignore_cores \
zlib_SUITE \
loose_node \
- sendfile_SUITE
+ sendfile_SUITE \
+ standard_error_SUITE
APP_FILES = \
appinc.app \
diff --git a/lib/kernel/test/application_SUITE.erl b/lib/kernel/test/application_SUITE.erl
index 4901206c8e..59efe85480 100644
--- a/lib/kernel/test/application_SUITE.erl
+++ b/lib/kernel/test/application_SUITE.erl
@@ -2699,10 +2699,7 @@ node_names(Names, Config) ->
node_name(Name, Config) ->
U = "_",
- {{Y,M,D}, {H,Min,S}} = calendar:now_to_local_time(now()),
- Date = io_lib:format("~4w_~2..0w_~2..0w__~2..0w_~2..0w_~2..0w",
- [Y,M,D, H,Min,S]),
- L = lists:flatten(Date),
+ L = integer_to_list(erlang:unique_integer([positive])),
lists:concat([Name,U,?testcase,U,U,L]).
stop_node_nice(Node) when is_atom(Node) ->
diff --git a/lib/kernel/test/code_SUITE.erl b/lib/kernel/test/code_SUITE.erl
index afedc17e57..c82aaf0582 100644
--- a/lib/kernel/test/code_SUITE.erl
+++ b/lib/kernel/test/code_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1396,8 +1396,9 @@ on_load_binary(_) ->
{tuple,6,[{atom,6,Mod},{call,6,{atom,6,self},[]}]}},
{'receive',7,[{clause,8,[{atom,8,go}],[],[{atom,8,ok}]}]}]}]},
{function,11,ok,0,[{clause,11,[],[],[{atom,11,true}]}]}],
- {ok,Mod,Bin} = compile:forms(Forms, [report]),
- [io:put_chars(erl_pp:form(F)) || F <- Forms],
+ Forms1 = erl_parse:new_anno(Forms),
+ {ok,Mod,Bin} = compile:forms(Forms1, [report]),
+ [io:put_chars(erl_pp:form(F)) || F <- Forms1],
{Pid1,Ref1} = spawn_monitor(fun() ->
code:load_binary(Mod, File, Bin),
@@ -1653,9 +1654,7 @@ get_mode(Config) when is_list(Config) ->
init(Tester) ->
{ok, Tester}.
-handle_event({error, _GL, {emulator, _, _}}, Tester) ->
- {ok, Tester};
-handle_event({error, _GL, Msg}, Tester) ->
+handle_event({warning_msg, _GL, Msg}, Tester) ->
Tester ! Msg,
{ok, Tester};
handle_event(_Event, State) ->
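In on_load_binary/1 above, the hand-written abstract code (plain integer line numbers) is now passed through erl_parse:new_anno/1 before compile:forms/2, matching the erl_anno-based abstract format. A minimal stand-alone sketch (module name m is made up):

    Forms0 = [{attribute,1,module,m},
              {attribute,2,export,[{f,0}]},
              {function,3,f,0,[{clause,3,[],[],[{atom,3,ok}]}]}],
    Forms  = erl_parse:new_anno(Forms0),   %% integer lines become erl_anno annotations
    {ok, m, _Bin} = compile:forms(Forms, [report]).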
diff --git a/lib/kernel/test/erl_distribution_SUITE.erl b/lib/kernel/test/erl_distribution_SUITE.erl
index 9cccdab76b..76564d4b0e 100644
--- a/lib/kernel/test/erl_distribution_SUITE.erl
+++ b/lib/kernel/test/erl_distribution_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,7 +26,8 @@
-export([tick/1, tick_change/1, illegal_nodenames/1, hidden_node/1,
table_waste/1, net_setuptime/1,
-
+ inet_dist_options_options/1,
+
monitor_nodes_nodedown_reason/1,
monitor_nodes_complex_nodedown_reason/1,
monitor_nodes_node_type/1,
@@ -38,7 +39,8 @@
monitor_nodes_many/1]).
%% Performs the test at another node.
--export([tick_cli_test/1, tick_cli_test1/1,
+-export([get_socket_priorities/0,
+ tick_cli_test/1, tick_cli_test1/1,
tick_serv_test/2, tick_serv_test1/1,
keep_conn/1, time_ping/1]).
@@ -62,7 +64,8 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
[tick, tick_change, illegal_nodenames, hidden_node,
- table_waste, net_setuptime, {group, monitor_nodes}].
+ table_waste, net_setuptime, inet_dist_options_options,
+ {group, monitor_nodes}].
groups() ->
[{monitor_nodes, [],
@@ -232,11 +235,10 @@ do_test_setuptime(Setuptime) when is_list(Setuptime) ->
Res.
time_ping(Node) ->
- T0 = erlang:now(),
+ T0 = erlang:monotonic_time(),
pang = net_adm:ping(Node),
- T1 = erlang:now(),
- time_diff(T0,T1).
-
+ T1 = erlang:monotonic_time(),
+ erlang:convert_time_unit(T1 - T0, native, milli_seconds).
%% Keep the connection with the client node up.
%% This is necessary as the client node runs with much shorter

@@ -273,13 +275,15 @@ tick_cli_test1(Node) ->
erlang:monitor_node(Node, true),
sleep(2),
rpc:call(Node, erlang, time, []), %% simulate action on the connection
- T1 = now(),
+ T1 = erlang:monotonic_time(),
receive
{nodedown, Node} ->
- T2 = now(),
+ T2 = erlang:monotonic_time(),
receive
{whats_the_result, From} ->
- case time_diff(T1, T2) of
+ Diff = erlang:convert_time_unit(T2-T1, native,
+ milli_seconds),
+ case Diff of
T when T > 8000, T < 16000 ->
From ! {tick_test, T};
T ->
@@ -554,6 +558,71 @@ check_monitor_nodes_res(Pid, Node) ->
end.
+
+inet_dist_options_options(suite) -> [];
+inet_dist_options_options(doc) ->
+ ["Check the kernel inet_dist_{listen,connect}_options options"];
+inet_dist_options_options(Config) when is_list(Config) ->
+ Prio = 1,
+ case gen_udp:open(0, [{priority,Prio}]) of
+ {ok,Socket} ->
+ case inet:getopts(Socket, [priority]) of
+ {ok,[{priority,Prio}]} ->
+ ok = gen_udp:close(Socket),
+ do_inet_dist_options_options(Prio);
+ _ ->
+ ok = gen_udp:close(Socket),
+ {skip,
+ "Can not set priority "++integer_to_list(Prio)++
+ " on socket"}
+ end;
+ {error,_} ->
+ {skip, "Can not set priority on socket"}
+ end.
+
+do_inet_dist_options_options(Prio) ->
+ PriorityString0 = "[{priority,"++integer_to_list(Prio)++"}]",
+ PriorityString =
+ case os:cmd("echo [{a,1}]") of
+ "[{a,1}]"++_ ->
+ PriorityString0;
+ _ ->
+ %% Some shells need quoting of [{}]
+ "'"++PriorityString0++"'"
+ end,
+ InetDistOptions =
+ "-hidden "
+ "-kernel inet_dist_connect_options "++PriorityString++" "
+ "-kernel inet_dist_listen_options "++PriorityString,
+ ?line {ok,Node1} =
+ start_node(inet_dist_options_1, InetDistOptions),
+ ?line {ok,Node2} =
+ start_node(inet_dist_options_2, InetDistOptions),
+ %%
+ ?line pong =
+ rpc:call(Node1, net_adm, ping, [Node2]),
+ ?line PrioritiesNode1 =
+ rpc:call(Node1, ?MODULE, get_socket_priorities, []),
+ ?line PrioritiesNode2 =
+ rpc:call(Node2, ?MODULE, get_socket_priorities, []),
+ ?line ?t:format("PrioritiesNode1 = ~p", [PrioritiesNode1]),
+ ?line ?t:format("PrioritiesNode2 = ~p", [PrioritiesNode2]),
+ ?line Elevated = [P || P <- PrioritiesNode1, P =:= Prio],
+ ?line Elevated = [P || P <- PrioritiesNode2, P =:= Prio],
+ ?line [_|_] = Elevated,
+ %%
+ ?line stop_node(Node2),
+ ?line stop_node(Node1),
+ ok.
+
+get_socket_priorities() ->
+ [Priority ||
+ {ok,[{priority,Priority}]} <-
+ [inet:getopts(Port, [priority]) ||
+ Port <- erlang:ports(),
+ element(2, erlang:port_info(Port, name)) =:= "tcp_inet"]].
+
+
%%
%% Testcase:
@@ -1140,19 +1209,6 @@ print_my_messages() ->
?line ?t:format("Messages: ~p~n", [Messages]),
?line ok.
-%% Time difference in milliseconds !!
-time_diff({TimeM, TimeS, TimeU}, {CurM, CurS, CurU}) when CurM > TimeM ->
- ((CurM - TimeM) * 1000000000) + sec_diff({TimeS, TimeU}, {CurS, CurU});
-time_diff({_, TimeS, TimeU}, {_, CurS, CurU}) ->
- sec_diff({TimeS, TimeU}, {CurS, CurU}).
-
-sec_diff({TimeS, TimeU}, {CurS, CurU}) when CurS > TimeS ->
- ((CurS - TimeS) * 1000) + micro_diff(TimeU, CurU);
-sec_diff({_, TimeU}, {_, CurU}) ->
- micro_diff(TimeU, CurU).
-
-micro_diff(TimeU, CurU) ->
- trunc(CurU/1000) - trunc(TimeU/1000).
sleep(T) -> receive after T * 1000 -> ok end.
@@ -1199,16 +1255,12 @@ get_nodenames(N, T) ->
get_nodenames(0, _, Acc) ->
Acc;
get_nodenames(N, T, Acc) ->
- {A, B, C} = now(),
+ U = erlang:unique_integer([positive]),
get_nodenames(N-1, T, [list_to_atom(atom_to_list(T)
++ "-"
- ++ atom_to_list(?MODULE)
+ ++ ?MODULE_STRING
++ "-"
- ++ integer_to_list(A)
- ++ "-"
- ++ integer_to_list(B)
- ++ "-"
- ++ integer_to_list(C)) | Acc]).
+ ++ integer_to_list(U)) | Acc]).
get_numbered_nodenames(N, T) ->
get_numbered_nodenames(N, T, []).
@@ -1216,16 +1268,12 @@ get_numbered_nodenames(N, T) ->
get_numbered_nodenames(0, _, Acc) ->
Acc;
get_numbered_nodenames(N, T, Acc) ->
- {A, B, C} = now(),
+ U = erlang:unique_integer([positive]),
NL = [list_to_atom(atom_to_list(T) ++ integer_to_list(N)
++ "-"
- ++ atom_to_list(?MODULE)
- ++ "-"
- ++ integer_to_list(A)
- ++ "-"
- ++ integer_to_list(B)
+ ++ ?MODULE_STRING
++ "-"
- ++ integer_to_list(C)) | Acc],
+ ++ integer_to_list(U)) | Acc],
get_numbered_nodenames(N-1, T, NL).
wait_until(Fun) ->
diff --git a/lib/kernel/test/erl_distribution_wb_SUITE.erl b/lib/kernel/test/erl_distribution_wb_SUITE.erl
index 3b8b2d9150..8e2bbf5b64 100644
--- a/lib/kernel/test/erl_distribution_wb_SUITE.erl
+++ b/lib/kernel/test/erl_distribution_wb_SUITE.erl
@@ -451,11 +451,8 @@ close_pair({Client, Server}) ->
%% MD5 hashing
%%
-%% This is no proper random number, but that is not really important in
-%% this test
gen_challenge() ->
- {_,_,N} = erlang:now(),
- N.
+ rand:uniform(1000000).
%% Generate a message digest from Challenge number and Cookie
gen_digest(Challenge, Cookie) when is_integer(Challenge), is_atom(Cookie) ->
@@ -712,13 +709,9 @@ get_nodenames(N, T) ->
get_nodenames(0, _, Acc) ->
Acc;
get_nodenames(N, T, Acc) ->
- {A, B, C} = now(),
- get_nodenames(N-1, T, [list_to_atom(atom_to_list(?MODULE)
+ U = erlang:unique_integer([positive]),
+ get_nodenames(N-1, T, [list_to_atom(?MODULE_STRING
++ "-"
++ atom_to_list(T)
++ "-"
- ++ integer_to_list(A)
- ++ "-"
- ++ integer_to_list(B)
- ++ "-"
- ++ integer_to_list(C)) | Acc]).
+ ++ integer_to_list(U)) | Acc]).
diff --git a/lib/kernel/test/error_logger_SUITE.erl b/lib/kernel/test/error_logger_SUITE.erl
index 05bf5aae18..1c2e56f083 100644
--- a/lib/kernel/test/error_logger_SUITE.erl
+++ b/lib/kernel/test/error_logger_SUITE.erl
@@ -32,7 +32,7 @@
error_report/1, info_report/1, error/1, info/1,
emulator/1, tty/1, logfile/1, add/1, delete/1]).
--export([generate_error/0]).
+-export([generate_error/2]).
-export([init/1,
handle_event/2, handle_call/2, handle_info/2,
@@ -210,13 +210,16 @@ emulator(suite) -> [];
emulator(doc) -> [];
emulator(Config) when is_list(Config) ->
?line error_logger:add_report_handler(?MODULE, self()),
- spawn(?MODULE, generate_error, []),
- reported(emulator),
+ Msg = "Error in process ~p on node ~p with exit value:~n~p~n",
+ Error = {badmatch,4},
+ Stack = [{module, function, 2, []}],
+ Pid = spawn(?MODULE, generate_error, [Error, Stack]),
+ reported(error, Msg, [Pid, node(), {Error, Stack}]),
?line my_yes = error_logger:delete_report_handler(?MODULE),
ok.
-generate_error() ->
- erlang:error({badmatch,4}).
+generate_error(Error, Stack) ->
+ erlang:raise(error, Error, Stack).
%%-----------------------------------------------------------------
%% We don't enables or disables tty error logging here. We do not
@@ -283,15 +286,6 @@ reported(Tag, Type, Report) ->
test_server:fail(no_report_received)
end.
-reported(emulator) ->
- receive
- {error, "~s~n", String} when is_list(String) ->
- test_server:messages_get(),
- ok
- after 1000 ->
- test_server:fail(no_report_received)
- end.
-
%%-----------------------------------------------------------------
%% The error_logger handler (gen_event behaviour).
%% Sends a notification to the Tester process about the events
diff --git a/lib/kernel/test/error_logger_warn_SUITE.erl b/lib/kernel/test/error_logger_warn_SUITE.erl
index 2bf467610e..fb576d77a3 100644
--- a/lib/kernel/test/error_logger_warn_SUITE.erl
+++ b/lib/kernel/test/error_logger_warn_SUITE.erl
@@ -21,8 +21,8 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
- basic/1,warnings_info/1,warnings_warnings/1,
- rb_basic/1,rb_warnings_info/1,rb_warnings_warnings/1,
+ basic/1,warnings_info/1,warnings_errors/1,
+ rb_basic/1,rb_warnings_info/1,rb_warnings_errors/1,
rb_trunc/1,rb_utc/1,file_utc/1]).
%% Internal exports.
@@ -48,8 +48,8 @@
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [basic, warnings_info, warnings_warnings, rb_basic,
- rb_warnings_info, rb_warnings_warnings, rb_trunc,
+ [basic, warnings_info, warnings_errors, rb_basic,
+ rb_warnings_info, rb_warnings_errors, rb_trunc,
rb_utc, file_utc].
groups() ->
@@ -88,11 +88,11 @@ warnings_info(Config) when is_list(Config) ->
put(elw_config,Config),
warnings_info().
-warnings_warnings(doc) ->
- ["Tests mapping warnings to warnings functionality"];
-warnings_warnings(Config) when is_list(Config) ->
+warnings_errors(doc) ->
+ ["Tests mapping warnings to errors functionality"];
+warnings_errors(Config) when is_list(Config) ->
put(elw_config,Config),
- warnings_warnings().
+ warnings_errors().
rb_basic(doc) ->
["Tests basic rb functionality"];
@@ -106,11 +106,11 @@ rb_warnings_info(Config) when is_list(Config) ->
put(elw_config,Config),
rb_warnings_info().
-rb_warnings_warnings(doc) ->
- ["Tests warnings as warnings rb functionality"];
-rb_warnings_warnings(Config) when is_list(Config) ->
+rb_warnings_errors(doc) ->
+ ["Tests warnings as errors rb functionality"];
+rb_warnings_errors(Config) when is_list(Config) ->
put(elw_config,Config),
- rb_warnings_warnings().
+ rb_warnings_errors().
rb_trunc(doc) ->
["Tests rb functionality on truncated data"];
@@ -159,6 +159,9 @@ install_relay(Node) ->
rpc:call(Node,error_logger,add_report_handler,[?MODULE,[self()]]).
+warning_map(Node) ->
+ rpc:call(Node,error_logger,warning_map,[]).
+
format(Node,A,B) ->
rpc:call(Node,error_logger,format,[A,B]).
error_msg(Node,A,B) ->
@@ -185,19 +188,20 @@ basic() ->
?line ok = install_relay(Node),
?line Self = self(),
?line GL = group_leader(),
+ ?line warning = warning_map(Node),
?line format(Node,"~p~n",[Self]),
?line ?EXPECT({handle_event,{error,GL,{_,"~p~n",[Self]}}}),
?line error_msg(Node,"~p~n",[Self]),
?line ?EXPECT({handle_event,{error,GL,{_,"~p~n",[Self]}}}),
?line warning_msg(Node,"~p~n",[Self]),
- ?line ?EXPECT({handle_event,{error,GL,{_,"~p~n",[Self]}}}),
+ ?line ?EXPECT({handle_event,{warning_msg,GL,{_,"~p~n",[Self]}}}),
?line info_msg(Node,"~p~n",[Self]),
?line ?EXPECT({handle_event,{info_msg,GL,{_,"~p~n",[Self]}}}),
?line Report = [{self,Self},{gl,GL},make_ref()],
?line error_report(Node,Report),
?line ?EXPECT({handle_event,{error_report,GL,{_,std_error,Report}}}),
?line warning_report(Node,Report),
- ?line ?EXPECT({handle_event,{error_report,GL,{_,std_error,Report}}}),
+ ?line ?EXPECT({handle_event,{warning_report,GL,{_,std_warning,Report}}}),
?line info_report(Node,Report),
?line ?EXPECT({handle_event,{info_report,GL,{_,std_info,Report}}}),
@@ -209,6 +213,7 @@ warnings_info() ->
?line ok = install_relay(Node),
?line Self = self(),
?line GL = group_leader(),
+ ?line info = warning_map(Node),
?line Report = [{self,Self},{gl,GL},make_ref()],
?line warning_msg(Node,"~p~n",[Self]),
?line ?EXPECT({handle_event,{info_msg,GL,{_,"~p~n",[Self]}}}),
@@ -217,16 +222,17 @@ warnings_info() ->
?line stop_node(Node),
ok.
-warnings_warnings() ->
- ?line Node = start_node(nn(),"+Ww"),
+warnings_errors() ->
+ ?line Node = start_node(nn(),"+We"),
?line ok = install_relay(Node),
?line Self = self(),
?line GL = group_leader(),
+ ?line error = warning_map(Node),
?line Report = [{self,Self},{gl,GL},make_ref()],
?line warning_msg(Node,"~p~n",[Self]),
- ?line ?EXPECT({handle_event,{warning_msg,GL,{_,"~p~n",[Self]}}}),
+ ?line ?EXPECT({handle_event,{error,GL,{_,"~p~n",[Self]}}}),
?line warning_report(Node,Report),
- ?line ?EXPECT({handle_event,{warning_report,GL,{_,std_warning,Report}}}),
+ ?line ?EXPECT({handle_event,{error_report,GL,{_,std_error,Report}}}),
?line stop_node(Node),
ok.
@@ -356,6 +362,7 @@ rb_basic() ->
"error_logger_mf_maxfiles 5"),
?line Self = self(),
?line GL = group_leader(),
+ ?line warning = warning_map(Node),
?line Report = [{self,Self},{gl,GL},make_ref()],
?line fake_gl(Node,warning_msg,"~p~n",[Self]),
?line fake_gl(Node,warning_report,Report),
@@ -363,10 +370,14 @@ rb_basic() ->
?line application:start(sasl),
?line rb:start([{report_dir, rd()}]),
?line rb:list(),
- ?line true = (one_rb_lines([error]) > 1),
- ?line true = (one_rb_lines([error_report]) > 1),
- ?line 1 = one_rb_findstr([error],pid_to_list(Self)),
- ?line 1 = one_rb_findstr([error_report],pid_to_list(Self)),
+ ?line true = (one_rb_lines([error]) =:= 0),
+ ?line true = (one_rb_lines([error_report]) =:= 0),
+ ?line 0 = one_rb_findstr([error],pid_to_list(Self)),
+ ?line 0 = one_rb_findstr([error_report],pid_to_list(Self)),
+ ?line 1 = one_rb_findstr([warning_msg],pid_to_list(Self)),
+ ?line 1 = one_rb_findstr([warning_report],pid_to_list(Self)),
+ ?line 0 = one_rb_findstr([info_msg],pid_to_list(Self)),
+ ?line 0 = one_rb_findstr([info_report],pid_to_list(Self)),
?line 2 = one_rb_findstr([],pid_to_list(Self)),
?line true = (one_rb_findstr([progress],"===") > 4),
?line rb:stop(),
@@ -381,6 +392,7 @@ rb_warnings_info() ->
"error_logger_mf_maxfiles 5"),
?line Self = self(),
?line GL = group_leader(),
+ ?line info = warning_map(Node),
?line Report = [{self,Self},{gl,GL},make_ref()],
?line fake_gl(Node,warning_msg,"~p~n",[Self]),
?line fake_gl(Node,warning_report,Report),
@@ -403,13 +415,14 @@ rb_warnings_info() ->
?line stop_node(Node),
ok.
-rb_warnings_warnings() ->
+rb_warnings_errors() ->
?line clean_rd(),
- ?line Node = start_node(nn(),"+W w -boot start_sasl -sasl error_logger_mf_dir "++
+ ?line Node = start_node(nn(),"+W e -boot start_sasl -sasl error_logger_mf_dir "++
quote(rd())++" error_logger_mf_maxbytes 5000 "
"error_logger_mf_maxfiles 5"),
?line Self = self(),
?line GL = group_leader(),
+ ?line error = warning_map(Node),
?line Report = [{self,Self},{gl,GL},make_ref()],
?line fake_gl(Node,warning_msg,"~p~n",[Self]),
?line fake_gl(Node,warning_report,Report),
@@ -417,12 +430,12 @@ rb_warnings_warnings() ->
?line application:start(sasl),
?line rb:start([{report_dir, rd()}]),
?line rb:list(),
- ?line true = (one_rb_lines([error]) =:= 0),
- ?line true = (one_rb_lines([error_report]) =:= 0),
- ?line 0 = one_rb_findstr([error],pid_to_list(Self)),
- ?line 0 = one_rb_findstr([error_report],pid_to_list(Self)),
- ?line 1 = one_rb_findstr([warning_msg],pid_to_list(Self)),
- ?line 1 = one_rb_findstr([warning_report],pid_to_list(Self)),
+ ?line true = (one_rb_lines([error]) > 1),
+ ?line true = (one_rb_lines([error_report]) > 1),
+ ?line 1 = one_rb_findstr([error],pid_to_list(Self)),
+ ?line 1 = one_rb_findstr([error_report],pid_to_list(Self)),
+ ?line 0 = one_rb_findstr([warning_msg],pid_to_list(Self)),
+ ?line 0 = one_rb_findstr([warning_report],pid_to_list(Self)),
?line 0 = one_rb_findstr([info_msg],pid_to_list(Self)),
?line 0 = one_rb_findstr([info_report],pid_to_list(Self)),
?line 2 = one_rb_findstr([],pid_to_list(Self)),
@@ -434,7 +447,7 @@ rb_warnings_warnings() ->
rb_trunc() ->
?line clean_rd(),
- ?line Node = start_node(nn(),"+W w -boot start_sasl -sasl error_logger_mf_dir "++
+ ?line Node = start_node(nn(),"-boot start_sasl -sasl error_logger_mf_dir "++
quote(rd())++" error_logger_mf_maxbytes 5000 "
"error_logger_mf_maxfiles 5"),
?line Self = self(),
@@ -467,7 +480,7 @@ rb_trunc() ->
rb_utc() ->
?line clean_rd(),
- ?line Node = start_node(nn(),"+W w -boot start_sasl -sasl error_logger_mf_dir "++
+ ?line Node = start_node(nn(),"-boot start_sasl -sasl error_logger_mf_dir "++
quote(rd())++" error_logger_mf_maxbytes 5000 "
"error_logger_mf_maxfiles 5 -sasl utc_log true"),
?line Self = self(),
@@ -500,7 +513,7 @@ rb_utc() ->
file_utc() ->
?line file:delete(lf()),
- ?line SS="+W w -stdlib utc_log true -kernel error_logger "++ oquote("{file,"++iquote(lf())++"}"),
+ ?line SS="-stdlib utc_log true -kernel error_logger "++ oquote("{file,"++iquote(lf())++"}"),
%erlang:display(SS),
?line Node = start_node(nn(),SS),
%erlang:display(rpc:call(Node,application,get_env,[kernel,error_logger])),
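These suites now also cover mapping warnings to errors (+W e) and rely on the new default mapping: error_logger:warning_map/0 reports how warning_msg/2 and warning_report/1 are routed. Quick sketch of the three mappings exercised above:

    %% erl +W e  -> error_logger:warning_map() =:= error   (delivered as error events)
    %% erl +W i  -> error_logger:warning_map() =:= info    (delivered as info events)
    %% default   -> error_logger:warning_map() =:= warning (delivered as warning events)
    case error_logger:warning_map() of
        error   -> warnings_are_errors;
        info    -> warnings_are_info;
        warning -> warnings_are_warnings
    end.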
diff --git a/lib/kernel/test/file_SUITE.erl b/lib/kernel/test/file_SUITE.erl
index 56c35678b6..48abc92e4c 100644
--- a/lib/kernel/test/file_SUITE.erl
+++ b/lib/kernel/test/file_SUITE.erl
@@ -93,6 +93,8 @@
-export([old_io_protocol/1]).
+-export([unicode_mode/1]).
+
%% Debug exports
-export([create_file_slow/2, create_file/2, create_bin/2]).
-export([verify_file/2, verify_bin/3]).
@@ -105,6 +107,7 @@
-include_lib("test_server/include/test_server.hrl").
-include_lib("kernel/include/file.hrl").
+-define(THROW_ERROR(RES), throw({fail, ?LINE, RES})).
suite() -> [{ct_hooks,[ts_install_cth]}].
@@ -116,7 +119,9 @@ all() ->
delayed_write, read_ahead, segment_read, segment_write,
ipread, pid2name, interleaved_read_write, otp_5814, otp_10852,
large_file, large_write, read_line_1, read_line_2, read_line_3,
- read_line_4, standard_io, old_io_protocol].
+ read_line_4, standard_io, old_io_protocol,
+ unicode_mode
+ ].
groups() ->
[{dirs, [], [make_del_dir, cur_dir_0, cur_dir_1,
@@ -347,7 +352,152 @@ old_io_protocol(Config) when is_list(Config) ->
[] = flush(),
ok.
+unicode_mode(suite) -> [];
+unicode_mode(doc) -> [""];
+unicode_mode(Config) ->
+ Dir = {dir, ?config(priv_dir,Config)},
+ OptVariants = [[Dir],
+ [Dir, {encoding, utf8}],
+ [Dir, binary],
+ [Dir, binary, {encoding, utf8}]
+ ],
+ ReadVariants = [{read, fun(Fd) -> um_read(Fd, fun(Fd1) -> file:read(Fd1, 1024) end) end},
+ {read_line, fun(Fd) -> um_read(Fd, fun(Fd1) -> file:read_line(Fd1) end) end}
+ %%{pread, fun(Fd) -> file:pread(Fd, 0, 1024) end},
+ %%{preadl, fun(Fd) -> file:pread(Fd, [{0, 1024}]) end},
+ ],
+
+ _ = [read_write_0("ASCII: list: Hello World", Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ _ = [read_write_0("LATIN1: list: åäöÅÄÖ", Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ _ = [read_write_0(<<"ASCII: bin: Hello World">>, Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ _ = [read_write_0(<<"LATIN1: bin: åäöÅÄÖ">>, Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ %% These will be double encoded if option is encoding utf-8
+ _ = [read_write_0(<<"UTF8: bin: Ωß"/utf8>>, Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ %% These should not work (with encoding set to utf-8)
+ %% according to file's documentation
+ _ = [read_write_0("UTF8: list: Ωß", Read, Opt) ||
+ Opt <- OptVariants, Read <- ReadVariants],
+ ok.
+
+read_write_0(Str, {Func, ReadFun}, Options) ->
+ try
+ Res = read_write_1(Str, ReadFun, Options),
+ io:format("~p: ~ts ~p '~p'~n", [Func, Str, tl(Options), Res]),
+ ok
+ catch {fail, Line, ReadBytes = [_|_]} ->
+ io:format("~p:~p: ~p ERROR: ~w vs~n ~w~n - ~p~n",
+ [?MODULE, Line, Func, Str, ReadBytes, Options]),
+ exit({error, ?LINE});
+ {fail, Line, ReadBytes} ->
+ io:format("~p:~p: ~p ERROR: ~ts vs~n ~w~n - ~p~n",
+ [?MODULE, Line, Func, Str, ReadBytes, Options]),
+ exit({error, ?LINE});
+ error:What ->
+ io:format("~p:??: ~p ERROR: ~p from~n ~w~n ~p~n",
+ [?MODULE, Func, What, Str, Options]),
+
+ io:format("\t~p~n", [erlang:get_stacktrace()]),
+ exit({error, ?LINE})
+ end.
+
+read_write_1(Str0, ReadFun, [{dir,Dir}|Options]) ->
+ File = um_filename(Str0, Dir, Options),
+ Pre = "line 1\n", Post = "\nlast line\n",
+ Str = case is_list(Str0) andalso lists:max(Str0) > 255 of
+ false -> %% Normal case Use options
+ {ok, FdW} = file:open(File, [write|Options]),
+ IO = [Pre, Str0, Post],
+ ok = file:write(FdW, IO),
+ case is_binary(Str0) of
+ true -> iolist_to_binary(IO);
+ false -> lists:append(IO)
+ end;
+ true -> %% Test unicode lists
+ {ok, FdW} = file:open(File, [write]),
+ Utf8 = unicode:characters_to_binary([Pre, Str0, Post]),
+ file:write(FdW, Utf8),
+ {unicode, Utf8}
+ end,
+ file:close(FdW),
+ {ok, FdR} = file:open(File, [read|Options]),
+ ReadRes = ReadFun(FdR),
+ file:close(FdR),
+ Res = um_check(Str, ReadRes, Options),
+ file:delete(File),
+ Res.
+
+
+um_read(Fd, Fun) ->
+ um_read(Fd, Fun, []).
+
+um_read(Fd, Fun, Acc) ->
+ case Fun(Fd) of
+ eof ->
+ case is_binary(hd(Acc)) of
+ true -> {ok, iolist_to_binary(lists:reverse(Acc))};
+ false -> {ok, lists:append(lists:reverse(Acc))}
+ end;
+ {ok, Data} ->
+ um_read(Fd, Fun, [Data|Acc]);
+ Error ->
+ Error
+ end.
+
+um_check(Str, {ok, Str}, _) -> ok;
+um_check(Bin, {ok, Res}, _Options) when is_binary(Bin), is_list(Res) ->
+ case list_to_binary(Res) of
+ Bin -> ok;
+ _ -> ?THROW_ERROR(Res)
+ end;
+um_check(Str, {ok, Res}, _Options) when is_list(Str), is_binary(Res) ->
+ case iolist_to_binary(Str) of
+ Res -> ok;
+ _ -> ?THROW_ERROR(Res)
+ end;
+um_check({unicode, Utf8Bin}, Res, Options) ->
+ um_check_unicode(Utf8Bin, Res,
+ proplists:get_value(binary, Options, false),
+ proplists:get_value(encoding, Options, none));
+um_check(_Str, Res, _Options) ->
+ ?THROW_ERROR(Res).
+
+um_check_unicode(Utf8Bin, {ok, Utf8Bin}, true, none) ->
+ ok;
+um_check_unicode(Utf8Bin, {ok, List = [_|_]}, false, none) ->
+ case binary_to_list(Utf8Bin) == List of
+ true -> ok;
+ false -> ?THROW_ERROR(List)
+ end;
+um_check_unicode(_Utf8Bin, {error, {no_translation, unicode, latin1}}, _, _) ->
+ no_translation;
+um_check_unicode(_Utf8Bin, Error = {error, _}, _, _Unicode) ->
+ ?THROW_ERROR(Error);
+um_check_unicode(_Utf8Bin, {ok, _ListOrBin}, _, _UTF8_) ->
+ %% List = if is_binary(ListOrBin) -> unicode:characters_to_list(ListOrBin);
+ %% true -> ListOrBin
+ %% end,
+ %% io:format("In: ~w~n", [binary_to_list(Utf8Bin)]),
+ %% io:format("Ut: ~w~n", [List]),
+    ?THROW_ERROR({should_be, no_translation}).
+
+um_filename(Bin, Dir, Options) when is_binary(Bin) ->
+ um_filename(binary_to_list(Bin), Dir, Options);
+um_filename(Str = [_|_], Dir, Options) ->
+ Name = hd(string:tokens(Str, ":")),
+ Enc = atom_to_list(proplists:get_value(encoding, Options, latin1)),
+ File = case lists:member(binary, Options) of
+ true ->
+ "test_" ++ Name ++ "_bin_enc_" ++ Enc;
+ false ->
+ "test_" ++ Name ++ "_list_enc_" ++ Enc
+ end,
+ filename:join(Dir, File).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -424,7 +574,7 @@ make_del_dir(Config) when is_list(Config) ->
?line ok = ?FILE_MODULE:del_dir(NewDir),
?line {error, enoent} = ?FILE_MODULE:del_dir(NewDir),
% Make sure we are not in a directory directly under test_server
- % as that would result in eacess errors when trying to delere '..',
+ % as that would result in eacces errors when trying to delete '..',
% because there are processes having that directory as current.
?line ok = ?FILE_MODULE:make_dir(NewDir),
?line {ok,CurrentDir} = file:get_cwd(),
@@ -3764,7 +3914,7 @@ response_analysis(Module, Function, Arguments) ->
receive {Parent, start, Ts} -> ok end,
Stat =
iterate(response_stat(response_stat(init, Ts),
- erlang:now()),
+ micro_ts()),
done,
fun (S) ->
erlang:yield(),
@@ -3772,12 +3922,12 @@ response_analysis(Module, Function, Arguments) ->
{Parent, stop} ->
done
after 0 ->
- response_stat(S, erlang:now())
+ response_stat(S, micro_ts())
end
end),
- Parent ! {self(), stopped, response_stat(Stat, erlang:now())}
+ Parent ! {self(), stopped, response_stat(Stat, micro_ts())}
end),
- ?line Child ! {Parent, start, erlang:now()},
+ Child ! {Parent, start, micro_ts()},
?line Result = apply(Module, Function, Arguments),
?line Child ! {Parent, stop},
?line {N, Sum, _, M, Max} = receive {Child, stopped, X} -> X end,
@@ -3791,12 +3941,13 @@ response_analysis(Module, Function, Arguments) ->
[Mean_ms, Max_ms, M, (N-1)])),
?line {Result, Comment}.
-
+micro_ts() ->
+ erlang:monotonic_time(micro_seconds).
response_stat(init, Ts) ->
{0, 0, Ts, 0, 0};
-response_stat({N, Sum, {A1, B1, C1}, M, Max}, {A2, B2, C2} = Ts) ->
- D = C2-C1 + 1000000*((B2-B1) + 1000000*(A2-A1)),
+response_stat({N, Sum, Ts0, M, Max}, Ts) ->
+ D = Ts - Ts0,
if D > Max ->
{N+1, Sum+D, Ts, N, D};
true ->
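unicode_mode/1 exercises file:open/2 with and without binary and {encoding, utf8}, reading the data back with file:read/2 and file:read_line/2. A minimal sketch of the round trip being tested (the file name is made up):

    {ok, W} = file:open("um_demo.txt", [write, {encoding, utf8}]),
    ok = file:write(W, "line 1\nåäöÅÄÖ\nlast line\n"),
    ok = file:close(W),
    {ok, R} = file:open("um_demo.txt", [read, {encoding, utf8}]),
    {ok, "line 1\n" ++ _} = file:read_line(R),
    ok = file:close(R).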
diff --git a/lib/kernel/test/gen_tcp_api_SUITE.erl b/lib/kernel/test/gen_tcp_api_SUITE.erl
index c27d265550..4a527e2f51 100644
--- a/lib/kernel/test/gen_tcp_api_SUITE.erl
+++ b/lib/kernel/test/gen_tcp_api_SUITE.erl
@@ -32,6 +32,7 @@
t_connect_bad/1,
t_recv_timeout/1, t_recv_eof/1,
t_shutdown_write/1, t_shutdown_both/1, t_shutdown_error/1,
+ t_shutdown_async/1,
t_fdopen/1, t_fdconnect/1, t_implicit_inet6/1]).
-export([getsockfd/0,closesockfd/1]).
@@ -41,7 +42,7 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
[{group, t_accept}, {group, t_connect}, {group, t_recv},
t_shutdown_write, t_shutdown_both, t_shutdown_error,
- t_fdopen, t_fdconnect, t_implicit_inet6].
+ t_shutdown_async, t_fdopen, t_fdconnect, t_implicit_inet6].
groups() ->
[{t_accept, [], [t_accept_timeout]},
@@ -155,7 +156,34 @@ t_shutdown_error(Config) when is_list(Config) ->
?line ok = gen_tcp:close(L),
?line {error, closed} = gen_tcp:shutdown(L, read_write),
ok.
-
+
+t_shutdown_async(Config) when is_list(Config) ->
+ ?line {OS, _} = os:type(),
+ ?line {ok, L} = gen_tcp:listen(0, [{sndbuf, 4096}]),
+ ?line {ok, Port} = inet:port(L),
+ ?line {ok, Client} = gen_tcp:connect(localhost, Port,
+ [{recbuf, 4096},
+ {active, false}]),
+ ?line {ok, S} = gen_tcp:accept(L),
+ ?line PayloadSize = 1024 * 1024,
+ ?line Payload = lists:duplicate(PayloadSize, $.),
+ ?line ok = gen_tcp:send(S, Payload),
+ ?line case erlang:port_info(S, queue_size) of
+ {queue_size, N} when N > 0 -> ok;
+ {queue_size, 0} when OS =:= win32 -> ok;
+ {queue_size, 0} = T -> ?t:fail({unexpected, T})
+ end,
+
+ ?line ok = gen_tcp:shutdown(S, write),
+ ?line {ok, Buf} = gen_tcp:recv(Client, PayloadSize),
+ ?line {error, closed} = gen_tcp:recv(Client, 0),
+ ?line case length(Buf) of
+ PayloadSize -> ok;
+ Sz -> ?t:fail({payload_size,
+ {expected, PayloadSize},
+ {received, Sz}})
+ end.
+
%%% gen_tcp:fdopen/2
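t_shutdown_async/1 checks that gen_tcp:shutdown(S, write) is asynchronous with respect to the send queue: output still queued in the driver is flushed to the peer before the peer sees end-of-file. The contract in short, reusing the names from the test above:

    ok = gen_tcp:send(S, Payload),
    ok = gen_tcp:shutdown(S, write),            %% half-close; queued data is not discarded
    {ok, Payload} = gen_tcp:recv(Client, PayloadSize),
    {error, closed} = gen_tcp:recv(Client, 0).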
diff --git a/lib/kernel/test/gen_tcp_misc_SUITE.erl b/lib/kernel/test/gen_tcp_misc_SUITE.erl
index 4e4aeb67e2..4f0d7a7d50 100644
--- a/lib/kernel/test/gen_tcp_misc_SUITE.erl
+++ b/lib/kernel/test/gen_tcp_misc_SUITE.erl
@@ -60,19 +60,19 @@ init_per_testcase(wrapping_oct, Config) when is_list(Config) ->
[{watchdog, Dog}|Config];
init_per_testcase(iter_max_socks, Config) when is_list(Config) ->
Dog = case os:type() of
- {win32,_} ->
- test_server:timetrap(test_server:minutes(30));
- _Else ->
- test_server:timetrap(test_server:seconds(240))
- end,
+ {win32,_} ->
+ test_server:timetrap(test_server:minutes(30));
+ _Else ->
+ test_server:timetrap(test_server:seconds(240))
+ end,
[{watchdog, Dog}|Config];
init_per_testcase(accept_system_limit, Config) when is_list(Config) ->
case os:type() of
- {ose,_} ->
- {skip,"Skip in OSE"};
- _ ->
- Dog = test_server:timetrap(test_server:seconds(240)),
- [{watchdog,Dog}|Config]
+ {ose,_} ->
+ {skip,"Skip in OSE"};
+ _ ->
+ Dog = test_server:timetrap(test_server:seconds(240)),
+ [{watchdog,Dog}|Config]
end;
init_per_testcase(wrapping_oct, Config) when is_list(Config) ->
Dog = test_server:timetrap(test_server:seconds(600)),
@@ -121,8 +121,6 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
-
-
default_options(doc) ->
["Tests kernel application variables inet_default_listen_options and "
"inet_default_connect_options"];
@@ -130,69 +128,68 @@ default_options(suite) ->
[];
default_options(Config) when is_list(Config) ->
%% First check the delay_send option
- ?line {true,true,true}=do_delay_send_1(),
- ?line {false,false,false}=do_delay_send_2(),
- ?line {true,false,false}=do_delay_send_3(),
- ?line {false,false,false}=do_delay_send_4(),
- ?line {false,false,false}=do_delay_send_5(),
- ?line {false,true,true}=do_delay_send_6(),
+ {true,true,true}=do_delay_send_1(),
+ {false,false,false}=do_delay_send_2(),
+ {true,false,false}=do_delay_send_3(),
+ {false,false,false}=do_delay_send_4(),
+ {false,false,false}=do_delay_send_5(),
+ {false,true,true}=do_delay_send_6(),
%% Now lets start some nodes with different combinations of options:
- ?line {true,true,true} = do_delay_on_other_node("",
- fun do_delay_send_1/0),
- ?line {true,false,false} =
+ {true,true,true} = do_delay_on_other_node("", fun do_delay_send_1/0),
+ {true,false,false} =
do_delay_on_other_node("-kernel inet_default_connect_options "
"\"[{delay_send,true}]\"",
fun do_delay_send_2/0),
- ?line {false,true,true} =
+ {false,true,true} =
do_delay_on_other_node("-kernel inet_default_listen_options "
"\"[{delay_send,true}]\"",
fun do_delay_send_2/0),
- ?line {true,true,true} =
+ {true,true,true} =
do_delay_on_other_node("-kernel inet_default_listen_options "
"\"[{delay_send,true}]\"",
fun do_delay_send_3/0),
- ?line {true,true,true} =
+ {true,true,true} =
do_delay_on_other_node("-kernel inet_default_connect_options "
"\"[{delay_send,true}]\"",
fun do_delay_send_6/0),
- ?line {false,false,false} =
+ {false,false,false} =
do_delay_on_other_node("-kernel inet_default_connect_options "
"\"[{delay_send,true}]\"",
fun do_delay_send_5/0),
- ?line {false,true,true} =
+ {false,true,true} =
do_delay_on_other_node("-kernel inet_default_connect_options "
"\"[{delay_send,true}]\" "
"-kernel inet_default_listen_options "
"\"[{delay_send,true}]\"",
fun do_delay_send_5/0),
- ?line {true,false,false} =
+ {true,false,false} =
do_delay_on_other_node("-kernel inet_default_connect_options "
"\"[{delay_send,true}]\" "
"-kernel inet_default_listen_options "
"\"[{delay_send,true}]\"",
fun do_delay_send_4/0),
- ?line {true,true,true} =
+ {true,true,true} =
do_delay_on_other_node("-kernel inet_default_connect_options "
"\"{delay_send,true}\" "
"-kernel inet_default_listen_options "
"\"{delay_send,true}\"",
fun do_delay_send_2/0),
%% Active is too dangerous and is suppressed
- ?line {true,true,true} =
+ {true,true,true} =
do_delay_on_other_node("-kernel inet_default_connect_options "
"\"{active,false}\" "
"-kernel inet_default_listen_options "
"\"{active,false}\"",
fun do_delay_send_7/0),
- ?line {true,true,true} =
+ {true,true,true} =
do_delay_on_other_node("-kernel inet_default_connect_options "
"\"[{active,false},{delay_send,true}]\" "
"-kernel inet_default_listen_options "
"\"[{active,false},{delay_send,true}]\"",
fun do_delay_send_7/0),
- ?line {true,true,true} =
+ {true,true,true} =
do_delay_on_other_node("-kernel inet_default_connect_options "
"\"[{active,false},{delay_send,true}]\" "
"-kernel inet_default_listen_options "
@@ -204,12 +201,10 @@ default_options(Config) when is_list(Config) ->
do_delay_on_other_node(XArgs, Function) ->
Dir = filename:dirname(code:which(?MODULE)),
{ok,Node} = test_server:start_node(test_default_options_slave,slave,
- [{args,"-pa " ++ Dir ++ " " ++
- XArgs}]),
+ [{args,"-pa " ++ Dir ++ " " ++ XArgs}]),
Res = rpc:call(Node,erlang,apply,[Function,[]]),
test_server:stop_node(Node),
Res.
-
do_delay_send_1() ->
{ok,LS}=gen_tcp:listen(0,[{delay_send,true}]),
@@ -301,8 +296,6 @@ do_delay_send_7() ->
gen_tcp:close(S),
gen_tcp:close(LS),
{B1,B2,B3}.
-
-
controlling_process(doc) ->
["Open a listen port and change controlling_process for it",
@@ -313,18 +306,18 @@ controlling_process(Config) when is_list(Config) ->
{ok,S} = gen_tcp:listen(0,[]),
Pid2 = spawn(?MODULE,not_owner,[S]),
Pid2 ! {self(),2,control},
- ?line {error, E} = receive {2,_E} ->
+ {error, E} = receive {2,_E} ->
_E
after 10000 -> timeout
end,
io:format("received ~p~n",[E]),
Pid = spawn(?MODULE,not_owner,[S]),
- ?line ok = gen_tcp:controlling_process(S,Pid),
+ ok = gen_tcp:controlling_process(S,Pid),
Pid ! {self(),1,control},
- ?line ok = receive {1,ok} ->
- ok
- after 1000 -> timeout
- end,
+ ok = receive {1,ok} ->
+ ok
+ after 1000 -> timeout
+ end,
Pid ! close.
not_owner(S) ->
@@ -377,7 +370,7 @@ no_accept(Config) when is_list(Config) ->
{tcp_closed, Client} ->
ok
after 5000 ->
- ?line test_server:fail(never_closed)
+ test_server:fail(never_closed)
end.
@@ -386,30 +379,30 @@ close_with_pending_output(doc) ->
"to the other end."];
close_with_pending_output(suite) -> [];
close_with_pending_output(Config) when is_list(Config) ->
- ?line {ok, L} = gen_tcp:listen(0, [binary, {active, false}]),
- ?line {ok, {_, Port}} = inet:sockname(L),
- ?line Packets = 16,
- ?line Total = 2048*Packets,
+ {ok, L} = gen_tcp:listen(0, [binary, {active, false}]),
+ {ok, {_, Port}} = inet:sockname(L),
+ Packets = 16,
+ Total = 2048*Packets,
case start_remote(close_pending) of
{ok, Node} ->
- ?line {ok, Host} = inet:gethostname(),
- ?line spawn_link(Node, ?MODULE, sender, [Port, Packets, Host]),
- ?line {ok, A} = gen_tcp:accept(L),
- ?line case gen_tcp:recv(A, Total) of
+ {ok, Host} = inet:gethostname(),
+ spawn_link(Node, ?MODULE, sender, [Port, Packets, Host]),
+ {ok, A} = gen_tcp:accept(L),
+ case gen_tcp:recv(A, Total) of
{ok, Bin} when byte_size(Bin) == Total ->
gen_tcp:close(A),
gen_tcp:close(L);
{ok, Bin} ->
- ?line test_server:fail({small_packet,
+ test_server:fail({small_packet,
byte_size(Bin)});
Error ->
- ?line test_server:fail({unexpected, Error})
+ test_server:fail({unexpected, Error})
end,
ok;
{error, no_remote_hosts} ->
{skipped,"No remote hosts"};
{error, Other} ->
- ?line ?t:fail({failed_to_start_slave_node, Other})
+ ?t:fail({failed_to_start_slave_node, Other})
end.
sender(Port, Packets, Host) ->
@@ -556,63 +549,62 @@ otp_3924(Config) when is_list(Config) ->
otp_3924_1(MaxDelay).
otp_3924_1(MaxDelay) ->
- ?line {ok, Node} = start_node(otp_3924),
- ?line DataLen = 100*1024,
- ?line Data = otp_3924_data(DataLen),
+ {ok, Node} = start_node(otp_3924),
+ DataLen = 100*1024,
+ Data = otp_3924_data(DataLen),
% Repeat the test a couple of times to prevent the test from passing
% by chance.
- repeat(10,
- fun (N) ->
- ?line ok = otp_3924(MaxDelay, Node, Data, DataLen, N)
- end),
- ?line test_server:stop_node(Node),
+ repeat(10, fun(N) ->
+ ok = otp_3924(MaxDelay, Node, Data, DataLen, N)
+ end),
+ test_server:stop_node(Node),
ok.
otp_3924(MaxDelay, Node, Data, DataLen, N) ->
- ?line {ok, L} = gen_tcp:listen(0, [list, {active, false}]),
- ?line {ok, {_, Port}} = inet:sockname(L),
- ?line {ok, Host} = inet:gethostname(),
- ?line Sender = spawn_link(Node,
- ?MODULE,
- otp_3924_sender,
- [self(), Host, Port, Data]),
- ?line Data = otp_3924_receive_data(L, Sender, MaxDelay, DataLen, N),
- ?line ok = gen_tcp:close(L).
+ {ok, L} = gen_tcp:listen(0, [list, {active, false}]),
+ {ok, {_, Port}} = inet:sockname(L),
+ {ok, Host} = inet:gethostname(),
+ Sender = spawn_link(Node,
+ ?MODULE,
+ otp_3924_sender,
+ [self(), Host, Port, Data]),
+ Data = otp_3924_receive_data(L, Sender, MaxDelay, DataLen, N),
+ ok = gen_tcp:close(L).
otp_3924_receive_data(LSock, Sender, MaxDelay, Len, N) ->
- ?line OP = process_flag(priority, max),
- ?line OTE = process_flag(trap_exit, true),
- ?line TimeoutRef = make_ref(),
- ?line Data = (catch begin
- ?line Sender ! start,
- ?line {ok, Sock} = gen_tcp:accept(LSock),
- ?line D = otp_3924_receive_data(Sock,
- TimeoutRef,
- MaxDelay,
- Len,
- [],
- 0),
- ?line ok = gen_tcp:close(Sock),
- D
- end),
- ?line unlink(Sender),
- ?line process_flag(trap_exit, OTE),
- ?line process_flag(priority, OP),
+ OP = process_flag(priority, max),
+ OTE = process_flag(trap_exit, true),
+ TimeoutRef = make_ref(),
+ Data = (catch begin
+ Sender ! start,
+ {ok, Sock} = gen_tcp:accept(LSock),
+ D = otp_3924_receive_data(Sock,
+ TimeoutRef,
+ MaxDelay,
+ Len,
+ [],
+ 0),
+ ok = gen_tcp:close(Sock),
+ D
+ end),
+ unlink(Sender),
+ process_flag(trap_exit, OTE),
+ process_flag(priority, OP),
receive
{'EXIT', _, TimeoutRef} ->
- ?line test_server:fail({close_not_fast_enough,MaxDelay,N});
+ test_server:fail({close_not_fast_enough,MaxDelay,N});
{'EXIT', Sender, Reason} ->
- ?line test_server:fail({sender_exited, Reason});
+ test_server:fail({sender_exited, Reason});
{'EXIT', _Other, Reason} ->
- ?line test_server:fail({linked_process_exited, Reason})
+ test_server:fail({linked_process_exited, Reason})
after 0 ->
case Data of
{'EXIT', {A,B}} ->
- ?line test_server:fail({A,B,N});
+ test_server:fail({A,B,N});
{'EXIT', Failure} ->
- ?line test_server:fail(Failure);
+ test_server:fail(Failure);
_ ->
- ?line Data
+ Data
end
end.
@@ -623,12 +615,12 @@ otp_3924_receive_data(Sock, TimeoutRef, MaxDelay, Len, Acc, AccLen) ->
NewAccLen = AccLen + length(Data),
if
NewAccLen == Len ->
- ?line {ok, TRef} = timer:exit_after(MaxDelay,
+ {ok, TRef} = timer:exit_after(MaxDelay,
self(),
TimeoutRef),
- ?line {error, closed} = gen_tcp:recv(Sock, 0),
- ?line timer:cancel(TRef),
- ?line lists:flatten([Acc, Data]);
+ {error, closed} = gen_tcp:recv(Sock, 0),
+ timer:cancel(TRef),
+ lists:flatten([Acc, Data]);
NewAccLen > Len ->
exit({received_too_much, NewAccLen});
true ->
@@ -713,8 +705,8 @@ get_status(doc) ->
"is called."];
get_status(suite) -> [];
get_status(Config) when is_list(Config) ->
- ?line {ok,{socket,Pid,_,_}} = gen_tcp:listen(5678,[]),
- ?line {status,Pid,_,_} = sys:get_status(Pid).
+ {ok,{socket,Pid,_,_}} = gen_tcp:listen(5678,[]),
+ {status,Pid,_,_} = sys:get_status(Pid).
-define(RECOVER_SLEEP, 60000).
-define(RETRY_SLEEP, 15000).
@@ -744,19 +736,19 @@ do_iter_max_socks(N, failed) ->
MS = max_socks(),
[MS|do_iter_max_socks(N-1, failed)];
do_iter_max_socks(N, First) when is_integer(First) ->
- ?line MS = max_socks(),
+ MS = max_socks(),
if MS == First ->
- ?line [MS|do_iter_max_socks(N-1, First)];
+ [MS|do_iter_max_socks(N-1, First)];
true ->
- ?line io:format("Sleeping for ~p seconds...~n",
+ io:format("Sleeping for ~p seconds...~n",
[?RETRY_SLEEP/1000]),
- ?line ?t:sleep(?RETRY_SLEEP),
- ?line io:format("Trying again...~n", []),
- ?line RetryMS = max_socks(),
- ?line if RetryMS == First ->
- ?line [RetryMS|do_iter_max_socks(N-1, First)];
+ ?t:sleep(?RETRY_SLEEP),
+ io:format("Trying again...~n", []),
+ RetryMS = max_socks(),
+ if RetryMS == First ->
+ [RetryMS|do_iter_max_socks(N-1, First)];
true ->
- ?line [RetryMS|do_iter_max_socks(N-1, failed)]
+ [RetryMS|do_iter_max_socks(N-1, failed)]
end
end.
@@ -768,7 +760,7 @@ all_equal([Rule | T]) ->
all_equal(Rule, [Rule | T]) ->
all_equal(Rule, T);
all_equal(_, [_ | _]) ->
- ?line ?t:sleep(?RECOVER_SLEEP), % Wait a while and *hope* that we'll
+ ?t:sleep(?RECOVER_SLEEP), % Wait a while and *hope* that we'll
% recover so other tests won't be
% affected.
?t:fail(max_socket_mismatch);
@@ -776,9 +768,9 @@ all_equal(_Rule, []) ->
ok.
max_socks() ->
- ?line Socks = open_socks(),
- ?line N = length(Socks),
- ?line lists:foreach(fun(S) -> ok = gen_tcp:close(S) end, Socks),
+ Socks = open_socks(),
+ N = length(Socks),
+ lists:foreach(fun(S) -> ok = gen_tcp:close(S) end, Socks),
io:format("Got ~p sockets", [N]),
N.
@@ -817,18 +809,18 @@ passive_sockets(doc) ->
["Tests that when 'the other side' on a passive socket closes, the connecting",
"side still can read until the end of data."];
passive_sockets(Config) when is_list(Config) ->
- ?line spawn_link(?MODULE, passive_sockets_server,
- [[{active,false}],self()]),
- ?line receive
- {socket,Port} -> ok
- end,
+ spawn_link(?MODULE, passive_sockets_server,
+ [[{active,false}],self()]),
+ receive
+ {socket,Port} -> ok
+ end,
?t:sleep(500),
- ?line case gen_tcp:connect("localhost", Port, [{active, false}]) of
- {ok, Sock} ->
- passive_sockets_read(Sock);
- Error ->
- ?t:fail({"Could not connect to server", Error})
- end.
+ case gen_tcp:connect("localhost", Port, [{active, false}]) of
+ {ok, Sock} ->
+ passive_sockets_read(Sock);
+ Error ->
+ ?t:fail({"Could not connect to server", Error})
+ end.
%%
%% Read until we get an {error, closed}. If we get another error, this test case
@@ -847,58 +839,58 @@ passive_sockets_read(Sock) ->
end.
passive_sockets_server(Opts, Parent) ->
- ?line case gen_tcp:listen(0, Opts) of
- {ok, LSock} ->
- {ok,{_,Port}} = inet:sockname(LSock),
- Parent ! {socket,Port},
- passive_sockets_server_accept(LSock);
- Error ->
- ?t:fail({"Could not create listen socket", Error})
- end.
+ case gen_tcp:listen(0, Opts) of
+ {ok, LSock} ->
+ {ok,{_,Port}} = inet:sockname(LSock),
+ Parent ! {socket,Port},
+ passive_sockets_server_accept(LSock);
+ Error ->
+ ?t:fail({"Could not create listen socket", Error})
+ end.
passive_sockets_server_accept(Sock) ->
- ?line case gen_tcp:accept(Sock) of
- {ok, Socket} ->
- ?t:sleep(500), % Simulate latency
- passive_sockets_server_send(Socket, 5),
- passive_sockets_server_accept(Sock);
- Error ->
- ?t:fail({"Could not accept connection", Error})
- end.
+ case gen_tcp:accept(Sock) of
+ {ok, Socket} ->
+ ?t:sleep(500), % Simulate latency
+ passive_sockets_server_send(Socket, 5),
+ passive_sockets_server_accept(Sock);
+ Error ->
+ ?t:fail({"Could not accept connection", Error})
+ end.
passive_sockets_server_send(Socket, 0) ->
io:format("Closing other end..~n", []),
gen_tcp:close(Socket);
passive_sockets_server_send(Socket, X) ->
- ?line Data = lists:duplicate(1024*X, $a),
- ?line case gen_tcp:send(Socket, Data) of
- ok ->
- ?t:sleep(50), % Simulate some processing.
- passive_sockets_server_send(Socket, X-1);
- {error, _Reason} ->
- ?t:fail("Failed to send data")
- end.
+ Data = lists:duplicate(1024*X, $a),
+ case gen_tcp:send(Socket, Data) of
+ ok ->
+ ?t:sleep(50), % Simulate some processing.
+ passive_sockets_server_send(Socket, X-1);
+ {error, _Reason} ->
+ ?t:fail("Failed to send data")
+ end.
accept_closed_by_other_process(doc) ->
["Tests the return value from gen_tcp:accept when ",
"the socket is closed from another process. (OTP-3817)"];
accept_closed_by_other_process(Config) when is_list(Config) ->
- ?line Parent = self(),
- ?line {ok, ListenSocket} = gen_tcp:listen(0, []),
- ?line Child =
+ Parent = self(),
+ {ok, ListenSocket} = gen_tcp:listen(0, []),
+ Child =
spawn_link(
fun() ->
Parent ! {self(), gen_tcp:accept(ListenSocket)}
end),
- ?line receive after 1000 -> ok end,
- ?line ok = gen_tcp:close(ListenSocket),
- ?line receive
- {Child, {error, closed}} ->
- ok;
- {Child, Other} ->
- ?t:fail({"Wrong result of gen_tcp:accept", Other})
- end.
+ receive after 1000 -> ok end,
+ ok = gen_tcp:close(ListenSocket),
+ receive
+ {Child, {error, closed}} ->
+ ok;
+ {Child, Other} ->
+ ?t:fail({"Wrong result of gen_tcp:accept", Other})
+ end.
repeat(N, Fun) ->
repeat(N, N, Fun).
@@ -915,9 +907,9 @@ closed_socket(suite) ->
closed_socket(doc) ->
["Tests the response when using a closed socket as argument"];
closed_socket(Config) when is_list(Config) ->
- ?line {ok, LS1} = gen_tcp:listen(0, []),
- ?line erlang:yield(),
- ?line ok = gen_tcp:close(LS1),
+ {ok, LS1} = gen_tcp:listen(0, []),
+ erlang:yield(),
+ ok = gen_tcp:close(LS1),
%% If the following delay is uncommented, the result error values
%% below will change from {error, einval} to {error, closed} since
%% inet_db then will have noticed that the socket is closed.
@@ -925,19 +917,18 @@ closed_socket(Config) when is_list(Config) ->
%% in inet_db processes the 'EXIT' message from the port,
%% the socket is unregistered.
%%
- %% ?line test_server:sleep(test_server:seconds(2)),
+ %% test_server:sleep(test_server:seconds(2)),
%%
- ?line {error, R_send} = gen_tcp:send(LS1, "data"),
- ?line {error, R_recv} = gen_tcp:recv(LS1, 17),
- ?line {error, R_accept} = gen_tcp:accept(LS1),
- ?line {error, R_controlling_process} =
+ {error, R_send} = gen_tcp:send(LS1, "data"),
+ {error, R_recv} = gen_tcp:recv(LS1, 17),
+ {error, R_accept} = gen_tcp:accept(LS1),
+ {error, R_controlling_process} =
gen_tcp:controlling_process(LS1, self()),
%%
- ?line ok = io:format("R_send = ~p~n", [R_send]),
- ?line ok = io:format("R_recv = ~p~n", [R_recv]),
- ?line ok = io:format("R_accept = ~p~n", [R_accept]),
- ?line ok = io:format("R_controlling_process = ~p~n",
- [R_controlling_process]),
+ ok = io:format("R_send = ~p~n", [R_send]),
+ ok = io:format("R_recv = ~p~n", [R_recv]),
+ ok = io:format("R_accept = ~p~n", [R_accept]),
+ ok = io:format("R_controlling_process = ~p~n", [R_controlling_process]),
ok.
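%% Aside (illustrative sketch, not part of this patch): the comment above
%% explains why the calls report {error,einval} rather than {error,closed}:
%% inet_db only marks the socket as closed once it has processed the port's
%% 'EXIT' message. A minimal stand-alone sketch of that timing dependence
%% (the function name and the 2-second delay are assumptions, not taken
%% from the suite):
closed_socket_error_demo() ->
    {ok, LS} = gen_tcp:listen(0, []),
    ok = gen_tcp:close(LS),
    %% Immediately after close the port is gone, but inet_db has likely
    %% not yet seen its 'EXIT', so einval is the expected result here.
    Early = gen_tcp:send(LS, "data"),
    timer:sleep(2000),
    %% By now inet_db should have unregistered the socket, so the same
    %% call is expected to report closed instead.
    Late = gen_tcp:send(LS, "data"),
    io:format("early: ~p, late: ~p~n", [Early, Late]).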
%%%
@@ -945,28 +936,27 @@ closed_socket(Config) when is_list(Config) ->
%%%
shutdown_active(Config) when is_list(Config) ->
- ?line shutdown_common(true).
+ shutdown_common(true).
shutdown_passive(Config) when is_list(Config) ->
- ?line shutdown_common(false).
+ shutdown_common(false).
shutdown_common(Active) ->
- ?line P = sort_server(Active),
+ P = sort_server(Active),
io:format("Sort server port: ~p\n", [P]),
- ?line do_sort(P, []),
- ?line do_sort(P, ["glurf"]),
- ?line do_sort(P, ["abc","nisse","dum"]),
-
- ?line do_sort(P, [lists:reverse(integer_to_list(I)) || I <- lists:seq(25, 255)]),
- ?line do_sort(P, [lists:reverse(integer_to_list(I)) || I <- lists:seq(77, 999)]),
- ?line do_sort(P, [lists:reverse(integer_to_list(I)) || I <- lists:seq(25, 55)]),
- ?line do_sort(P, []),
- ?line do_sort(P, ["apa"]),
- ?line do_sort(P, ["kluns","gorilla"]),
- ?line do_sort(P, [lists:reverse(integer_to_list(I)) || I <- lists:seq(25, 1233)]),
- ?line do_sort(P, []),
-
+ do_sort(P, []),
+ do_sort(P, ["glurf"]),
+ do_sort(P, ["abc","nisse","dum"]),
+
+ do_sort(P, [lists:reverse(integer_to_list(I)) || I <- lists:seq(25, 255)]),
+ do_sort(P, [lists:reverse(integer_to_list(I)) || I <- lists:seq(77, 999)]),
+ do_sort(P, [lists:reverse(integer_to_list(I)) || I <- lists:seq(25, 55)]),
+ do_sort(P, []),
+ do_sort(P, ["apa"]),
+ do_sort(P, ["kluns","gorilla"]),
+ do_sort(P, [lists:reverse(integer_to_list(I)) || I <- lists:seq(25, 1233)]),
+ do_sort(P, []),
receive
Any ->
?t:fail({unexpected_message,Any})
@@ -985,14 +975,14 @@ do_sort(P, List0) ->
sort_server(Active) ->
Opts = [{exit_on_close,false},{packet,line},{active,Active}],
- ?line {ok,L} = gen_tcp:listen(0, Opts),
+ {ok,L} = gen_tcp:listen(0, Opts),
Go = make_ref(),
- ?line Pid = spawn_link(fun() ->
- receive Go -> sort_server_1(L, Active) end
- end),
- ?line ok = gen_tcp:controlling_process(L, Pid),
- ?line Pid ! Go,
- ?line {ok,Port} = inet:port(L),
+ Pid = spawn_link(fun() ->
+ receive Go -> sort_server_1(L, Active) end
+ end),
+ ok = gen_tcp:controlling_process(L, Pid),
+ Pid ! Go,
+ {ok,Port} = inet:port(L),
Port.
sort_server_1(L, Active) ->
@@ -1042,17 +1032,17 @@ shutdown_pending(Config) when is_list(Config) ->
Data = [<<N:32>>,ones(N),42],
P = a_server(),
io:format("Server port: ~p\n", [P]),
- ?line {ok,S} = gen_tcp:connect(localhost, P, []),
- ?line gen_tcp:send(S, Data),
- ?line gen_tcp:shutdown(S, write),
- ?line receive
- {tcp,S,Msg} ->
- io:format("~p\n", [Msg]),
- ?line N = list_to_integer(Msg) - 5;
- Other ->
- ?t:fail({unexpected,Other})
- end,
- ok.
+ {ok,S} = gen_tcp:connect(localhost, P, []),
+ gen_tcp:send(S, Data),
+ gen_tcp:shutdown(S, write),
+ receive
+ {tcp,S,Msg} ->
+ io:format("~p\n", [Msg]),
+ N = list_to_integer(Msg) - 5;
+ Other ->
+ ?t:fail({unexpected,Other})
+ end,
+ ok.
ones(0) -> [];
ones(1) -> [1];
@@ -1065,10 +1055,10 @@ shutdown_pending(Config) when is_list(Config) ->
end.
a_server() ->
- ?line {ok,L} = gen_tcp:listen(0, [{exit_on_close,false},{active,false}]),
- ?line Pid = spawn_link(fun() -> a_server(L) end),
- ?line ok = gen_tcp:controlling_process(L, Pid),
- ?line {ok,Port} = inet:port(L),
+ {ok,L} = gen_tcp:listen(0, [{exit_on_close,false},{active,false}]),
+ Pid = spawn_link(fun() -> a_server(L) end),
+ ok = gen_tcp:controlling_process(L, Pid),
+ {ok,Port} = inet:port(L),
Port.
a_server(L) ->
@@ -1090,19 +1080,18 @@ shutdown_pending(Config) when is_list(Config) ->
%% corrupt data. The testcase will be killed by the timetrap timeout
%% if the bug is present.
http_bad_packet(Config) when is_list(Config) ->
- ?line {ok,L} = gen_tcp:listen(0,
- [{active, false},
- binary,
- {reuseaddr, true},
- {packet, http}]),
- ?line {ok,Port} = inet:port(L),
- ?line spawn_link(fun() -> erlang:yield(), http_bad_client(Port) end),
- ?line case gen_tcp:accept(L) of
- {ok,S} ->
- http_worker(S);
- Err ->
- exit({accept,Err})
- end.
+ {ok,L} = gen_tcp:listen(0, [{active, false},
+ binary,
+ {reuseaddr, true},
+ {packet, http}]),
+ {ok,Port} = inet:port(L),
+ spawn_link(fun() -> erlang:yield(), http_bad_client(Port) end),
+ case gen_tcp:accept(L) of
+ {ok,S} ->
+ http_worker(S);
+ Err ->
+ exit({accept,Err})
+ end.
http_worker(S) ->
case gen_tcp:recv(S, 0, 30000) of
@@ -1122,9 +1111,9 @@ http_bad_client(Port) ->
%% Fill send queue and then start receiving.
%%
busy_send(Config) when is_list(Config) ->
- ?line Master = self(),
- ?line Msg = <<"the quick brown fox jumps over a lazy dog~n">>,
- ?line Server =
+ Master = self(),
+ Msg = <<"the quick brown fox jumps over a lazy dog~n">>,
+ Server =
spawn_link(fun () ->
{ok,L} = gen_tcp:listen
(0, [{active,false},binary,
@@ -1134,45 +1123,42 @@ busy_send(Config) when is_list(Config) ->
busy_send_client(Port, Master, Msg)},
busy_send_srv(L, Master, Msg)
end),
- ?line io:format("~p Server~n", [Server]),
- ?line receive
- {Server,client,Client} ->
- ?line io:format("~p Client~n", [Client]),
- ?line busy_send_loop(Server, Client, 0)
- end.
+ io:format("~p Server~n", [Server]),
+ receive
+ {Server,client,Client} ->
+ io:format("~p Client~n", [Client]),
+ busy_send_loop(Server, Client, 0)
+ end.
busy_send_loop(Server, Client, N) ->
%% Master
%%
- ?line receive {Server,send} ->
+ receive {Server,send} ->
busy_send_loop(Server, Client, N+1)
after 2000 ->
%% Send queue full, sender blocked
%% -> stop sender and release client
- ?line io:format("Send timeout, time to receive...~n", []),
- ?line Server ! {self(),close},
- ?line Client ! {self(),recv,N+1},
- ?line receive
- {Server,send} ->
- ?line busy_send_2(Server, Client, N+1)
- after 10000 ->
- %% If this happens, see busy_send_srv
- ?t:fail({timeout,{server,not_send,flush([])}})
- end
- end.
+ io:format("Send timeout, time to receive...~n", []),
+ Server ! {self(),close},
+ Client ! {self(),recv,N+1},
+ receive
+ {Server,send} ->
+ busy_send_2(Server, Client, N+1)
+ after 10000 ->
+ %% If this happens, see busy_send_srv
+ ?t:fail({timeout,{server,not_send,flush([])}})
+ end
+ end.
busy_send_2(Server, Client, _N) ->
%% Master
%%
- ?line receive
- {Server,[closed]} ->
- ?line receive
- {Client,[0,{error,closed}]} ->
- ok
- end
- after 10000 ->
- ?t:fail({timeout,{server,not_closed,flush([])}})
- end.
+ receive
+ {Server,[closed]} ->
+ receive {Client,[0,{error,closed}]} -> ok end
+ after 10000 ->
+ ?t:fail({timeout,{server,not_closed,flush([])}})
+ end.
busy_send_srv(L, Master, Msg) ->
%% Server
@@ -1228,7 +1214,7 @@ busy_send_client_loop(Socket, Master, Msg, N) ->
busy_disconnect_passive(Config) when is_list(Config) ->
MuchoData = list_to_binary(ones(64*1024)),
- ?line [do_busy_disconnect_passive(MuchoData) || _ <- lists:seq(1, 10)],
+ [do_busy_disconnect_passive(MuchoData) || _ <- lists:seq(1, 10)],
ok.
do_busy_disconnect_passive(MuchoData) ->
@@ -1236,8 +1222,8 @@ do_busy_disconnect_passive(MuchoData) ->
busy_disconnect_passive_send(S, MuchoData).
busy_disconnect_passive_send(S, Data) ->
- ?line case gen_tcp:send(S, Data) of
- ok -> ?line busy_disconnect_passive_send(S, Data);
+ case gen_tcp:send(S, Data) of
+ ok -> busy_disconnect_passive_send(S, Data);
{error,closed} -> ok
end.
@@ -1248,7 +1234,7 @@ busy_disconnect_passive_send(S, Data) ->
%%%
busy_disconnect_active(Config) when is_list(Config) ->
MuchoData = list_to_binary(ones(64*1024)),
- ?line [do_busy_disconnect_active(MuchoData) || _ <- lists:seq(1, 10)],
+ [do_busy_disconnect_active(MuchoData) || _ <- lists:seq(1, 10)],
ok.
do_busy_disconnect_active(MuchoData) ->
@@ -1256,21 +1242,21 @@ do_busy_disconnect_active(MuchoData) ->
busy_disconnect_active_send(S, MuchoData).
busy_disconnect_active_send(S, Data) ->
- ?line case gen_tcp:send(S, Data) of
- ok -> ?line busy_disconnect_active_send(S, Data);
+ case gen_tcp:send(S, Data) of
+ ok -> busy_disconnect_active_send(S, Data);
{error,closed} ->
receive
{tcp_closed,S} -> ok;
- _Other -> ?line ?t:fail()
+ _Other -> ?t:fail()
end
end.
busy_disconnect_prepare_server(ConnectOpts) ->
- ?line Sender = self(),
- ?line Server = spawn_link(fun() -> busy_disconnect_server(Sender) end),
+ Sender = self(),
+ Server = spawn_link(fun() -> busy_disconnect_server(Sender) end),
receive {port,Server,Port} -> ok end,
- ?line {ok,S} = gen_tcp:connect(localhost, Port, ConnectOpts),
+ {ok,S} = gen_tcp:connect(localhost, Port, ConnectOpts),
Server ! {Sender,sending},
S.
@@ -1304,8 +1290,8 @@ busy_disconnect_server_wait_for_busy(Sender, S) ->
%%% Fill send queue
%%%
fill_sendq(Config) when is_list(Config) ->
- ?line Master = self(),
- ?line Server =
+ Master = self(),
+ Server =
spawn_link(fun () ->
{ok,L} = gen_tcp:listen
(0, [{active,false},binary,
@@ -1315,12 +1301,12 @@ fill_sendq(Config) when is_list(Config) ->
fill_sendq_client(Port, Master)},
fill_sendq_srv(L, Master)
end),
- ?line io:format("~p Server~n", [Server]),
- ?line receive {Server,client,Client} ->
- ?line io:format("~p Client~n", [Client]),
- ?line receive {Server,reader,Reader} ->
- ?line io:format("~p Reader~n", [Reader]),
- ?line fill_sendq_loop(Server, Client, Reader)
+ io:format("~p Server~n", [Server]),
+ receive {Server,client,Client} ->
+ io:format("~p Client~n", [Client]),
+ receive {Server,reader,Reader} ->
+ io:format("~p Reader~n", [Reader]),
+ fill_sendq_loop(Server, Client, Reader)
end
end.
@@ -1331,21 +1317,21 @@ fill_sendq_loop(Server, Client, Reader) ->
fill_sendq_loop(Server, Client, Reader)
after 2000 ->
%% Send queue full, sender blocked -> close client.
- ?line io:format("Send timeout, closing Client...~n", []),
- ?line Client ! {self(),close},
- ?line receive {Server,[{error,closed}]} ->
- ?line io:format("Got server closed.~n"),
- ?line receive {Reader,[{error,closed}]} ->
- ?line io:format
+ io:format("Send timeout, closing Client...~n", []),
+ Client ! {self(),close},
+ receive {Server,[{error,closed}]} ->
+ io:format("Got server closed.~n"),
+ receive {Reader,[{error,closed}]} ->
+ io:format
("Got reader closed.~n"),
ok
after 3000 ->
?t:fail({timeout,{closed,reader}})
end;
{Reader,[{error,closed}]} ->
- ?line io:format("Got reader closed.~n"),
- ?line receive {Server,[{error,closed}]} ->
- ?line io:format("Got server closed~n"),
+ io:format("Got reader closed.~n"),
+ receive {Server,[{error,closed}]} ->
+ io:format("Got server closed~n"),
ok
after 3000 ->
?t:fail({timeout,{closed,server}})
@@ -1416,39 +1402,39 @@ fill_sendq_client(Port, Master) ->
%%% a closed socket.
%%%
partial_recv_and_close(Config) when is_list(Config) ->
- ?line Msg = "the quick brown fox jumps over a lazy dog 0123456789\n",
- ?line Len = length(Msg),
- ?line {ok,L} = gen_tcp:listen(0, [{active,false}]),
- ?line {ok,P} = inet:port(L),
- ?line {ok,S} = gen_tcp:connect("localhost", P, [{active,false}]),
- ?line {ok,A} = gen_tcp:accept(L),
- ?line ok = gen_tcp:send(S, Msg),
- ?line ok = gen_tcp:close(S),
- ?line {error,closed} = gen_tcp:recv(A, Len+1),
+ Msg = "the quick brown fox jumps over a lazy dog 0123456789\n",
+ Len = length(Msg),
+ {ok,L} = gen_tcp:listen(0, [{active,false}]),
+ {ok,P} = inet:port(L),
+ {ok,S} = gen_tcp:connect("localhost", P, [{active,false}]),
+ {ok,A} = gen_tcp:accept(L),
+ ok = gen_tcp:send(S, Msg),
+ ok = gen_tcp:close(S),
+ {error,closed} = gen_tcp:recv(A, Len+1),
ok.
%%% Try to receive more than available number of bytes from
%%% a closed socket, this time waiting in the recv before closing.
%%%
partial_recv_and_close_2(Config) when is_list(Config) ->
- ?line Msg = "the quick brown fox jumps over a lazy dog 0123456789\n",
- ?line Len = length(Msg),
- ?line {ok,L} = gen_tcp:listen(0, [{active,false}]),
- ?line {ok,P} = inet:port(L),
- ?line Server = self(),
- ?line Client =
+ Msg = "the quick brown fox jumps over a lazy dog 0123456789\n",
+ Len = length(Msg),
+ {ok,L} = gen_tcp:listen(0, [{active,false}]),
+ {ok,P} = inet:port(L),
+ Server = self(),
+ Client =
spawn_link(
fun () ->
receive after 2000 -> ok end,
{ok,S} = gen_tcp:connect("localhost", P, [{active,false}]),
- ?line ok = gen_tcp:send(S, Msg),
+ ok = gen_tcp:send(S, Msg),
receive {Server,close} -> ok end,
receive after 2000 -> ok end,
- ?line ok = gen_tcp:close(S)
+ ok = gen_tcp:close(S)
end),
- ?line {ok,A} = gen_tcp:accept(L),
- ?line Client ! {Server,close},
- ?line {error,closed} = gen_tcp:recv(A, Len+1),
+ {ok,A} = gen_tcp:accept(L),
+ Client ! {Server,close},
+ {error,closed} = gen_tcp:recv(A, Len+1),
ok.
%%% Here we tests that gen_tcp:recv/2 will return {error,closed} following
@@ -1471,151 +1457,151 @@ do_partial_recv_and_close_3() ->
receive
{port,Port} -> ok
end,
- ?line Much = ones(8*64*1024),
- ?line {ok,S} = gen_tcp:connect(localhost, Port, [{active,false}]),
+ Much = ones(8*64*1024),
+ {ok,S} = gen_tcp:connect(localhost, Port, [{active,false}]),
%% Send a lot of data (most of it will be queued). The receiver will read one byte
%% and close the connection. The write operation will fail.
- ?line gen_tcp:send(S, Much),
+ gen_tcp:send(S, Much),
%% We should always get {error,closed} here.
- ?line {error,closed} = gen_tcp:recv(S, 0).
+ {error,closed} = gen_tcp:recv(S, 0).
test_prio_put_get() ->
Tos = 3 bsl 5,
- ?line {ok,L1} = gen_tcp:listen(0, [{active,false}]),
- ?line ok = inet:setopts(L1,[{priority,3}]),
- ?line ok = inet:setopts(L1,[{tos,Tos}]),
- ?line {ok,[{priority,3},{tos,Tos}]} = inet:getopts(L1,[priority,tos]),
- ?line ok = inet:setopts(L1,[{priority,3}]), % Dont destroy each other
- ?line {ok,[{priority,3},{tos,Tos}]} = inet:getopts(L1,[priority,tos]),
- ?line ok = inet:setopts(L1,[{reuseaddr,true}]), % Dont let others destroy
- ?line {ok,[{priority,3},{tos,Tos}]} = inet:getopts(L1,[priority,tos]),
- ?line gen_tcp:close(L1),
+ {ok,L1} = gen_tcp:listen(0, [{active,false}]),
+ ok = inet:setopts(L1,[{priority,3}]),
+ ok = inet:setopts(L1,[{tos,Tos}]),
+ {ok,[{priority,3},{tos,Tos}]} = inet:getopts(L1,[priority,tos]),
+    ok = inet:setopts(L1,[{priority,3}]), % Don't destroy each other
+ {ok,[{priority,3},{tos,Tos}]} = inet:getopts(L1,[priority,tos]),
+    ok = inet:setopts(L1,[{reuseaddr,true}]), % Don't let others destroy
+ {ok,[{priority,3},{tos,Tos}]} = inet:getopts(L1,[priority,tos]),
+ gen_tcp:close(L1),
ok.
test_prio_accept() ->
- ?line {ok,Sock}=gen_tcp:listen(0,[binary,{packet,0},{active,false},
- {reuseaddr,true},{priority,4}]),
- ?line {ok,Port} = inet:port(Sock),
- ?line {ok,Sock2}=gen_tcp:connect("localhost",Port,[binary,{packet,0},
- {active,false},
- {reuseaddr,true},
- {priority,4}]),
- ?line {ok,Sock3}=gen_tcp:accept(Sock),
- ?line {ok,[{priority,4}]} = inet:getopts(Sock,[priority]),
- ?line {ok,[{priority,4}]} = inet:getopts(Sock2,[priority]),
- ?line {ok,[{priority,4}]} = inet:getopts(Sock3,[priority]),
- ?line gen_tcp:close(Sock),
- ?line gen_tcp:close(Sock2),
- ?line gen_tcp:close(Sock3),
+ {ok,Sock}=gen_tcp:listen(0,[binary,{packet,0},{active,false},
+ {reuseaddr,true},{priority,4}]),
+ {ok,Port} = inet:port(Sock),
+ {ok,Sock2}=gen_tcp:connect("localhost",Port,[binary,{packet,0},
+ {active,false},
+ {reuseaddr,true},
+ {priority,4}]),
+ {ok,Sock3}=gen_tcp:accept(Sock),
+ {ok,[{priority,4}]} = inet:getopts(Sock,[priority]),
+ {ok,[{priority,4}]} = inet:getopts(Sock2,[priority]),
+ {ok,[{priority,4}]} = inet:getopts(Sock3,[priority]),
+ gen_tcp:close(Sock),
+ gen_tcp:close(Sock2),
+ gen_tcp:close(Sock3),
ok.
test_prio_accept2() ->
Tos1 = 4 bsl 5,
Tos2 = 3 bsl 5,
- ?line {ok,Sock}=gen_tcp:listen(0,[binary,{packet,0},{active,false},
- {reuseaddr,true},{priority,4},
- {tos,Tos1}]),
- ?line {ok,Port} = inet:port(Sock),
- ?line {ok,Sock2}=gen_tcp:connect("localhost",Port,[binary,{packet,0},
- {active,false},
- {reuseaddr,true},
- {priority,4},
- {tos,Tos2}]),
- ?line {ok,Sock3}=gen_tcp:accept(Sock),
- ?line {ok,[{priority,4},{tos,Tos1}]} = inet:getopts(Sock,[priority,tos]),
- ?line {ok,[{priority,4},{tos,Tos2}]} = inet:getopts(Sock2,[priority,tos]),
- ?line {ok,[{priority,4},{tos,Tos1}]} = inet:getopts(Sock3,[priority,tos]),
- ?line gen_tcp:close(Sock),
- ?line gen_tcp:close(Sock2),
- ?line gen_tcp:close(Sock3),
+ {ok,Sock}=gen_tcp:listen(0,[binary,{packet,0},{active,false},
+ {reuseaddr,true},{priority,4},
+ {tos,Tos1}]),
+ {ok,Port} = inet:port(Sock),
+ {ok,Sock2}=gen_tcp:connect("localhost",Port,[binary,{packet,0},
+ {active,false},
+ {reuseaddr,true},
+ {priority,4},
+ {tos,Tos2}]),
+ {ok,Sock3}=gen_tcp:accept(Sock),
+ {ok,[{priority,4},{tos,Tos1}]} = inet:getopts(Sock,[priority,tos]),
+ {ok,[{priority,4},{tos,Tos2}]} = inet:getopts(Sock2,[priority,tos]),
+ {ok,[{priority,4},{tos,Tos1}]} = inet:getopts(Sock3,[priority,tos]),
+ gen_tcp:close(Sock),
+ gen_tcp:close(Sock2),
+ gen_tcp:close(Sock3),
ok.
test_prio_accept3() ->
Tos1 = 4 bsl 5,
Tos2 = 3 bsl 5,
- ?line {ok,Sock}=gen_tcp:listen(0,[binary,{packet,0},{active,false},
- {reuseaddr,true},
- {tos,Tos1}]),
- ?line {ok,Port} = inet:port(Sock),
- ?line {ok,Sock2}=gen_tcp:connect("localhost",Port,[binary,{packet,0},
- {active,false},
- {reuseaddr,true},
- {tos,Tos2}]),
- ?line {ok,Sock3}=gen_tcp:accept(Sock),
- ?line {ok,[{priority,0},{tos,Tos1}]} = inet:getopts(Sock,[priority,tos]),
- ?line {ok,[{priority,0},{tos,Tos2}]} = inet:getopts(Sock2,[priority,tos]),
- ?line {ok,[{priority,0},{tos,Tos1}]} = inet:getopts(Sock3,[priority,tos]),
- ?line gen_tcp:close(Sock),
- ?line gen_tcp:close(Sock2),
- ?line gen_tcp:close(Sock3),
+ {ok,Sock}=gen_tcp:listen(0,[binary,{packet,0},{active,false},
+ {reuseaddr,true},
+ {tos,Tos1}]),
+ {ok,Port} = inet:port(Sock),
+ {ok,Sock2}=gen_tcp:connect("localhost",Port,[binary,{packet,0},
+ {active,false},
+ {reuseaddr,true},
+ {tos,Tos2}]),
+ {ok,Sock3}=gen_tcp:accept(Sock),
+ {ok,[{priority,0},{tos,Tos1}]} = inet:getopts(Sock,[priority,tos]),
+ {ok,[{priority,0},{tos,Tos2}]} = inet:getopts(Sock2,[priority,tos]),
+ {ok,[{priority,0},{tos,Tos1}]} = inet:getopts(Sock3,[priority,tos]),
+ gen_tcp:close(Sock),
+ gen_tcp:close(Sock2),
+ gen_tcp:close(Sock3),
ok.
test_prio_accept_async() ->
Tos1 = 4 bsl 5,
Tos2 = 3 bsl 5,
Ref = make_ref(),
- ?line spawn(?MODULE,priority_server,[{self(),Ref}]),
- ?line Port = receive
- {Ref,P} -> P
- after 5000 -> ?t:fail({error,"helper process timeout"})
- end,
- ?line receive
- after 3000 -> ok
- end,
- ?line {ok,Sock2}=gen_tcp:connect("localhost",Port,[binary,{packet,0},
- {active,false},
- {reuseaddr,true},
- {priority,4},
- {tos,Tos2}]),
- ?line receive
- {Ref,{ok,[{priority,4},{tos,Tos1}]}} ->
- ok ;
- {Ref,Error} ->
- ?t:fail({missmatch,Error})
- after 5000 -> ?t:fail({error,"helper process timeout"})
- end,
- ?line receive
- {Ref,{ok,[{priority,4},{tos,Tos1}]}} ->
- ok ;
- {Ref,Error2} ->
- ?t:fail({missmatch,Error2})
- after 5000 -> ?t:fail({error,"helper process timeout"})
- end,
-
- ?line {ok,[{priority,4},{tos,Tos2}]} = inet:getopts(Sock2,[priority,tos]),
- ?line catch gen_tcp:close(Sock2),
+ spawn(?MODULE,priority_server,[{self(),Ref}]),
+ Port = receive
+ {Ref,P} -> P
+ after 5000 -> ?t:fail({error,"helper process timeout"})
+ end,
+ receive
+ after 3000 -> ok
+ end,
+ {ok,Sock2}=gen_tcp:connect("localhost",Port,[binary,{packet,0},
+ {active,false},
+ {reuseaddr,true},
+ {priority,4},
+ {tos,Tos2}]),
+ receive
+ {Ref,{ok,[{priority,4},{tos,Tos1}]}} ->
+ ok;
+ {Ref,Error} ->
+	      ?t:fail({mismatch,Error})
+ after 5000 -> ?t:fail({error,"helper process timeout"})
+ end,
+ receive
+ {Ref,{ok,[{priority,4},{tos,Tos1}]}} ->
+ ok;
+ {Ref,Error2} ->
+	      ?t:fail({mismatch,Error2})
+ after 5000 -> ?t:fail({error,"helper process timeout"})
+ end,
+
+ {ok,[{priority,4},{tos,Tos2}]} = inet:getopts(Sock2,[priority,tos]),
+ catch gen_tcp:close(Sock2),
ok.
priority_server({Parent,Ref}) ->
Tos1 = 4 bsl 5,
- ?line {ok,Sock}=gen_tcp:listen(0,[binary,{packet,0},{active,false},
- {reuseaddr,true},{priority,4},
- {tos,Tos1}]),
- ?line {ok,Port} = inet:port(Sock),
+ {ok,Sock}=gen_tcp:listen(0,[binary,{packet,0},{active,false},
+ {reuseaddr,true},{priority,4},
+ {tos,Tos1}]),
+ {ok,Port} = inet:port(Sock),
Parent ! {Ref,Port},
- ?line {ok,Sock3}=gen_tcp:accept(Sock),
+ {ok,Sock3}=gen_tcp:accept(Sock),
Parent ! {Ref, inet:getopts(Sock,[priority,tos])},
Parent ! {Ref, inet:getopts(Sock3,[priority,tos])},
ok.
test_prio_fail() ->
- ?line {ok,L} = gen_tcp:listen(0, [{active,false}]),
- ?line {error,_} = inet:setopts(L,[{priority,1000}]),
+ {ok,L} = gen_tcp:listen(0, [{active,false}]),
+ {error,_} = inet:setopts(L,[{priority,1000}]),
% This error could only happen in linux kernels earlier than 2.6.24.4
% Privilege check is now disabled and IP_TOS can never fail (only silently
% be masked).
-% ?line {error,_} = inet:setopts(L,[{tos,6 bsl 5}]),
- ?line gen_tcp:close(L),
+% {error,_} = inet:setopts(L,[{tos,6 bsl 5}]),
+ gen_tcp:close(L),
ok.
test_prio_udp() ->
Tos = 3 bsl 5,
- ?line {ok,S} = gen_udp:open(0,[{active,false},binary,{tos, Tos},
- {priority,3}]),
- ?line {ok,[{priority,3},{tos,Tos}]} = inet:getopts(S,[priority,tos]),
- ?line gen_udp:close(S),
+ {ok,S} = gen_udp:open(0,[{active,false},binary,{tos, Tos},
+ {priority,3}]),
+ {ok,[{priority,3},{tos,Tos}]} = inet:getopts(S,[priority,tos]),
+ gen_udp:close(S),
ok.
so_priority(doc) ->
@@ -1623,9 +1609,9 @@ so_priority(doc) ->
so_priority(suite) ->
[];
so_priority(Config) when is_list(Config) ->
- ?line {ok,L} = gen_tcp:listen(0, [{active,false}]),
- ?line ok = inet:setopts(L,[{priority,1}]),
- ?line case inet:getopts(L,[priority]) of
+ {ok,L} = gen_tcp:listen(0, [{active,false}]),
+ ok = inet:setopts(L,[{priority,1}]),
+ case inet:getopts(L,[priority]) of
{ok,[{priority,1}]} ->
gen_tcp:close(L),
test_prio_put_get(),
@@ -1641,7 +1627,7 @@ so_priority(Config) when is_list(Config) ->
{unix,linux} ->
case os:version() of
{X,Y,_} when (X > 2) or ((X =:= 2) and (Y >= 4)) ->
- ?line ?t:fail({error,
+ ?t:fail({error,
"so_priority should work on this "
"OS, but does not"});
_ ->
@@ -1655,21 +1641,21 @@ so_priority(Config) when is_list(Config) ->
%% Accept test utilities (suites are below)
millis() ->
- {A,B,C}=erlang:now(),
- (A*1000000*1000)+(B*1000)+(C div 1000).
+ erlang:monotonic_time(milli_seconds).
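%% Aside (illustrative, not part of this patch): millis/0 now reads the
%% monotonic clock instead of deriving a millisecond stamp from
%% erlang:now/0 -- presumably because now/0 is deprecated and the
%% monotonic clock is cheaper and immune to time warps when measuring
%% intervals. A minimal sketch of interval measurement in that style
%% (the helper name is an assumption):
elapsed_ms(Fun) when is_function(Fun, 0) ->
    T0 = erlang:monotonic_time(milli_seconds),
    Fun(),
    erlang:monotonic_time(milli_seconds) - T0.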
-collect_accepts(Tmo) ->
+collect_accepts(0,_) -> [];
+collect_accepts(N,Tmo) ->
A = millis(),
receive
{accepted,P,Msg} ->
- [{P,Msg}] ++ collect_accepts(Tmo-(millis() - A))
+ [{P,Msg}] ++ collect_accepts(N-1,Tmo-(millis() - A))
after Tmo ->
[]
end.
--define(EXPECT_ACCEPTS(Pattern,Timeout),
+-define(EXPECT_ACCEPTS(Pattern,N,Timeout),
(fun() ->
- case collect_accepts(Timeout) of
+ case collect_accepts(if N =:= infinity -> -1; true -> N end,Timeout) of
Pattern ->
ok;
Other ->
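%% Aside (illustrative, not part of this patch): collect_accepts/2 now
%% takes a count as well as a timeout, so ?EXPECT_ACCEPTS(Pattern, N,
%% Timeout) collects at most N {accepted,Pid,Result} messages or stops
%% when the accumulated timeout runs out; 'infinity' is mapped to -1, a
%% counter that never reaches the 0 base case, so only the timeout limits
%% collection. A hypothetical usage inside this suite (the pids are
%% assumed to be acceptor processes spawned via mktmofun/3, as in the
%% tests below):
expect_two_timeouts(P1, P2) ->
    ok = ?EXPECT_ACCEPTS([{P1,{error,timeout}},{P2,{error,timeout}}], 2, 3000).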
@@ -1705,20 +1691,20 @@ primitive_accept(suite) ->
primitive_accept(doc) ->
["Test singular accept"];
primitive_accept(Config) when is_list(Config) ->
- ?line {ok,LS}=gen_tcp:listen(0,[]),
- ?line {ok,PortNo}=inet:port(LS),
- ?line Parent = self(),
- ?line F = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS)} end,
- ?line P = spawn(F),
- ?line gen_tcp:connect("localhost",PortNo,[]),
- ?line receive
- {accepted,P,{ok,P0}} when is_port(P0) ->
- ok;
- {accepted,P,Other0} ->
- {error,Other0}
- after 500 ->
- {error,timeout}
- end.
+ {ok,LS}=gen_tcp:listen(0,[]),
+ {ok,PortNo}=inet:port(LS),
+ Parent = self(),
+ F = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS)} end,
+ P = spawn(F),
+ gen_tcp:connect("localhost",PortNo,[]),
+ receive
+ {accepted,P,{ok,P0}} when is_port(P0) ->
+ ok;
+ {accepted,P,Other0} ->
+ {error,Other0}
+ after 500 ->
+ {error,timeout}
+ end.
multi_accept_close_listen(suite) ->
@@ -1726,111 +1712,109 @@ multi_accept_close_listen(suite) ->
multi_accept_close_listen(doc) ->
["Closing listen socket when multi-accepting"];
multi_accept_close_listen(Config) when is_list(Config) ->
- ?line {ok,LS}=gen_tcp:listen(0,[]),
- ?line Parent = self(),
- ?line F = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS)} end,
- ?line spawn(F),
- ?line spawn(F),
- ?line spawn(F),
- ?line spawn(F),
- ?line gen_tcp:close(LS),
- ?line ?EXPECT_ACCEPTS([{_,{error,closed}},{_,{error,closed}},
- {_,{error,closed}},{_,{error,closed}}], 500).
+ {ok,LS}=gen_tcp:listen(0,[]),
+ Parent = self(),
+ F = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS)} end,
+ spawn(F),
+ spawn(F),
+ spawn(F),
+ spawn(F),
+ gen_tcp:close(LS),
+ ?EXPECT_ACCEPTS([{_,{error,closed}},{_,{error,closed}},
+ {_,{error,closed}},{_,{error,closed}}],4,500).
accept_timeout(suite) ->
[];
accept_timeout(doc) ->
["Single accept with timeout"];
accept_timeout(Config) when is_list(Config) ->
- ?line {ok,LS}=gen_tcp:listen(0,[]),
- ?line Parent = self(),
- ?line F = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS,1000)} end,
- ?line P = spawn(F),
- ?line ?EXPECT_ACCEPTS([{P,{error,timeout}}],2000).
+ {ok,LS}=gen_tcp:listen(0,[]),
+ Parent = self(),
+ F = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS,1000)} end,
+ P = spawn(F),
+ ?EXPECT_ACCEPTS([{P,{error,timeout}}],1,2000).
accept_timeouts_in_order(suite) ->
[];
accept_timeouts_in_order(doc) ->
["Check that multi-accept timeouts happen in the correct order"];
accept_timeouts_in_order(Config) when is_list(Config) ->
- ?line {ok,LS}=gen_tcp:listen(0,[]),
- ?line Parent = self(),
- ?line P1 = spawn(mktmofun(1000,Parent,LS)),
- ?line P2 = spawn(mktmofun(1200,Parent,LS)),
- ?line P3 = spawn(mktmofun(1300,Parent,LS)),
- ?line P4 = spawn(mktmofun(1400,Parent,LS)),
- ?line ?EXPECT_ACCEPTS([{P1,{error,timeout}},{P2,{error,timeout}},
- {P3,{error,timeout}},{P4,{error,timeout}}], 2000).
+ {ok,LS}=gen_tcp:listen(0,[]),
+ Parent = self(),
+ P1 = spawn(mktmofun(1000,Parent,LS)),
+ P2 = spawn(mktmofun(1200,Parent,LS)),
+ P3 = spawn(mktmofun(1300,Parent,LS)),
+ P4 = spawn(mktmofun(1400,Parent,LS)),
+ ?EXPECT_ACCEPTS([{P1,{error,timeout}},{P2,{error,timeout}},
+ {P3,{error,timeout}},{P4,{error,timeout}}],infinity,2000).
accept_timeouts_in_order2(suite) ->
[];
accept_timeouts_in_order2(doc) ->
["Check that multi-accept timeouts happen in the correct order (more)"];
accept_timeouts_in_order2(Config) when is_list(Config) ->
- ?line {ok,LS}=gen_tcp:listen(0,[]),
- ?line Parent = self(),
- ?line P1 = spawn(mktmofun(1400,Parent,LS)),
- ?line P2 = spawn(mktmofun(1300,Parent,LS)),
- ?line P3 = spawn(mktmofun(1200,Parent,LS)),
- ?line P4 = spawn(mktmofun(1000,Parent,LS)),
- ?line ?EXPECT_ACCEPTS([{P4,{error,timeout}},{P3,{error,timeout}},
- {P2,{error,timeout}},{P1,{error,timeout}}], 2000).
+ {ok,LS}=gen_tcp:listen(0,[]),
+ Parent = self(),
+ P1 = spawn(mktmofun(1400,Parent,LS)),
+ P2 = spawn(mktmofun(1300,Parent,LS)),
+ P3 = spawn(mktmofun(1200,Parent,LS)),
+ P4 = spawn(mktmofun(1000,Parent,LS)),
+ ?EXPECT_ACCEPTS([{P4,{error,timeout}},{P3,{error,timeout}},
+ {P2,{error,timeout}},{P1,{error,timeout}}],infinity,2000).
accept_timeouts_in_order3(suite) ->
[];
accept_timeouts_in_order3(doc) ->
["Check that multi-accept timeouts happen in the correct order (even more)"];
accept_timeouts_in_order3(Config) when is_list(Config) ->
- ?line {ok,LS}=gen_tcp:listen(0,[]),
- ?line Parent = self(),
- ?line P1 = spawn(mktmofun(1200,Parent,LS)),
- ?line P2 = spawn(mktmofun(1400,Parent,LS)),
- ?line P3 = spawn(mktmofun(1300,Parent,LS)),
- ?line P4 = spawn(mktmofun(1000,Parent,LS)),
- ?line ?EXPECT_ACCEPTS([{P4,{error,timeout}},{P1,{error,timeout}},
- {P3,{error,timeout}},{P2,{error,timeout}}], 2000).
+ {ok,LS}=gen_tcp:listen(0,[]),
+ Parent = self(),
+ P1 = spawn(mktmofun(1200,Parent,LS)),
+ P2 = spawn(mktmofun(1400,Parent,LS)),
+ P3 = spawn(mktmofun(1300,Parent,LS)),
+ P4 = spawn(mktmofun(1000,Parent,LS)),
+ ?EXPECT_ACCEPTS([{P4,{error,timeout}},{P1,{error,timeout}},
+ {P3,{error,timeout}},{P2,{error,timeout}}],infinity,2000).
accept_timeouts_mixed(suite) ->
[];
accept_timeouts_mixed(doc) ->
["Check that multi-accept timeouts behave correctly when mixed with successful timeouts"];
accept_timeouts_mixed(Config) when is_list(Config) ->
- ?line {ok,LS}=gen_tcp:listen(0,[]),
- ?line Parent = self(),
- ?line {ok,PortNo}=inet:port(LS),
- ?line P1 = spawn(mktmofun(1000,Parent,LS)),
- ?line wait_until_accepting(P1,500),
- ?line P2 = spawn(mktmofun(2000,Parent,LS)),
- ?line wait_until_accepting(P2,500),
- ?line P3 = spawn(mktmofun(3000,Parent,LS)),
- ?line wait_until_accepting(P3,500),
- ?line P4 = spawn(mktmofun(4000,Parent,LS)),
- ?line wait_until_accepting(P4,500),
- ?line ok = ?EXPECT_ACCEPTS([{P1,{error,timeout}}],1500),
- ?line {ok,_}=gen_tcp:connect("localhost",PortNo,[]),
- ?line ok = ?EXPECT_ACCEPTS([{P2,{ok,Port0}}] when is_port(Port0),100),
- ?line ok = ?EXPECT_ACCEPTS([{P3,{error,timeout}}],2000),
- ?line gen_tcp:connect("localhost",PortNo,[]),
- ?line ?EXPECT_ACCEPTS([{P4,{ok,Port1}}] when is_port(Port1),100).
+ {ok,LS}=gen_tcp:listen(0,[]),
+ Parent = self(),
+ {ok,PortNo}=inet:port(LS),
+ P1 = spawn(mktmofun(1000,Parent,LS)),
+ wait_until_accepting(P1,500),
+ P2 = spawn(mktmofun(2000,Parent,LS)),
+ wait_until_accepting(P2,500),
+ P3 = spawn(mktmofun(3000,Parent,LS)),
+ wait_until_accepting(P3,500),
+ P4 = spawn(mktmofun(4000,Parent,LS)),
+ wait_until_accepting(P4,500),
+ ok = ?EXPECT_ACCEPTS([{P1,{error,timeout}}],infinity,1500),
+ {ok,_}=gen_tcp:connect("localhost",PortNo,[]),
+ ok = ?EXPECT_ACCEPTS([{P2,{ok,Port0}}] when is_port(Port0),infinity,100),
+ ok = ?EXPECT_ACCEPTS([{P3,{error,timeout}}],infinity,2000),
+ gen_tcp:connect("localhost",PortNo,[]),
+ ?EXPECT_ACCEPTS([{P4,{ok,Port1}}] when is_port(Port1),infinity,100).
killing_acceptor(suite) ->
[];
killing_acceptor(doc) ->
["Check that single acceptor behaves as expected when killed"];
killing_acceptor(Config) when is_list(Config) ->
- ?line {ok,LS}=gen_tcp:listen(0,[]),
- ?line Pid = spawn(fun() -> erlang:display({accepted,self(),gen_tcp:accept(LS)}) end),
- ?line receive after 100 ->
- ok
- end,
- ?line {ok,L1} = prim_inet:getstatus(LS),
- ?line true = lists:member(accepting, L1),
- ?line exit(Pid,kill),
- ?line receive after 100 ->
- ok
- end,
- ?line {ok,L2} = prim_inet:getstatus(LS),
- ?line false = lists:member(accepting, L2),
+ {ok,LS}=gen_tcp:listen(0,[]),
+ Pid = spawn(fun() -> erlang:display({accepted,self(),gen_tcp:accept(LS)}) end),
+ receive after 100 -> ok
+ end,
+ {ok,L1} = prim_inet:getstatus(LS),
+ true = lists:member(accepting, L1),
+ exit(Pid,kill),
+ receive after 100 -> ok
+ end,
+ {ok,L2} = prim_inet:getstatus(LS),
+ false = lists:member(accepting, L2),
ok.
killing_multi_acceptors(suite) ->
@@ -1838,26 +1822,24 @@ killing_multi_acceptors(suite) ->
killing_multi_acceptors(doc) ->
["Check that multi acceptors behaves as expected when killed"];
killing_multi_acceptors(Config) when is_list(Config) ->
- ?line {ok,LS}=gen_tcp:listen(0,[]),
- ?line Parent = self(),
- ?line F = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS)} end,
- ?line F2 = mktmofun(1000,Parent,LS),
- ?line Pid = spawn(F),
- ?line Pid2 = spawn(F2),
- ?line receive after 100 ->
- ok
- end,
- ?line {ok,L1} = prim_inet:getstatus(LS),
- ?line true = lists:member(accepting, L1),
- ?line exit(Pid,kill),
- ?line receive after 100 ->
- ok
- end,
- ?line {ok,L2} = prim_inet:getstatus(LS),
- ?line true = lists:member(accepting, L2),
- ?line ok = ?EXPECT_ACCEPTS([{Pid2,{error,timeout}}],1000),
- ?line {ok,L3} = prim_inet:getstatus(LS),
- ?line false = lists:member(accepting, L3),
+ {ok,LS}=gen_tcp:listen(0,[]),
+ Parent = self(),
+ F = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS)} end,
+ F2 = mktmofun(1000,Parent,LS),
+ Pid = spawn(F),
+ Pid2 = spawn(F2),
+ receive after 100 -> ok
+ end,
+ {ok,L1} = prim_inet:getstatus(LS),
+ true = lists:member(accepting, L1),
+ exit(Pid,kill),
+ receive after 100 -> ok
+ end,
+ {ok,L2} = prim_inet:getstatus(LS),
+ true = lists:member(accepting, L2),
+ ok = ?EXPECT_ACCEPTS([{Pid2,{error,timeout}}],1,1000),
+ {ok,L3} = prim_inet:getstatus(LS),
+ false = lists:member(accepting, L3),
ok.
killing_multi_acceptors2(suite) ->
@@ -1865,40 +1847,36 @@ killing_multi_acceptors2(suite) ->
killing_multi_acceptors2(doc) ->
["Check that multi acceptors behaves as expected when killed (more)"];
killing_multi_acceptors2(Config) when is_list(Config) ->
- ?line {ok,LS}=gen_tcp:listen(0,[]),
- ?line Parent = self(),
- ?line {ok,PortNo}=inet:port(LS),
- ?line F = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS)} end,
- ?line F2 = mktmofun(1000,Parent,LS),
- ?line Pid = spawn(F),
- ?line Pid2 = spawn(F),
- ?line receive after 100 ->
- ok
- end,
- ?line {ok,L1} = prim_inet:getstatus(LS),
- ?line true = lists:member(accepting, L1),
- ?line exit(Pid,kill),
- ?line receive after 100 ->
- ok
- end,
- ?line {ok,L2} = prim_inet:getstatus(LS),
- ?line true = lists:member(accepting, L2),
- ?line exit(Pid2,kill),
- ?line receive after 100 ->
- ok
- end,
- ?line {ok,L3} = prim_inet:getstatus(LS),
- ?line false = lists:member(accepting, L3),
- ?line Pid3 = spawn(F2),
- ?line receive after 100 ->
- ok
- end,
- ?line {ok,L4} = prim_inet:getstatus(LS),
- ?line true = lists:member(accepting, L4),
- ?line gen_tcp:connect("localhost",PortNo,[]),
- ?line ok = ?EXPECT_ACCEPTS([{Pid3,{ok,Port}}] when is_port(Port),100),
- ?line {ok,L5} = prim_inet:getstatus(LS),
- ?line false = lists:member(accepting, L5),
+ {ok,LS}=gen_tcp:listen(0,[]),
+ Parent = self(),
+ {ok,PortNo}=inet:port(LS),
+ F = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS)} end,
+ F2 = mktmofun(1000,Parent,LS),
+ Pid = spawn(F),
+ Pid2 = spawn(F),
+ receive after 100 -> ok
+ end,
+ {ok,L1} = prim_inet:getstatus(LS),
+ true = lists:member(accepting, L1),
+ exit(Pid,kill),
+ receive after 100 -> ok
+ end,
+ {ok,L2} = prim_inet:getstatus(LS),
+ true = lists:member(accepting, L2),
+ exit(Pid2,kill),
+ receive after 100 -> ok
+ end,
+ {ok,L3} = prim_inet:getstatus(LS),
+ false = lists:member(accepting, L3),
+ Pid3 = spawn(F2),
+ receive after 100 -> ok
+ end,
+ {ok,L4} = prim_inet:getstatus(LS),
+ true = lists:member(accepting, L4),
+ gen_tcp:connect("localhost",PortNo,[]),
+ ok = ?EXPECT_ACCEPTS([{Pid3,{ok,Port}}] when is_port(Port),1,100),
+ {ok,L5} = prim_inet:getstatus(LS),
+ false = lists:member(accepting, L5),
ok.
several_accepts_in_one_go(suite) ->
@@ -1907,33 +1885,19 @@ several_accepts_in_one_go(doc) ->
["checks that multi-accept works when more than one accept can be "
"done at once (wb test of inet_driver)"];
several_accepts_in_one_go(Config) when is_list(Config) ->
- ?line {ok,LS}=gen_tcp:listen(0,[]),
- ?line Parent = self(),
- ?line {ok,PortNo}=inet:port(LS),
- ?line F1 = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS)} end,
- ?line F2 = fun() -> Parent ! {connected,self(),gen_tcp:connect("localhost",PortNo,[])} end,
- ?line spawn(F1),
- ?line spawn(F1),
- ?line spawn(F1),
- ?line spawn(F1),
- ?line spawn(F1),
- ?line spawn(F1),
- ?line spawn(F1),
- ?line spawn(F1),
- ?line ok = ?EXPECT_ACCEPTS([],500),
- ?line spawn(F2),
- ?line spawn(F2),
- ?line spawn(F2),
- ?line spawn(F2),
- ?line spawn(F2),
- ?line spawn(F2),
- ?line spawn(F2),
- ?line spawn(F2),
- ?line ok = ?EXPECT_ACCEPTS([{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}}],15000),
- ?line ok = ?EXPECT_CONNECTS([{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}}],1000),
+ {ok,LS}=gen_tcp:listen(0,[]),
+ Parent = self(),
+ {ok,PortNo}=inet:port(LS),
+ F1 = fun() -> Parent ! {accepted,self(),gen_tcp:accept(LS)} end,
+ F2 = fun() -> Parent ! {connected,self(),gen_tcp:connect("localhost",PortNo,[])} end,
+ Ns = lists:seq(1,8),
+ _ = [spawn(F1) || _ <- Ns],
+ ok = ?EXPECT_ACCEPTS([],1,500), % wait for tmo
+ _ = [spawn(F2) || _ <- Ns],
+ ok = ?EXPECT_ACCEPTS([{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}}],8,15000),
+ ok = ?EXPECT_CONNECTS([{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}},{_,{ok,_}}],1000),
ok.
-
flush(Msgs) ->
erlang:yield(),
receive Msg -> flush([Msg|Msgs])
@@ -1968,13 +1932,13 @@ accept_system_limit(doc) ->
["Check that accept returns {error, system_limit} "
"(and not {error, enfile}) when running out of ports"];
accept_system_limit(Config) when is_list(Config) ->
- ?line {ok, LS} = gen_tcp:listen(0, []),
- ?line {ok, TcpPort} = inet:port(LS),
+ {ok, LS} = gen_tcp:listen(0, []),
+ {ok, TcpPort} = inet:port(LS),
Me = self(),
- ?line Connector = spawn_link(fun () -> connector(TcpPort, Me) end),
+ Connector = spawn_link(fun () -> connector(TcpPort, Me) end),
receive {Connector, sync} -> Connector ! {self(), continue} end,
- ?line ok = acceptor(LS, false, []),
- ?line Connector ! stop,
+ ok = acceptor(LS, false, []),
+ Connector ! stop,
ok.
acceptor(LS, GotSL, A) ->
@@ -2021,49 +1985,49 @@ active_once_closed(doc) ->
["Check that active once and tcp_close messages behave as expected"];
active_once_closed(Config) when is_list(Config) ->
(fun() ->
- ?line {Loop,A} = setup_closed_ao(),
- ?line Loop({{error,closed},{error,econnaborted}},
+ {Loop,A} = setup_closed_ao(),
+ Loop({{error,closed},{error,econnaborted}},
fun() -> gen_tcp:send(A,"Hello") end),
- ?line ok = inet:setopts(A,[{active,once}]),
- ?line ok = receive {tcp_closed, A} -> ok after 1000 -> error end,
- ?line {error,einval} = inet:setopts(A,[{active,once}]),
- ?line ok = receive {tcp_closed, A} -> error after 1000 -> ok end
+ ok = inet:setopts(A,[{active,once}]),
+ ok = receive {tcp_closed, A} -> ok after 1000 -> error end,
+ {error,einval} = inet:setopts(A,[{active,once}]),
+ ok = receive {tcp_closed, A} -> error after 1000 -> ok end
end)(),
(fun() ->
- ?line {Loop,A} = setup_closed_ao(),
- ?line Loop({{error,closed},{error,econnaborted}},
+ {Loop,A} = setup_closed_ao(),
+ Loop({{error,closed},{error,econnaborted}},
fun() -> gen_tcp:send(A,"Hello") end),
- ?line ok = inet:setopts(A,[{active,true}]),
- ?line ok = receive {tcp_closed, A} -> ok after 1000 -> error end,
- ?line {error,einval} = inet:setopts(A,[{active,true}]),
- ?line ok = receive {tcp_closed, A} -> error after 1000 -> ok end
+ ok = inet:setopts(A,[{active,true}]),
+ ok = receive {tcp_closed, A} -> ok after 1000 -> error end,
+ {error,einval} = inet:setopts(A,[{active,true}]),
+ ok = receive {tcp_closed, A} -> error after 1000 -> ok end
end)(),
(fun() ->
- ?line {Loop,A} = setup_closed_ao(),
- ?line Loop({{error,closed},{error,econnaborted}},
+ {Loop,A} = setup_closed_ao(),
+ Loop({{error,closed},{error,econnaborted}},
fun() -> gen_tcp:send(A,"Hello") end),
- ?line ok = inet:setopts(A,[{active,true}]),
- ?line ok = receive {tcp_closed, A} -> ok after 1000 -> error end,
- ?line {error,einval} = inet:setopts(A,[{active,once}]),
- ?line ok = receive {tcp_closed, A} -> error after 1000 -> ok end
+ ok = inet:setopts(A,[{active,true}]),
+ ok = receive {tcp_closed, A} -> ok after 1000 -> error end,
+ {error,einval} = inet:setopts(A,[{active,once}]),
+ ok = receive {tcp_closed, A} -> error after 1000 -> ok end
end)(),
(fun() ->
- ?line {Loop,A} = setup_closed_ao(),
- ?line Loop({{error,closed},{error,econnaborted}},
+ {Loop,A} = setup_closed_ao(),
+ Loop({{error,closed},{error,econnaborted}},
fun() -> gen_tcp:send(A,"Hello") end),
- ?line ok = inet:setopts(A,[{active,once}]),
- ?line ok = receive {tcp_closed, A} -> ok after 1000 -> error end,
- ?line {error,einval} = inet:setopts(A,[{active,true}]),
- ?line ok = receive {tcp_closed, A} -> error after 1000 -> ok end
+ ok = inet:setopts(A,[{active,once}]),
+ ok = receive {tcp_closed, A} -> ok after 1000 -> error end,
+ {error,einval} = inet:setopts(A,[{active,true}]),
+ ok = receive {tcp_closed, A} -> error after 1000 -> ok end
end)(),
(fun() ->
- ?line {Loop,A} = setup_closed_ao(),
- ?line Loop({{error,closed},{error,econnaborted}},
+ {Loop,A} = setup_closed_ao(),
+ Loop({{error,closed},{error,econnaborted}},
fun() -> gen_tcp:send(A,"Hello") end),
- ?line ok = inet:setopts(A,[{active,false}]),
- ?line ok = receive {tcp_closed, A} -> error after 1000 -> ok end,
- ?line ok = inet:setopts(A,[{active,once}]),
- ?line ok = receive {tcp_closed, A} -> ok after 1000 -> error end
+ ok = inet:setopts(A,[{active,false}]),
+ ok = receive {tcp_closed, A} -> error after 1000 -> ok end,
+ ok = inet:setopts(A,[{active,once}]),
+ ok = receive {tcp_closed, A} -> ok after 1000 -> error end
end)().
send_timeout(suite) ->
@@ -2072,10 +2036,10 @@ send_timeout(doc) ->
["Test the send_timeout socket option"];
send_timeout(Config) when is_list(Config) ->
%% Basic
- BasicFun =
+ BasicFun =
fun(AutoClose) ->
- ?line {Loop,A,RNode} = setup_timeout_sink(1000, AutoClose),
- ?line {error,timeout} =
+ {Loop,A,RNode} = setup_timeout_sink(1000, AutoClose),
+ {error,timeout} =
Loop(fun() ->
Res = gen_tcp:send(A,<<1:10000>>),
%%erlang:display(Res),
@@ -2083,64 +2047,63 @@ send_timeout(Config) when is_list(Config) ->
end),
%% Check that the socket is not busy/closed...
Error = after_send_timeout(AutoClose),
- ?line {error,Error} = gen_tcp:send(A,<<"Hej">>),
- ?line test_server:stop_node(RNode)
+ {error,Error} = gen_tcp:send(A,<<"Hej">>),
+ test_server:stop_node(RNode)
end,
BasicFun(false),
BasicFun(true),
%% Check timeout length
- ?line Self = self(),
- ?line Pid =
- spawn(fun() ->
- {Loop,A,RNode} = setup_timeout_sink(1000, true),
- {error,timeout} =
- Loop(fun() ->
- Res = gen_tcp:send(A,<<1:10000>>),
- %%erlang:display(Res),
- Self ! Res,
- Res
- end),
- test_server:stop_node(RNode)
- end),
- ?line Diff = get_max_diff(),
- ?line io:format("Max time for send: ~p~n",[Diff]),
- ?line true = (Diff > 500) and (Diff < 1500),
+ Self = self(),
+ Pid = spawn(fun() ->
+ {Loop,A,RNode} = setup_timeout_sink(1000, true),
+ {error,timeout} = Loop(fun() ->
+ Res = gen_tcp:send(A,<<1:10000>>),
+ %%erlang:display(Res),
+ Self ! Res,
+ Res
+ end),
+ test_server:stop_node(RNode)
+ end),
+ Diff = get_max_diff(),
+ io:format("Max time for send: ~p~n",[Diff]),
+ true = (Diff > 500) and (Diff < 1500),
%% Let test_server slave die...
- ?line Mon = erlang:monitor(process, Pid),
- ?line receive {'DOWN',Mon,process,Pid,_} -> ok end,
+ Mon = erlang:monitor(process, Pid),
+ receive {'DOWN',Mon,process,Pid,_} -> ok end,
    %% Check that parallel writers do not hang forever
- ParaFun =
+ ParaFun =
fun(AutoClose) ->
- ?line {Loop,A,RNode} = setup_timeout_sink(1000, AutoClose),
+ {Loop,A,RNode} = setup_timeout_sink(1000, AutoClose),
SenderFun = fun() ->
- {error,Error} =
+ {error,Error} =
Loop(fun() ->
gen_tcp:send(A, <<1:10000>>)
end),
Self ! {error,Error}
end,
- ?line spawn_link(SenderFun),
- ?line spawn_link(SenderFun),
- ?line receive
- {error,timeout} -> ok
- after 10000 ->
- ?line exit(timeout)
- end,
+ spawn_link(SenderFun),
+ spawn_link(SenderFun),
+ receive
+ {error,timeout} -> ok
+ after 10000 ->
+ exit(timeout)
+ end,
NextErr = after_send_timeout(AutoClose),
- ?line receive
- {error,NextErr} -> ok
- after 10000 ->
- ?line exit(timeout)
- end,
- ?line {error,NextErr} = gen_tcp:send(A,<<"Hej">>),
- ?line test_server:stop_node(RNode)
+ receive
+ {error,NextErr} -> ok
+ after 10000 ->
+ exit(timeout)
+ end,
+ {error,NextErr} = gen_tcp:send(A,<<"Hej">>),
+ test_server:stop_node(RNode)
end,
ParaFun(false),
ParaFun(true),
ok.
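%% Aside (illustrative sketch, not part of this patch): the cases above
%% exercise the send_timeout socket option. In isolation, with
%% {send_timeout,1000} a send that cannot make progress for about a
%% second returns {error,timeout} instead of blocking indefinitely, and
%% with {send_timeout_close,true} the socket is also closed. The function
%% names and the peer set-up below are assumptions, not taken from the
%% suite; the peer is assumed not to read from its end:
send_timeout_demo(Host, Port) ->
    {ok, S} = gen_tcp:connect(Host, Port,
                              [binary,
                               {send_timeout, 1000},
                               {send_timeout_close, true}]),
    fill_until_timeout(S).

fill_until_timeout(S) ->
    case gen_tcp:send(S, <<0:80000>>) of
        ok -> fill_until_timeout(S);
        {error, timeout} -> peer_not_draining;
        {error, Reason} -> {error, Reason}
    end.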
+
mad_sender(S) ->
- {_, _, USec} = now(),
- case gen_tcp:send(S, integer_to_list(USec)) of
+ U = rand:uniform(1000000),
+ case gen_tcp:send(S, integer_to_list(U)) of
ok ->
mad_sender(S);
Err ->
@@ -2166,25 +2129,25 @@ send_timeout_active(Config) when is_list(Config) ->
%% Basic
BasicFun =
fun(AutoClose) ->
- ?line {Loop,A,RNode,C} = setup_active_timeout_sink(1, AutoClose),
+ {Loop,A,RNode,C} = setup_active_timeout_sink(1, AutoClose),
inet:setopts(A, [{active, once}]),
- ?line Mad = spawn_link(RNode,fun() -> mad_sender(C) end),
- ?line {error,timeout} =
- Loop(fun() ->
- receive
- {tcp, _Sock, _Data} ->
- inet:setopts(A, [{active, once}]),
- Res = gen_tcp:send(A,lists:duplicate(1000, $a)),
- %erlang:display(Res),
- Res;
- Err ->
- io:format("sock closed: ~p~n", [Err]),
- Err
- end
- end),
- unlink(Mad),
+ Mad = spawn_link(RNode,fun() -> mad_sender(C) end),
+ {error,timeout} =
+ Loop(fun() ->
+ receive
+ {tcp, _Sock, _Data} ->
+ inet:setopts(A, [{active, once}]),
+ Res = gen_tcp:send(A,lists:duplicate(1000, $a)),
+ %erlang:display(Res),
+ Res;
+ Err ->
+ io:format("sock closed: ~p~n", [Err]),
+ Err
+ end
+ end),
+ unlink(Mad),
exit(Mad,kill),
- ?line test_server:stop_node(RNode)
+ test_server:stop_node(RNode)
end,
BasicFun(false),
flush(),
@@ -2208,10 +2171,10 @@ get_max_diff() ->
end.
get_max_diff(Max) ->
- T1 = millistamp(),
+ T1 = millis(),
receive
ok ->
- Diff = millistamp() - T1,
+ Diff = millis() - T1,
if
Diff > Max ->
get_max_diff(Diff);
@@ -2219,7 +2182,7 @@ get_max_diff(Max) ->
get_max_diff(Max)
end;
{error,timeout} ->
- Diff = millistamp() - T1,
+ Diff = millis() - T1,
if
Diff > Max ->
Diff;
@@ -2227,29 +2190,29 @@ get_max_diff(Max) ->
Max
end
after 10000 ->
- exit(timeout)
+ exit(timeout)
end.
setup_closed_ao() ->
Dir = filename:dirname(code:which(?MODULE)),
{ok,R} = test_server:start_node(test_default_options_slave,slave,
- [{args,"-pa " ++ Dir}]),
+ [{args,"-pa " ++ Dir}]),
Host = list_to_atom(lists:nth(2,string:tokens(atom_to_list(node()),"@"))),
{ok, L} = gen_tcp:listen(0, [{active,false},{packet,2}]),
- Fun = fun(F) ->
- receive
- {From,X} when is_function(X) ->
- From ! {self(),X()}, F(F);
- die -> ok
- end
- end,
+ Fun = fun(F) ->
+ receive
+ {From,X} when is_function(X) ->
+ From ! {self(),X()}, F(F);
+ die -> ok
+ end
+ end,
Pid = rpc:call(R,erlang,spawn,[fun() -> Fun(Fun) end]),
{ok, Port} = inet:port(L),
- Remote = fun(Fu) ->
- Pid ! {self(), Fu},
- receive {Pid,X} -> X
- end
- end,
+ Remote = fun(Fu) ->
+ Pid ! {self(), Fu},
+ receive {Pid,X} -> X
+ end
+ end,
{ok, C} = Remote(fun() ->
gen_tcp:connect(Host,Port,
[{active,false},{packet,2}])
@@ -2257,113 +2220,109 @@ setup_closed_ao() ->
{ok,A} = gen_tcp:accept(L),
gen_tcp:send(A,"Hello"),
{ok, "Hello"} = Remote(fun() -> gen_tcp:recv(C,0) end),
- ok = Remote(fun() -> gen_tcp:close(C) end),
- Loop2 = fun(_,_,_,0) ->
+ ok = Remote(fun() -> gen_tcp:close(C) end),
+ Loop2 = fun(_,_,_,0) ->
{failure, timeout};
- (L2,{MA,MB},F2,N) ->
- case F2() of
- MA -> MA;
- MB -> MB;
- Other -> io:format("~p~n",[Other]),
- receive after 1000 -> ok end,
- L2(L2,{MA,MB},F2,N-1)
- end
+ (L2,{MA,MB},F2,N) ->
+ case F2() of
+ MA -> MA;
+ MB -> MB;
+ Other -> io:format("~p~n",[Other]),
+ receive after 1000 -> ok end,
+ L2(L2,{MA,MB},F2,N-1)
+ end
end,
Loop = fun(Match2,F3) -> Loop2(Loop2,Match2,F3,10) end,
test_server:stop_node(R),
{Loop,A}.
setup_timeout_sink(Timeout, AutoClose) ->
- ?line Dir = filename:dirname(code:which(?MODULE)),
- ?line {ok,R} = test_server:start_node(test_default_options_slave,slave,
- [{args,"-pa " ++ Dir}]),
- ?line Host = list_to_atom(lists:nth(2,string:tokens(atom_to_list(node()),"@"))),
- ?line {ok, L} = gen_tcp:listen(0, [{active,false},{packet,2},
+ Dir = filename:dirname(code:which(?MODULE)),
+ {ok,R} = test_server:start_node(test_default_options_slave,slave,
+ [{args,"-pa " ++ Dir}]),
+ Host = list_to_atom(lists:nth(2,string:tokens(atom_to_list(node()),"@"))),
+ {ok, L} = gen_tcp:listen(0, [{active,false},{packet,2},
{send_timeout,Timeout},
{send_timeout_close,AutoClose}]),
- ?line Fun = fun(F) ->
- receive
- {From,X} when is_function(X) ->
- From ! {self(),X()}, F(F);
- die -> ok
- end
- end,
- ?line Pid = rpc:call(R,erlang,spawn,[fun() -> Fun(Fun) end]),
- ?line {ok, Port} = inet:port(L),
- ?line Remote = fun(Fu) ->
- Pid ! {self(), Fu},
- receive {Pid,X} -> X
- end
+ Fun = fun(F) ->
+ receive
+ {From,X} when is_function(X) ->
+ From ! {self(),X()}, F(F);
+ die -> ok
+ end
+ end,
+ Pid = rpc:call(R,erlang,spawn,[fun() -> Fun(Fun) end]),
+ {ok, Port} = inet:port(L),
+ Remote = fun(Fu) ->
+ Pid ! {self(), Fu},
+ receive {Pid,X} -> X
+ end
end,
- ?line {ok, C} = Remote(fun() ->
+ {ok, C} = Remote(fun() ->
gen_tcp:connect(Host,Port,
- [{active,false},{packet,2}])
+ [{active,false},{packet,2}])
end),
- ?line {ok,A} = gen_tcp:accept(L),
- ?line gen_tcp:send(A,"Hello"),
- ?line {ok, "Hello"} = Remote(fun() -> gen_tcp:recv(C,0) end),
- ?line Loop2 = fun(_,_,0) ->
- {failure, timeout};
- (L2,F2,N) ->
+ {ok,A} = gen_tcp:accept(L),
+ gen_tcp:send(A,"Hello"),
+ {ok, "Hello"} = Remote(fun() -> gen_tcp:recv(C,0) end),
+ Loop2 = fun(_,_,0) ->
+ {failure, timeout};
+ (L2,F2,N) ->
Ret = F2(),
io:format("~p~n",[Ret]),
case Ret of
- ok -> receive after 1 -> ok end,
+ ok -> receive after 1 -> ok end,
L2(L2,F2,N-1);
Other -> Other
- end
+ end
end,
- ?line Loop = fun(F3) -> Loop2(Loop2,F3,1000) end,
+ Loop = fun(F3) -> Loop2(Loop2,F3,1000) end,
{Loop,A,R}.
setup_active_timeout_sink(Timeout, AutoClose) ->
- ?line Dir = filename:dirname(code:which(?MODULE)),
- ?line {ok,R} = test_server:start_node(test_default_options_slave,slave,
- [{args,"-pa " ++ Dir}]),
- ?line Host = list_to_atom(lists:nth(2,string:tokens(atom_to_list(node()),"@"))),
- ?line {ok, L} = gen_tcp:listen(0, [binary,{active,false},{packet,0},{nodelay, true},{keepalive, true},
+ Dir = filename:dirname(code:which(?MODULE)),
+ {ok,R} = test_server:start_node(test_default_options_slave,slave,
+ [{args,"-pa " ++ Dir}]),
+ Host = list_to_atom(lists:nth(2,string:tokens(atom_to_list(node()),"@"))),
+ {ok, L} = gen_tcp:listen(0, [binary,{active,false},{packet,0},{nodelay, true},{keepalive, true},
{send_timeout,Timeout},
{send_timeout_close,AutoClose}]),
- ?line Fun = fun(F) ->
- receive
- {From,X} when is_function(X) ->
- From ! {self(),X()}, F(F);
- die -> ok
- end
- end,
- ?line Pid = rpc:call(R,erlang,spawn,[fun() -> Fun(Fun) end]),
- ?line {ok, Port} = inet:port(L),
- ?line Remote = fun(Fu) ->
- Pid ! {self(), Fu},
- receive {Pid,X} -> X
- end
+ Fun = fun(F) ->
+ receive
+ {From,X} when is_function(X) ->
+ From ! {self(),X()}, F(F);
+ die -> ok
+ end
+ end,
+ Pid = rpc:call(R,erlang,spawn,[fun() -> Fun(Fun) end]),
+ {ok, Port} = inet:port(L),
+ Remote = fun(Fu) ->
+ Pid ! {self(), Fu},
+ receive {Pid,X} -> X
+ end
end,
- ?line {ok, C} = Remote(fun() ->
+ {ok, C} = Remote(fun() ->
gen_tcp:connect(Host,Port,
- [{active,false}])
+ [{active,false}])
end),
- ?line {ok,A} = gen_tcp:accept(L),
- ?line gen_tcp:send(A,"Hello"),
- ?line {ok, "H"++_} = Remote(fun() -> gen_tcp:recv(C,0) end),
- ?line Loop2 = fun(_,_,0) ->
- {failure, timeout};
- (L2,F2,N) ->
+ {ok,A} = gen_tcp:accept(L),
+ gen_tcp:send(A,"Hello"),
+ {ok, "H"++_} = Remote(fun() -> gen_tcp:recv(C,0) end),
+ Loop2 = fun(_,_,0) ->
+ {failure, timeout};
+ (L2,F2,N) ->
Ret = F2(),
io:format("~p~n",[Ret]),
case Ret of
- ok -> receive after 1 -> ok end,
+ ok -> receive after 1 -> ok end,
L2(L2,F2,N-1);
Other -> Other
- end
+ end
end,
- ?line Loop = fun(F3) -> Loop2(Loop2,F3,1000) end,
+ Loop = fun(F3) -> Loop2(Loop2,F3,1000) end,
{Loop,A,R,C}.
-millistamp() ->
- {Mega, Secs, Micros} = erlang:now(),
- (Micros div 1000) + Secs * 1000 + Mega * 1000000000.
-
has_superfluous_schedulers() ->
case {erlang:system_info(schedulers),
erlang:system_info(logical_processors)} of
@@ -2378,22 +2337,22 @@ otp_7731(doc) ->
"Leaking message from inet_drv {inet_reply,P,ok} "
"when a socket sending resumes working after a send_timeout";
otp_7731(Config) when is_list(Config) ->
- ?line ServerPid = spawn_link(?MODULE, otp_7731_server, [self()]),
- ?line receive {ServerPid, ready, PortNum} -> ok end,
+ ServerPid = spawn_link(?MODULE, otp_7731_server, [self()]),
+ receive {ServerPid, ready, PortNum} -> ok end,
- ?line {ok, Socket} = gen_tcp:connect("localhost", PortNum,
- [binary, {active, false}, {packet, raw},
- {send_timeout, 1000}]),
+ {ok, Socket} = gen_tcp:connect("localhost", PortNum,
+ [binary, {active, false}, {packet, raw},
+ {send_timeout, 1000}]),
otp_7731_send(Socket),
io:format("Sending complete...\n",[]),
ServerPid ! {self(), recv},
- receive {ServerPid, ok} -> ok end,
-
+ receive {ServerPid, ok} -> ok end,
+
io:format("Client waiting for leaking messages...\n",[]),
%% Now make sure inet_drv does not leak any internal messages.
receive Msg ->
- ?line test_server:fail({unexpected, Msg})
+ test_server:fail({unexpected, Msg})
after 1000 ->
ok
end,
@@ -2403,15 +2362,15 @@ otp_7731(Config) when is_list(Config) ->
otp_7731_send(Socket) ->
Bin = <<1:10000>>,
io:format("Client sending ~p bytes...\n",[size(Bin)]),
- ?line case gen_tcp:send(Socket, Bin) of
- ok -> otp_7731_send(Socket);
- {error,timeout} -> ok
- end.
+ case gen_tcp:send(Socket, Bin) of
+ ok -> otp_7731_send(Socket);
+ {error,timeout} -> ok
+ end.
otp_7731_server(ClientPid) ->
- ?line {ok, LSocket} = gen_tcp:listen(0, [binary, {packet, raw},
- {active, false}]),
- ?line {ok, {_, PortNum}} = inet:sockname(LSocket),
+ {ok, LSocket} = gen_tcp:listen(0, [binary, {packet, raw},
+ {active, false}]),
+ {ok, {_, PortNum}} = inet:sockname(LSocket),
io:format("Listening on ~w with port number ~p\n", [LSocket, PortNum]),
ClientPid ! {self(), ready, PortNum},
@@ -2433,7 +2392,7 @@ otp_7731_server(ClientPid) ->
otp_7731_recv(Socket) ->
- ?line case gen_tcp:recv(Socket, 0, 1000) of
+ case gen_tcp:recv(Socket, 0, 1000) of
{ok, Bin} ->
io:format("Server received ~p bytes\n",[size(Bin)]),
otp_7731_recv(Socket);
@@ -2452,21 +2411,21 @@ zombie_sockets(Config) when is_list(Config) ->
register(zombie_collector,self()),
Calls = 10,
Server = spawn_link(?MODULE, zombie_server,[self(), Calls]),
- ?line {Server, ready, PortNum} = receive Msg -> Msg end,
+ {Server, ready, PortNum} = receive Msg -> Msg end,
io:format("Ports before = ~p\n",[lists:sort(erlang:ports())]),
zombie_client_loop(Calls, PortNum),
Ports = lists:sort(zombie_collector(Calls,[])),
Server ! terminate,
io:format("Collected ports = ~p\n",[Ports]),
- ?line [] = zombies_alive(Ports, 10),
+ [] = zombies_alive(Ports, 10),
timer:sleep(1000),
ok.
zombie_client_loop(0, _) -> ok;
zombie_client_loop(N, PortNum) when is_integer(PortNum) ->
- ?line {ok, Socket} = gen_tcp:connect("localhost", PortNum,
- [binary, {active, false}, {packet, raw}]),
- ?line gen_tcp:close(Socket), % to make server recv fail
+ {ok, Socket} = gen_tcp:connect("localhost", PortNum,
+ [binary, {active, false}, {packet, raw}]),
+ gen_tcp:close(Socket), % to make server recv fail
zombie_client_loop(N-1, PortNum).
@@ -2495,19 +2454,19 @@ zombies_alive(Ports, WaitSec) ->
end.
zombie_server(Pid, Calls) ->
- ?line {ok, LSocket} = gen_tcp:listen(0, [binary, {packet, raw},
- {active, false}, {backlog, Calls}]),
- ?line {ok, {_, PortNum}} = inet:sockname(LSocket),
+ {ok, LSocket} = gen_tcp:listen(0, [binary, {packet, raw},
+ {active, false}, {backlog, Calls}]),
+ {ok, {_, PortNum}} = inet:sockname(LSocket),
io:format("Listening on ~w with port number ~p\n", [LSocket, PortNum]),
BigBin = list_to_binary(lists:duplicate(100*1024, 77)),
Pid ! {self(), ready, PortNum},
zombie_accept_loop(LSocket, BigBin, Calls),
- ?line terminate = receive Msg -> Msg end.
+ terminate = receive Msg -> Msg end.
zombie_accept_loop(_, _, 0) ->
ok;
zombie_accept_loop(Socket, BigBin, Calls) ->
- ?line case gen_tcp:accept(Socket) of
+ case gen_tcp:accept(Socket) of
{ok, NewSocket} ->
spawn_link(fun() -> zombie_serve_client(NewSocket, BigBin) end),
zombie_accept_loop(Socket, BigBin, Calls-1);
@@ -2517,29 +2476,27 @@ zombie_accept_loop(Socket, BigBin, Calls) ->
zombie_serve_client(Socket, Bin) ->
%%io:format("Got connection on ~p\n",[Socket]),
- ?line gen_tcp:send(Socket, Bin),
+ gen_tcp:send(Socket, Bin),
%%io:format("Sent data, waiting for reply on ~p\n",[Socket]),
- ?line case gen_tcp:recv(Socket, 4) of
+ case gen_tcp:recv(Socket, 4) of
{error,closed} -> ok;
{error,econnaborted} -> ok % may be returned on Windows
end,
%%io:format("Closing ~p\n",[Socket]),
- ?line gen_tcp:close(Socket),
+ gen_tcp:close(Socket),
zombie_collector ! {closed, Socket}.
-
-
otp_7816(suite) -> [];
otp_7816(doc) ->
"Hanging send on windows when sending iolist with more than 16 binaries.";
otp_7816(Config) when is_list(Config) ->
Client = self(),
- ?line Server = spawn_link(fun()-> otp_7816_server(Client) end),
- ?line receive {Server, ready, PortNum} -> ok end,
+ Server = spawn_link(fun()-> otp_7816_server(Client) end),
+ receive {Server, ready, PortNum} -> ok end,
- ?line {ok, Socket} = gen_tcp:connect("localhost", PortNum,
- [binary, {active, false}, {packet, 4},
- {send_timeout, 10}]),
+ {ok, Socket} = gen_tcp:connect("localhost", PortNum,
+ [binary, {active, false}, {packet, 4},
+ {send_timeout, 10}]),
%% We use the undocumented feature that sending can be resumed after
%% a send_timeout without any data loss if the peer starts to receive data.
%% Unless of course the 7816-bug is in effect, in which case the write event
@@ -2549,9 +2506,9 @@ otp_7816(Config) when is_list(Config) ->
io:format("Sending complete...\n",[]),
- ?line ok = gen_tcp:close(Socket),
+ ok = gen_tcp:close(Socket),
Server ! {self(), closed},
- ?line {Server, closed} = receive M -> M end.
+ {Server, closed} = receive M -> M end.
otp_7816_send(Socket, BinNr, BinSize, Server) ->
@@ -2559,7 +2516,7 @@ otp_7816_send(Socket, BinNr, BinSize, Server) ->
SentBytes = otp_7816_send_data(Socket, Data, 0) * BinNr * BinSize,
io:format("Client sent ~p bytes...\n",[SentBytes]),
Server ! {self(),recv,SentBytes},
- ?line {Server, ok} = receive M -> M end.
+ {Server, ok} = receive M -> M end.
@@ -2574,15 +2531,15 @@ otp_7816_send_data(Socket, Data, Loops) ->
otp_7816_server(Client) ->
- ?line {ok, LSocket} = gen_tcp:listen(0, [binary, {packet, 4},
+ {ok, LSocket} = gen_tcp:listen(0, [binary, {packet, 4},
{active, false}]),
- ?line {ok, {_, PortNum}} = inet:sockname(LSocket),
+ {ok, {_, PortNum}} = inet:sockname(LSocket),
io:format("Listening on ~w with port number ~p\n", [LSocket, PortNum]),
Client ! {self(), ready, PortNum},
- ?line {ok, CSocket} = gen_tcp:accept(LSocket),
+ {ok, CSocket} = gen_tcp:accept(LSocket),
io:format("Server got connection...\n",[]),
- ?line gen_tcp:close(LSocket),
+ gen_tcp:close(LSocket),
otp_7816_server_loop(CSocket),
@@ -2596,13 +2553,13 @@ otp_7816_server_loop(CSocket) ->
{Client, recv, RecvBytes} ->
io:format("Server start receiving...\n",[]),
- ?line ok = otp_7816_recv(CSocket, RecvBytes),
+ ok = otp_7816_recv(CSocket, RecvBytes),
Client ! {self(), ok},
otp_7816_server_loop(CSocket);
{Client, closed} ->
- ?line {error, closed} = gen_tcp:recv(CSocket, 0, 1000),
+ {error, closed} = gen_tcp:recv(CSocket, 0, 1000),
Client ! {self(), closed}
end.
@@ -2611,7 +2568,7 @@ otp_7816_recv(_, 0) ->
io:format("Server got all.\n",[]),
ok;
otp_7816_recv(CSocket, BytesLeft) ->
- ?line case gen_tcp:recv(CSocket, 0, 1000) of
+ case gen_tcp:recv(CSocket, 0, 1000) of
{ok, Bin} when byte_size(Bin) =< BytesLeft ->
io:format("Server received ~p of ~p bytes.\n",[size(Bin), BytesLeft]),
otp_7816_recv(CSocket, BytesLeft - byte_size(Bin));
@@ -2623,8 +2580,8 @@ otp_7816_recv(CSocket, BytesLeft) ->
otp_8102(doc) -> ["Receive a packet with a faulty packet header"];
otp_8102(suite) -> [];
otp_8102(Config) when is_list(Config) ->
- ?line {ok, LSocket} = gen_tcp:listen(0, []),
- ?line {ok, {_, PortNum}} = inet:sockname(LSocket),
+ {ok, LSocket} = gen_tcp:listen(0, []),
+ {ok, {_, PortNum}} = inet:sockname(LSocket),
io:format("Listening on ~w with port number ~p\n", [LSocket, PortNum]),
[otp_8102_do(LSocket, PortNum, otp_8102_packet(Type,Size))
@@ -2644,18 +2601,18 @@ otp_8102_packet({cdr,little}, Size) ->
otp_8102_do(LSocket, PortNum, {Bin,PType}) ->
io:format("Connect with packet option ~p ...\n",[PType]),
- ?line {ok, RSocket} = gen_tcp:connect("localhost", PortNum, [binary,
+ {ok, RSocket} = gen_tcp:connect("localhost", PortNum, [binary,
{packet,PType},
{active,true}]),
- ?line {ok, SSocket} = gen_tcp:accept(LSocket),
+ {ok, SSocket} = gen_tcp:accept(LSocket),
io:format("Got connection, sending ~p...\n",[Bin]),
- ?line ok = gen_tcp:send(SSocket, Bin),
+ ok = gen_tcp:send(SSocket, Bin),
io:format("Sending complete...\n",[]),
- ?line {tcp_error,RSocket,emsgsize} = receive M -> M end,
+ {tcp_error,RSocket,emsgsize} = receive M -> M end,
io:format("Got error msg, ok.\n",[]),
gen_tcp:close(SSocket),
@@ -2664,61 +2621,61 @@ otp_8102_do(LSocket, PortNum, {Bin,PType}) ->
otp_9389(doc) -> ["Verify packet_size handles long HTTP header lines"];
otp_9389(suite) -> [];
otp_9389(Config) when is_list(Config) ->
- ?line {ok, LS} = gen_tcp:listen(0, [{active,false}]),
- ?line {ok, {_, PortNum}} = inet:sockname(LS),
+ {ok, LS} = gen_tcp:listen(0, [{active,false}]),
+ {ok, {_, PortNum}} = inet:sockname(LS),
io:format("Listening on ~w with port number ~p\n", [LS, PortNum]),
OrigLinkHdr = "/" ++ string:chars($S, 8192),
_Server = spawn_link(
fun() ->
- ?line {ok, S} = gen_tcp:accept(LS),
- ?line ok = inet:setopts(S, [{packet_size, 16384}]),
- ?line ok = otp_9389_loop(S, OrigLinkHdr),
- ?line ok = gen_tcp:close(S)
+ {ok, S} = gen_tcp:accept(LS),
+ ok = inet:setopts(S, [{packet_size, 16384}]),
+ ok = otp_9389_loop(S, OrigLinkHdr),
+ ok = gen_tcp:close(S)
end),
- ?line {ok, S} = gen_tcp:connect("localhost", PortNum,
+ {ok, S} = gen_tcp:connect("localhost", PortNum,
[binary, {active, false}]),
Req = "GET / HTTP/1.1\r\n"
++ "Host: localhost\r\n"
++ "Link: " ++ OrigLinkHdr ++ "\r\n\r\n",
- ?line ok = gen_tcp:send(S, Req),
- ?line ok = inet:setopts(S, [{packet, http}]),
- ?line {ok, {http_response, {1,1}, 200, "OK"}} = gen_tcp:recv(S, 0),
- ?line ok = inet:setopts(S, [{packet, httph}, {packet_size, 16384}]),
- ?line {ok, {http_header, _, 'Content-Length', _, "0"}} = gen_tcp:recv(S, 0),
- ?line {ok, {http_header, _, "Link", _, LinkHdr}} = gen_tcp:recv(S, 0),
- ?line true = (LinkHdr == OrigLinkHdr),
+ ok = gen_tcp:send(S, Req),
+ ok = inet:setopts(S, [{packet, http}]),
+ {ok, {http_response, {1,1}, 200, "OK"}} = gen_tcp:recv(S, 0),
+ ok = inet:setopts(S, [{packet, httph}, {packet_size, 16384}]),
+ {ok, {http_header, _, 'Content-Length', _, "0"}} = gen_tcp:recv(S, 0),
+ {ok, {http_header, _, "Link", _, LinkHdr}} = gen_tcp:recv(S, 0),
+ true = (LinkHdr == OrigLinkHdr),
ok = gen_tcp:close(S),
ok = gen_tcp:close(LS),
ok.
otp_9389_loop(S, OrigLinkHdr) ->
- ?line ok = inet:setopts(S, [{active,once},{packet,http}]),
+ ok = inet:setopts(S, [{active,once},{packet,http}]),
receive
{http, S, {http_request, 'GET', _, _}} ->
- ?line ok = otp_9389_loop(S, OrigLinkHdr, undefined)
+ ok = otp_9389_loop(S, OrigLinkHdr, undefined)
after
3000 ->
- ?line error({timeout,request_line})
+ error({timeout,request_line})
end.
otp_9389_loop(S, OrigLinkHdr, ok) ->
- ?line Resp = "HTTP/1.1 200 OK\r\nContent-length: 0\r\n" ++
+ Resp = "HTTP/1.1 200 OK\r\nContent-length: 0\r\n" ++
"Link: " ++ OrigLinkHdr ++ "\r\n\r\n",
- ?line ok = gen_tcp:send(S, Resp);
+ ok = gen_tcp:send(S, Resp);
otp_9389_loop(S, OrigLinkHdr, State) ->
- ?line ok = inet:setopts(S, [{active,once}, {packet,httph}]),
+ ok = inet:setopts(S, [{active,once}, {packet,httph}]),
receive
{http, S, http_eoh} ->
- ?line otp_9389_loop(S, OrigLinkHdr, ok);
+ otp_9389_loop(S, OrigLinkHdr, ok);
{http, S, {http_header, _, "Link", _, LinkHdr}} ->
- ?line LinkHdr = OrigLinkHdr,
- ?line otp_9389_loop(S, OrigLinkHdr, State);
+ LinkHdr = OrigLinkHdr,
+ otp_9389_loop(S, OrigLinkHdr, State);
{http, S, {http_header, _, _Hdr, _, _Val}} ->
- ?line otp_9389_loop(S, OrigLinkHdr, State);
+ otp_9389_loop(S, OrigLinkHdr, State);
{http, S, {http_error, Err}} ->
- ?line error({error, Err})
+ error({error, Err})
after
3000 ->
- ?line error({timeout,header})
+ error({timeout,header})
end.
wrapping_oct(doc) ->
@@ -2729,7 +2686,7 @@ wrapping_oct(Config) when is_list(Config) ->
{ok,Sock} = gen_tcp:listen(0,[{active,false},{mode,binary}]),
{ok,Port} = inet:port(Sock),
spawn_link(?MODULE,oct_acceptor,[Sock]),
- Res = oct_datapump(Port,16#1FFFFFFFF),
+ Res = oct_datapump(Port,16#10000FFFF),
gen_tcp:close(Sock),
ok = Res,
ok.
diff --git a/lib/kernel/test/heart_SUITE.erl b/lib/kernel/test/heart_SUITE.erl
index 35d3b75b34..43224cf554 100644
--- a/lib/kernel/test/heart_SUITE.erl
+++ b/lib/kernel/test/heart_SUITE.erl
@@ -562,13 +562,15 @@ suicide_by_heart() ->
generate(Module, Attributes, FunStrings) ->
FunForms = function_forms(FunStrings),
Forms = [
- {attribute,1,module,Module},
- {attribute,2,export,[FA || {FA,_} <- FunForms]}
- ] ++ [{attribute, 3, A, V}|| {A, V} <- Attributes] ++
+ {attribute,a(1),module,Module},
+ {attribute,a(2),export,[FA || {FA,_} <- FunForms]}
+ ] ++ [{attribute, a(3), A, V}|| {A, V} <- Attributes] ++
[ Function || {_, Function} <- FunForms],
{ok, Module, Bin} = compile:forms(Forms),
Bin.
+a(L) ->
+ erl_anno:new(L).
function_forms([]) -> [];
function_forms([S|Ss]) ->
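
The generate/3 change above wraps the literal line numbers in erl_anno annotations, which compile:forms/1 expects from OTP 18 onwards. A minimal sketch of the same pattern, using a hypothetical demo module (demo and hello/0 are illustrative names, not part of the suite):

    A = fun(Line) -> erl_anno:new(Line) end,
    Forms = [{attribute, A(1), module, demo},
             {attribute, A(2), export, [{hello, 0}]},
             {function, A(3), hello, 0,
              [{clause, A(3), [], [], [{atom, A(4), world}]}]}],
    {ok, demo, _Bin} = compile:forms(Forms).
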
diff --git a/lib/kernel/test/inet_SUITE.erl b/lib/kernel/test/inet_SUITE.erl
index d45dfc2173..c77de9316f 100644
--- a/lib/kernel/test/inet_SUITE.erl
+++ b/lib/kernel/test/inet_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -36,6 +36,7 @@
gethostnative_parallell/1, cname_loop/1,
gethostnative_soft_restart/0, gethostnative_soft_restart/1,
gethostnative_debug_level/0, gethostnative_debug_level/1,
+ lookup_bad_search_option/1,
getif/1,
getif_ifr_name_overflow/1,getservbyname_overflow/1, getifaddrs/1,
parse_strict_address/1, simple_netns/1, simple_netns_open/1]).
@@ -52,6 +53,7 @@ all() ->
ipv4_to_ipv6, host_and_addr, {group, parse},
t_gethostnative, gethostnative_parallell, cname_loop,
gethostnative_debug_level, gethostnative_soft_restart,
+ lookup_bad_search_option,
getif, getif_ifr_name_overflow, getservbyname_overflow,
getifaddrs, parse_strict_address, simple_netns, simple_netns_open].
@@ -86,10 +88,30 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
+init_per_testcase(lookup_bad_search_option, Config) ->
+ Db = inet_db,
+ Key = res_lookup,
+ %% The bad option cannot enter through inet_db:set_lookup/1,
+ %% but through e.g. .inetrc.
+ Prev = ets:lookup(Db, Key),
+ ets:delete(Db, Key),
+ ets:insert(Db, {Key,[lookup_bad_search_option]}),
+ ?t:format("Misconfigured resolver lookup order", []),
+ Dog = test_server:timetrap(test_server:seconds(60)),
+ [{Key,Prev},{watchdog,Dog}|Config];
init_per_testcase(_Func, Config) ->
Dog = test_server:timetrap(test_server:seconds(60)),
[{watchdog,Dog}|Config].
+end_per_testcase(lookup_bad_search_option, Config) ->
+ Dog = ?config(watchdog, Config),
+ test_server:timetrap_cancel(Dog),
+ Db = inet_db,
+ Key = res_lookup,
+ Prev = ?config(Key, Config),
+ ets:delete(Db, Key),
+ ets:insert(Db, Prev),
+ ?t:format("Restored resolver lookup order", []);
end_per_testcase(_Func, Config) ->
Dog = ?config(watchdog, Config),
test_server:timetrap_cancel(Dog).
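
For contrast, the supported way to change the resolver lookup order goes through inet_db:set_lookup/1, which validates each method; the test setup above writes the ETS entry directly precisely because an invalid atom can only arrive some other way, for example from a broken .inetrc. A hedged sketch of the normal path (return shapes assumed from the standard inet_db API):

    %% dns, file and native are documented lookup methods.
    ok = inet_db:set_lookup([dns, file, native]),
    [dns, file, native] = inet_db:res_option(lookup).
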
@@ -547,8 +569,11 @@ parse_address(Config) when is_list(Config) ->
"::-1",
"::g",
"f:f11::10100:2",
+ "f:f11::01100:2",
"::17000",
+ "::01700",
"10000::",
+ "01000::",
"::8:7:6:5:4:3:2:1",
"8:7:6:5:4:3:2:1::",
"8:7:6:5:4::3:2:1",
@@ -908,6 +933,19 @@ lookup_loop([H|Hs], Delay, Tag, Parent, Cnt, Hosts) ->
+lookup_bad_search_option(suite) ->
+ [];
+lookup_bad_search_option(doc) ->
+ ["Test lookup with erroneously configured lookup option (OTP-12133)"];
+lookup_bad_search_option(Config) when is_list(Config) ->
+ %% Manipulation of resolver config is done in init_per_testcase
+ %% and end_per_testcase to ensure cleanup.
+ {ok,Hostname} = inet:gethostname(),
+ {ok,_Hent} = inet:gethostbyname(Hostname), % Would hang in a loop with this bug
+ ok.
+
+
+
getif(suite) ->
[];
getif(doc) ->
diff --git a/lib/kernel/test/interactive_shell_SUITE.erl b/lib/kernel/test/interactive_shell_SUITE.erl
index 7f6024f642..89c574b025 100644
--- a/lib/kernel/test/interactive_shell_SUITE.erl
+++ b/lib/kernel/test/interactive_shell_SUITE.erl
@@ -48,12 +48,7 @@ groups() ->
[].
init_per_suite(Config) ->
- Term = case os:getenv("TERM") of
- List when is_list(List) ->
- List;
- _ ->
- "dumb"
- end,
+ Term = os:getenv("TERM", "dumb"),
os:putenv("TERM","vt100"),
DefShell = get_default_shell(),
[{default_shell,DefShell},{term,Term}|Config].
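
The init_per_suite change relies on os:getenv/2, which returns the supplied default when the variable is unset, so the explicit case on false goes away. A minimal sketch:

    Term = os:getenv("TERM", "dumb"),   % "dumb" if TERM is not set
    true = is_list(Term).
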
@@ -723,8 +718,7 @@ toerl_loop(Port,Acc) ->
end.
millistamp() ->
- {Mega, Secs, Micros} = erlang:now(),
- (Micros div 1000) + Secs * 1000 + Mega * 1000000000.
+ erlang:monotonic_time(milli_seconds).
get_data_within(Port, X, Acc) when X =< 0 ->
?dbg({get_data_within, X, Acc, ?LINE}),
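
millistamp/0 now delegates to the monotonic clock introduced with ERTS 7.0 instead of decomposing erlang:now(). A minimal sketch, assuming the OTP 18 time API, of how such a stamp is typically used to measure elapsed time (elapsed_ms/1 is an illustrative helper):

    millistamp() ->
        erlang:monotonic_time(milli_seconds).

    elapsed_ms(Fun) ->
        T0 = millistamp(),
        Fun(),
        millistamp() - T0.
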
diff --git a/lib/kernel/test/prim_file_SUITE.erl b/lib/kernel/test/prim_file_SUITE.erl
index 05bd5b3a3d..f55716cbec 100644
--- a/lib/kernel/test/prim_file_SUITE.erl
+++ b/lib/kernel/test/prim_file_SUITE.erl
@@ -262,7 +262,7 @@ make_del_dir(Config, Handle, Suffix) ->
?line {error, enoent} = ?PRIM_FILE_call(del_dir, Handle, [NewDir]),
% Make sure we are not in a directory directly under test_server
- % as that would result in eacess errors when trying to delere '..',
+ % as that would result in eacces errors when trying to delete '..',
% because there are processes having that directory as current.
?line ok = ?PRIM_FILE_call(make_dir, Handle, [NewDir]),
?line {ok, CurrentDir} = ?PRIM_FILE_call(get_cwd, Handle, []),
diff --git a/lib/kernel/test/rpc_SUITE.erl b/lib/kernel/test/rpc_SUITE.erl
index 7adef49014..867b448b36 100644
--- a/lib/kernel/test/rpc_SUITE.erl
+++ b/lib/kernel/test/rpc_SUITE.erl
@@ -456,32 +456,33 @@ called_throws(Config) when is_list(Config) ->
call_benchmark(Config) when is_list(Config) ->
Timetrap = ?t:timetrap(?t:seconds(120)),
- ?line PA = filename:dirname(code:which(?MODULE)),
- ?line {ok, Node} = ?t:start_node(rpc_SUITE_call_benchmark, slave,
- [{args, "-pa " ++ PA}]),
+ PA = filename:dirname(code:which(?MODULE)),
+ {ok, Node} = ?t:start_node(rpc_SUITE_call_benchmark, slave,
+ [{args, "-pa " ++ PA}]),
Iter = case erlang:system_info(modified_timing_level) of
undefined -> 10000;
- _ -> 500 %Moified timing - spawn is slower
+ _ -> 500 %Modified timing - spawn is slower
end,
- ?line do_call_benchmark(Node, Iter),
+ Res = do_call_benchmark(Node, Iter),
+ ?t:stop_node(Node),
?t:timetrap_cancel(Timetrap),
- ok.
+ Res.
do_call_benchmark(Node, M) when is_integer(M), M > 0 ->
- do_call_benchmark(Node, erlang:now(), 0, M).
-
-do_call_benchmark(Node, {A,B,C}, M, M) ->
- ?line {D,E,F} = erlang:now(),
- ?line T = float(D-A)*1000000.0 + float(E-B) + float(F-C)*0.000001,
- ?line Q = 3.0 * float(M) / T,
- ?line ?t:stop_node(Node),
- {comment,
- lists:flatten([float_to_list(Q)," RPC calls per second"])};
-do_call_benchmark(Node, Then, I, M) ->
- ?line Node = rpc:call(Node, erlang, node, []),
- ?line _ = rpc:call(Node, erlang, whereis, [rex]),
- ?line 3 = rpc:call(Node, erlang, '+', [1,2]),
- ?line do_call_benchmark(Node, Then, I+1, M).
+ {Micros,ok} = timer:tc(fun() ->
+ do_call_benchmark(Node, 0, M)
+ end),
+ Calls = 3*M,
+ S = io_lib:format("~p RPC calls/second", [Calls*1000000 div Micros]),
+ {comment,lists:flatten(S)}.
+
+do_call_benchmark(_Node, M, M) ->
+ ok;
+do_call_benchmark(Node, I, M) ->
+ Node = rpc:call(Node, erlang, node, []),
+ _ = rpc:call(Node, erlang, whereis, [rex]),
+ 3 = rpc:call(Node, erlang, '+', [1,2]),
+ do_call_benchmark(Node, I+1, M).
async_call(Config) when is_list(Config) ->
Dog = ?t:timetrap(?t:seconds(120)),
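
The benchmark rewrite replaces hand-rolled erlang:now() arithmetic with timer:tc/1, which returns {ElapsedMicroseconds, Result}. A self-contained sketch of the same pattern (rate/2 and repeat/2 are illustrative helpers, not part of the suite):

    rate(Fun, N) when N > 0 ->
        {Micros, ok} = timer:tc(fun() -> repeat(Fun, N) end),
        N * 1000000 div max(Micros, 1).     % operations per second

    repeat(_Fun, 0) -> ok;
    repeat(Fun, N) -> Fun(), repeat(Fun, N - 1).
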
diff --git a/lib/kernel/test/standard_error_SUITE.erl b/lib/kernel/test/standard_error_SUITE.erl
new file mode 100644
index 0000000000..b290454b40
--- /dev/null
+++ b/lib/kernel/test/standard_error_SUITE.erl
@@ -0,0 +1,38 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(standard_error_SUITE).
+
+-export([all/0,suite/0]).
+-export([badarg/1,getopts/1]).
+
+suite() ->
+ [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ [badarg,getopts].
+
+badarg(Config) when is_list(Config) ->
+ {'EXIT',{badarg,_}} = (catch io:put_chars(standard_error, [oops])),
+ true = erlang:is_process_alive(whereis(standard_error)),
+ ok.
+
+getopts(Config) when is_list(Config) ->
+ [{encoding,latin1}] = io:getopts(standard_error),
+ ok.
diff --git a/lib/kernel/test/zlib_SUITE.erl b/lib/kernel/test/zlib_SUITE.erl
index 3be6f39d95..41c19fce51 100644
--- a/lib/kernel/test/zlib_SUITE.erl
+++ b/lib/kernel/test/zlib_SUITE.erl
@@ -82,7 +82,7 @@ groups() ->
api_deflateSetDictionary, api_deflateReset,
api_deflateParams, api_deflate, api_deflateEnd,
api_inflateInit, api_inflateSetDictionary,
- api_inflateSync, api_inflateReset, api_inflate,
+ api_inflateSync, api_inflateReset, api_inflate, api_inflateChunk,
api_inflateEnd, api_setBufsz, api_getBufsz, api_crc32,
api_adler32, api_getQSize, api_un_compress, api_un_zip,
api_g_un_zip]},
@@ -146,8 +146,6 @@ api_deflateInit(Config) when is_list(Config) ->
?m(?BARG, zlib:deflateInit(Z1,default,deflated,-20,8,default)),
?m(?BARG, zlib:deflateInit(Z1,default,deflated,-7,8,default)),
?m(?BARG, zlib:deflateInit(Z1,default,deflated,7,8,default)),
- ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-8,8,default)),
- ?m(?BARG, zlib:deflateInit(Z1,default,deflated,8,8,default)),
?m(?BARG, zlib:deflateInit(Z1,default,deflated,-15,0,default)),
?m(?BARG, zlib:deflateInit(Z1,default,deflated,-15,10,default)),
@@ -169,7 +167,7 @@ api_deflateInit(Config) when is_list(Config) ->
?m(ok, zlib:deflateInit(Z12,default,deflated,-Wbits,8,default)),
?m(ok,zlib:close(Z11)),
?m(ok,zlib:close(Z12))
- end, lists:seq(9, 15)),
+ end, lists:seq(8, 15)),
lists:foreach(fun(MemLevel) ->
?line Z = zlib:open(),
@@ -277,7 +275,7 @@ api_inflateInit(Config) when is_list(Config) ->
?m(ok, zlib:inflateInit(Z12,-Wbits)),
?m(ok,zlib:close(Z11)),
?m(ok,zlib:close(Z12))
- end, lists:seq(9,15)),
+ end, lists:seq(8,15)),
?m(?BARG, zlib:inflateInit(gurka, -15)),
?m(?BARG, zlib:inflateInit(Z1, 7)),
?m(?BARG, zlib:inflateInit(Z1, -7)),
@@ -357,6 +355,39 @@ api_inflate(Config) when is_list(Config) ->
?m({'EXIT',{data_error,_}}, zlib:inflate(Z1, <<2,1,2,1,2>>)),
?m(ok, zlib:close(Z1)).
+api_inflateChunk(doc) -> "Test inflateChunk";
+api_inflateChunk(suite) -> [];
+api_inflateChunk(Config) when is_list(Config) ->
+ ChunkSize = 1024,
+ Data = << <<(I rem 150)>> || I <- lists:seq(1, 3 * ChunkSize) >>,
+ Part1 = binary:part(Data, 0, ChunkSize),
+ Part2 = binary:part(Data, ChunkSize, ChunkSize),
+ Part3 = binary:part(Data, ChunkSize * 2, ChunkSize),
+ ?line Compressed = zlib:compress(Data),
+ ?line Z1 = zlib:open(),
+ ?line zlib:setBufSize(Z1, ChunkSize),
+ ?m(ok, zlib:inflateInit(Z1)),
+ ?m([], zlib:inflateChunk(Z1, <<>>)),
+ ?m({more, Part1}, zlib:inflateChunk(Z1, Compressed)),
+ ?m({more, Part2}, zlib:inflateChunk(Z1)),
+ ?m(Part3, zlib:inflateChunk(Z1)),
+ ?m(ok, zlib:inflateEnd(Z1)),
+
+ ?m(ok, zlib:inflateInit(Z1)),
+ ?m({more, Part1}, zlib:inflateChunk(Z1, Compressed)),
+
+ ?m(ok, zlib:inflateReset(Z1)),
+
+ ?line zlib:setBufSize(Z1, size(Data)),
+ ?m(Data, zlib:inflateChunk(Z1, Compressed)),
+ ?m(ok, zlib:inflateEnd(Z1)),
+
+ ?m(ok, zlib:inflateInit(Z1)),
+ ?m(?BARG, zlib:inflateChunk(gurka, Compressed)),
+ ?m(?BARG, zlib:inflateChunk(Z1, 4384)),
+ ?m({'EXIT',{data_error,_}}, zlib:inflateEnd(Z1)),
+ ?m(ok, zlib:close(Z1)).
+
api_inflateEnd(doc) -> "Test inflateEnd";
api_inflateEnd(suite) -> [];
api_inflateEnd(Config) when is_list(Config) ->
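
The new api_inflateChunk case exercises chunk-wise decompression: judging from the assertions above, zlib:inflateChunk/2 returns {more, Chunk} while output is still pending and the final chunk otherwise. A hedged sketch of a decompression loop built on that behaviour (inflate_all/2 and inflate_loop/3 are illustrative, not library functions):

    inflate_all(Z, Compressed) ->
        ok = zlib:inflateInit(Z),
        inflate_loop(Z, zlib:inflateChunk(Z, Compressed), []).

    inflate_loop(Z, {more, Chunk}, Acc) ->
        inflate_loop(Z, zlib:inflateChunk(Z), [Chunk | Acc]);
    inflate_loop(Z, LastChunk, Acc) ->
        ok = zlib:inflateEnd(Z),
        iolist_to_binary(lists:reverse([LastChunk | Acc])).
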
diff --git a/lib/kernel/vsn.mk b/lib/kernel/vsn.mk
index 15820a0182..c912da0091 100644
--- a/lib/kernel/vsn.mk
+++ b/lib/kernel/vsn.mk
@@ -1 +1 @@
-KERNEL_VSN = 3.1
+KERNEL_VSN = 4.0
diff --git a/lib/megaco/doc/src/megaco.xml b/lib/megaco/doc/src/megaco.xml
index dff1c3afc6..0a8dfe8a13 100644
--- a/lib/megaco/doc/src/megaco.xml
+++ b/lib/megaco/doc/src/megaco.xml
@@ -336,7 +336,7 @@ megaco_incr_timer() = #megaco_incr_timer{}
<tag><c><![CDATA[request_keep_alive_timeout]]></c></tag>
<item>
<p>Specifies the timeout time for the request-keep-alive timer. </p>
- <p>This timer is started when the <em>first</em> reply to an asynchroneous
+ <p>This timer is started when the <em>first</em> reply to an asynchronous
request (issued using the
<seealso marker="megaco#cast">megaco:cast/3</seealso> function)
arrives. As long as this timer is running, replies will
@@ -837,7 +837,7 @@ megaco_incr_timer() = #megaco_incr_timer{}
<tag><c><![CDATA[request_keep_alive_timeout]]></c></tag>
<item>
<p>Specifies the timeout time for the request-keep-alive timer. </p>
- <p>This timer is started when the <em>first</em> reply to an asynchroneous
+ <p>This timer is started when the <em>first</em> reply to an asynchronous
request (issued using the
<seealso marker="megaco#cast">megaco:cast/3</seealso> function)
arrives. As long as this timer is running, replies will
diff --git a/lib/megaco/src/app/megaco.app.src b/lib/megaco/src/app/megaco.app.src
index 6ab85a1bbc..573b1857f6 100644
--- a/lib/megaco/src/app/megaco.app.src
+++ b/lib/megaco/src/app/megaco.app.src
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -113,8 +113,8 @@
{applications, [stdlib, kernel]},
{env, []},
{mod, {megaco_sup, []}},
- {runtime_dependencies, ["stdlib-2.0","runtime_tools-1.8.14","kernel-3.0",
- "et-1.5","erts-6.0","debugger-4.0",
+ {runtime_dependencies, ["stdlib-2.5","runtime_tools-1.8.14","kernel-3.0",
+ "et-1.5","erts-7.0","debugger-4.0",
"asn1-3.0"]}
]}.
diff --git a/lib/megaco/src/app/megaco.appup.src b/lib/megaco/src/app/megaco.appup.src
index 92504e8e87..1c55a92b55 100644
--- a/lib/megaco/src/app/megaco.appup.src
+++ b/lib/megaco/src/app/megaco.appup.src
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -183,11 +183,15 @@
%% |
%% v
%% 3.17.3
+%% |
+%% v
+%% 3.18
%%
%%
{"%VSN%",
[
+ {"3.17.3", [{restart_application,megaco}]},
{"3.17.2", []},
{"3.17.1", [{restart_application,megaco}]},
{"3.17.0.3", [{restart_application,megaco}]},
@@ -202,6 +206,7 @@
}
],
[
+ {"3.17.3", [{restart_application,megaco}]},
{"3.17.2", []},
{"3.17.1", [{restart_application,megaco}]},
{"3.17.0.3", [{restart_application,megaco}]},
diff --git a/lib/megaco/src/engine/megaco_trans_sender.erl b/lib/megaco/src/engine/megaco_trans_sender.erl
index 710fef405a..e07f404289 100644
--- a/lib/megaco/src/engine/megaco_trans_sender.erl
+++ b/lib/megaco/src/engine/megaco_trans_sender.erl
@@ -672,8 +672,7 @@ to(To, Start) ->
%% Time in milli seconds
t() ->
- {A,B,C} = erlang:now(),
- A*1000000000+B*1000+(C div 1000).
+ erlang:monotonic_time(milli_seconds).
warning_msg(F, A) ->
?megaco_warning("Transaction sender: " ++ F, A).
diff --git a/lib/megaco/vsn.mk b/lib/megaco/vsn.mk
index 8687d622e9..ede36e3fe6 100644
--- a/lib/megaco/vsn.mk
+++ b/lib/megaco/vsn.mk
@@ -2,7 +2,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1997-2014. All Rights Reserved.
+# Copyright Ericsson AB 1997-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -18,6 +18,6 @@
# %CopyrightEnd%
APPLICATION = megaco
-MEGACO_VSN = 3.17.3
+MEGACO_VSN = 3.18
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(MEGACO_VSN)$(PRE_VSN)"
diff --git a/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc b/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
index 65b950bd46..127c23e0f7 100644
--- a/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
+++ b/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
@@ -867,6 +867,7 @@ ok
</section>
<section>
+ <marker id="event_handling"></marker>
<title>Mnesia Event Handling</title>
<p>System events and table events are the two categories of events
that Mnesia will generate in various situations.
diff --git a/lib/mnesia/doc/src/mnesia.xml b/lib/mnesia/doc/src/mnesia.xml
index 268dc18e65..856a7594a7 100644
--- a/lib/mnesia/doc/src/mnesia.xml
+++ b/lib/mnesia/doc/src/mnesia.xml
@@ -151,9 +151,9 @@ If a new item is inserted with the same key as
</item>
<item>
<p><c>local_content</c> When an application requires
- tables whose contents is local to each node,
+ tables whose contents are local to each node,
<c>local_content</c> tables may be used. The name of the
- table is known to all Mnesia nodes, but its contents is
+ table is known to all Mnesia nodes, but its contents are
unique on each node. This means that access to such a table
must be done locally. Set the <c>local_content</c> field to
<c>true</c> if you want to enable the <c>local_content</c>
@@ -579,7 +579,7 @@ mnesia:add_table_index(person, age)
<desc>
<p>The tables are backed up to external media using the backup
module <c>BackupMod</c>. Tables with the local contents
- property is being backed up as they exist on the current
+ property are backed up as they exist on the current
node. <c>BackupMod</c> is the default backup callback
module obtained by
<c>mnesia:system_info(backup_module)</c>. See the User's
@@ -863,7 +863,7 @@ mnesia:create_table(person,
{attributes, record_info(fields,person)}]).
</code>
<p>The specification of <c>index</c> and <c>attributes</c> may be
- hard coded as <c>{index, [2]}</c> and
+ hard coded as <c>{index, [4]}</c> and
<c>{attributes, [name, age, address, salary, children]}</c>
respectively.
</p>
@@ -2188,12 +2188,13 @@ mnesia:create_table(employee,
</desc>
</func>
<func>
- <name>subscribe(EventCategory)</name>
+ <name>subscribe(EventCategory) -> {ok, Node} | {error, Reason} </name>
<fsummary>Subscribe to events of type <c>EventCategory</c>.</fsummary>
<desc>
<p>Ensures that a copy of all events of type
<c>EventCategory</c> are sent to the caller. The event
- types available are described in the Mnesia User's Guide.</p>
+ types available are described in the Mnesia User's Guide at <seealso marker="Mnesia_chap5#event_handling">Mnesia Event Handling</seealso>.</p>
+ <p><c>Node</c> is the local node. To subscribe to table events, Mnesia must have a readable local copy of the table on the node.</p>
</desc>
</func>
<func>
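
Since the updated text now spells out the {ok, Node} return and the local-copy requirement, a hedged usage sketch may help; it assumes a person table with a readable local copy, as in the earlier create_table example:

    {ok, _ThisNode} = mnesia:subscribe({table, person, simple}),
    receive
        {mnesia_table_event, Event} ->
            io:format("table event: ~p~n", [Event])
    after 5000 ->
        no_event
    end,
    {ok, _} = mnesia:unsubscribe({table, person, simple}).
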
@@ -2861,11 +2862,12 @@ raise(Name, Amount) ->
</desc>
</func>
<func>
- <name>unsubscribe(EventCategory)</name>
+ <name>unsubscribe(EventCategory) -> {ok, Node} | {error, Reason} </name>
<fsummary>Subscribe to events of type <c>EventCategory</c>.</fsummary>
<desc>
<p>Stops sending events of type
<c>EventCategory</c> to the caller.</p>
+ <p><c>Node</c> is the local node.</p>
</desc>
</func>
<func>
@@ -3017,6 +3019,12 @@ raise(Name, Amount) ->
totally unpredictable.</p>
</item>
<item>
+ <p><c>-mnesia dump_disc_copies_at_startup true | false</c>.
+ If set to false, this disables the dumping of <c>disc_copies</c>
+ tables during startup while tables are being loaded. The default
+ is true.</p>
+ </item>
+ <item>
<p><c>-mnesia dump_log_load_regulation true | false</c>.
Controls if the log dumps should be performed as fast as
possible or if the dumper should do its own load
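
A hedged sketch of using the new parameter; it assumes the usual two ways of passing Mnesia configuration, either on the command line as documented above,

    erl -mnesia dump_disc_copies_at_startup false

or as an application environment value set before Mnesia is started:

    application:set_env(mnesia, dump_disc_copies_at_startup, false),
    ok = mnesia:start().
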
diff --git a/lib/mnesia/doc/src/notes.xml b/lib/mnesia/doc/src/notes.xml
index 18f72f4faf..dc98efbff3 100644
--- a/lib/mnesia/doc/src/notes.xml
+++ b/lib/mnesia/doc/src/notes.xml
@@ -38,7 +38,34 @@
thus constitutes one section in this document. The title of each
section is the version number of Mnesia.</p>
- <section><title>Mnesia 4.12.4</title>
+ <section><title>Mnesia 4.12.5</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fixed race condition in protocol negotiation.</p>
+ <p>
+ Own Id: OTP-12473</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Grammar corrections. (Thanks to Derek Brown)</p>
+ <p>
+ Own Id: OTP-12400</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Mnesia 4.12.4</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
diff --git a/lib/mnesia/src/mnesia.app.src b/lib/mnesia/src/mnesia.app.src
index e755864792..c78a7cba1e 100644
--- a/lib/mnesia/src/mnesia.app.src
+++ b/lib/mnesia/src/mnesia.app.src
@@ -48,6 +48,6 @@
]},
{applications, [kernel, stdlib]},
{mod, {mnesia_sup, []}},
- {runtime_dependencies, ["stdlib-2.0","kernel-3.0","erts-6.0"]}]}.
+ {runtime_dependencies, ["stdlib-2.0","kernel-3.0","erts-7.0"]}]}.
diff --git a/lib/mnesia/src/mnesia.erl b/lib/mnesia/src/mnesia.erl
index 8f14831ad3..b9c2fd915c 100644
--- a/lib/mnesia/src/mnesia.erl
+++ b/lib/mnesia/src/mnesia.erl
@@ -145,7 +145,7 @@
%% Local function in order to avoid external function call
val(Var) ->
case ?catch_val(Var) of
- {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ {'EXIT', _} -> mnesia_lib:other_val(Var);
Value -> Value
end.
@@ -306,6 +306,8 @@ ms() ->
-spec abort(_) -> no_return().
+abort(Reason = {aborted, _}) ->
+ exit(Reason);
abort(Reason) ->
exit({aborted, Reason}).
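
The new first clause keeps an already wrapped reason from being wrapped a second time. A small sketch of the observable difference, assuming only the clauses shown above (the catch merely exposes the exit reason):

    %% With the added clause both calls exit with the same reason:
    {'EXIT', {aborted, no_money}} = (catch mnesia:abort(no_money)),
    {'EXIT', {aborted, no_money}} = (catch mnesia:abort({aborted, no_money})).
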
@@ -807,7 +809,7 @@ next(Tid,Ts,Tab,Key)
tid ->
lock_table(Tid, Ts, Tab, read),
do_fixtable(Tab,Ts),
- New = (catch dirty_next(Tab,Key)),
+ New = ?CATCH(dirty_next(Tab,Key)),
stored_keys(Tab,New,Key,Ts,next,
val({Tab, setorbag}));
_Protocol ->
@@ -833,7 +835,7 @@ prev(Tid,Ts,Tab,Key)
tid ->
lock_table(Tid, Ts, Tab, read),
do_fixtable(Tab,Ts),
- New = (catch dirty_prev(Tab,Key)),
+ New = ?CATCH(dirty_prev(Tab,Key)),
stored_keys(Tab,New,Key,Ts,prev,
val({Tab, setorbag}));
_Protocol ->
@@ -965,7 +967,7 @@ foldl(Fun, Acc, Tab, LockKind) when is_function(Fun) ->
foldl(ActivityId, Opaque, Fun, Acc, Tab, LockKind) ->
{Type, Prev} = init_iteration(ActivityId, Opaque, Tab, LockKind),
- Res = (catch do_foldl(ActivityId, Opaque, Tab, dirty_first(Tab), Fun, Acc, Type, Prev)),
+ Res = ?CATCH(do_foldl(ActivityId, Opaque, Tab, dirty_first(Tab), Fun, Acc, Type, Prev)),
close_iteration(Res, Tab).
do_foldl(A, O, Tab, '$end_of_table', Fun, RAcc, _Type, Stored) ->
@@ -1011,7 +1013,7 @@ foldr(ActivityId, Opaque, Fun, Acc, Tab, LockKind) ->
true -> %% Order doesn't matter for set and bag
TempPrev %% Keep the order so we can use ordsets:del_element
end,
- Res = (catch do_foldr(ActivityId, Opaque, Tab, dirty_last(Tab), Fun, Acc, Type, Prev)),
+ Res = ?CATCH(do_foldr(ActivityId, Opaque, Tab, dirty_last(Tab), Fun, Acc, Type, Prev)),
close_iteration(Res, Tab).
do_foldr(A, O, Tab, '$end_of_table', Fun, RAcc, _Type, Stored) ->
@@ -1626,13 +1628,7 @@ dirty_read(Oid) ->
dirty_read(Tab, Key)
when is_atom(Tab), Tab /= schema ->
-%% case catch ?ets_lookup(Tab, Key) of
-%% {'EXIT', _} ->
- %% Bad luck, we have to perform a real lookup
- dirty_rpc(Tab, mnesia_lib, db_get, [Tab, Key]);
-%% Val ->
-%% Val
-%% end;
+ dirty_rpc(Tab, mnesia_lib, db_get, [Tab, Key]);
dirty_read(Tab, _Key) ->
abort({bad_type, Tab}).
@@ -1905,21 +1901,21 @@ any_table_info(Tab, _Item) ->
abort({bad_type, Tab}).
raw_table_info(Tab, Item) ->
- case ?catch_val({Tab, storage_type}) of
- ram_copies ->
- info_reply(catch ?ets_info(Tab, Item), Tab, Item);
- disc_copies ->
- info_reply(catch ?ets_info(Tab, Item), Tab, Item);
- disc_only_copies ->
- info_reply(catch dets:info(Tab, Item), Tab, Item);
- unknown ->
- bad_info_reply(Tab, Item);
- {'EXIT', _} ->
+ try
+ case ?ets_lookup_element(mnesia_gvar, {Tab, storage_type}, 2) of
+ ram_copies ->
+ info_reply(?ets_info(Tab, Item), Tab, Item);
+ disc_copies ->
+ info_reply(?ets_info(Tab, Item), Tab, Item);
+ disc_only_copies ->
+ info_reply(dets:info(Tab, Item), Tab, Item);
+ unknown ->
+ bad_info_reply(Tab, Item)
+ end
+ catch error:_ ->
bad_info_reply(Tab, Item)
end.
-info_reply({'EXIT', _Reason}, Tab, Item) ->
- bad_info_reply(Tab, Item);
info_reply({error, _Reason}, Tab, Item) ->
bad_info_reply(Tab, Item);
info_reply(Val, _Tab, _Item) ->
@@ -2063,9 +2059,8 @@ storage_count(T, {U, R, D, DO}) ->
end.
system_info(Item) ->
- case catch system_info2(Item) of
- {'EXIT',Error} -> abort(Error);
- Other -> Other
+ try system_info2(Item)
+ catch _:Error -> abort(Error)
end.
system_info2(all) ->
@@ -2171,7 +2166,7 @@ system_info2(version) ->
Version;
false ->
%% Ensure that it does not match
- {mnesia_not_loaded, node(), now()}
+ {mnesia_not_loaded, node(), erlang:timestamp()}
end;
Version ->
Version
@@ -2381,11 +2376,10 @@ del_table_index(Tab, Ix) ->
mnesia_schema:del_table_index(Tab, Ix).
transform_table(Tab, Fun, NewA) ->
- case catch val({Tab, record_name}) of
- {'EXIT', Reason} ->
- mnesia:abort(Reason);
- OldRN ->
- mnesia_schema:transform_table(Tab, Fun, NewA, OldRN)
+ try val({Tab, record_name}) of
+ OldRN -> mnesia_schema:transform_table(Tab, Fun, NewA, OldRN)
+ catch exit:Reason ->
+ mnesia:abort(Reason)
end.
transform_table(Tab, Fun, NewA, NewRN) ->
@@ -2796,7 +2790,7 @@ pre_qlc(Opts, Tab) ->
end.
post_qlc(Tab) ->
- case catch get(mnesia_activity_state) of
+ case get(mnesia_activity_state) of
{_,#tid{},_} -> ok;
_ ->
case ?catch_val({Tab, setorbag}) of
diff --git a/lib/mnesia/src/mnesia.hrl b/lib/mnesia/src/mnesia.hrl
index c8010d5466..86b6fd908f 100644
--- a/lib/mnesia/src/mnesia.hrl
+++ b/lib/mnesia/src/mnesia.hrl
@@ -39,7 +39,12 @@
-define(ets_delete_table(Tab), ets:delete(Tab)).
-define(ets_fixtable(Tab, Bool), ets:fixtable(Tab, Bool)).
--define(catch_val(Var), (catch ?ets_lookup_element(mnesia_gvar, Var, 2))).
+
+-define(SAFE(OP), try (OP) catch error:_ -> ok end).
+-define(CATCH(OP), try (OP) catch _:_Reason -> {'EXIT', _Reason} end).
+
+-define(catch_val(Var), (try ?ets_lookup_element(mnesia_gvar, Var, 2)
+ catch error:_ -> {'EXIT', {badarg, []}} end)).
%% It's important that counter is first, since we compare tid's
@@ -53,7 +58,9 @@
up_stores = [], %% list of upper layer stores for nested trans
level = 1}). %% transaction level
--define(unique_cookie, {erlang:now(), node()}).
+-define(unique_cookie, {{erlang:monotonic_time() + erlang:time_offset(),
+ erlang:unique_integer(),1},
+ node()}).
-record(cstruct, {name, % Atom
type = set, % set | bag
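
The new ?SAFE and ?CATCH macros are try-based replacements for the bare catch idiom used throughout Mnesia: ?CATCH converts any exception into an {'EXIT', Reason} tuple so callers that match on that shape keep working, while ?SAFE simply swallows errors. A minimal sketch of the intended behaviour, reusing the definitions above (demo/0 is illustrative):

    -define(SAFE(OP), try (OP) catch error:_ -> ok end).
    -define(CATCH(OP), try (OP) catch _:_Reason -> {'EXIT', _Reason} end).

    demo() ->
        {'EXIT', badarg} = ?CATCH(erlang:error(badarg)),
        %% ets:insert on a missing table raises error:badarg; ?SAFE hides it.
        ok = ?SAFE(ets:insert(no_such_table, {key, value})).
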
diff --git a/lib/mnesia/src/mnesia_bup.erl b/lib/mnesia/src/mnesia_bup.erl
index 3b084e7371..3fee952d77 100644
--- a/lib/mnesia/src/mnesia_bup.erl
+++ b/lib/mnesia/src/mnesia_bup.erl
@@ -78,24 +78,21 @@
%% BunchOfRecords will be [] when the iteration is done.
iterate(Mod, Fun, Opaque, Acc) ->
R = #restore{bup_module = Mod, bup_data = Opaque},
- case catch read_schema_section(R) of
- {error, Reason} ->
- {error, Reason};
- {R2, {Header, Schema, Rest}} ->
- case catch iter(R2, Header, Schema, Fun, Acc, Rest) of
- {ok, R3, Res} ->
- catch safe_apply(R3, close_read, [R3#restore.bup_data]),
- {ok, Res};
- {error, Reason} ->
- catch safe_apply(R2, close_read, [R2#restore.bup_data]),
- {error, Reason};
- {'EXIT', Pid, Reason} ->
- catch safe_apply(R2, close_read, [R2#restore.bup_data]),
- {error, {'EXIT', Pid, Reason}};
- {'EXIT', Reason} ->
- catch safe_apply(R2, close_read, [R2#restore.bup_data]),
- {error, {'EXIT', Reason}}
- end
+ try read_schema_section(R) of
+ {R2, {Header, Schema, Rest}} ->
+ try iter(R2, Header, Schema, Fun, Acc, Rest) of
+ {ok, R3, Res} ->
+ close_read(R3),
+ {ok, Res}
+ catch throw:Err ->
+ close_read(R2),
+ Err;
+ _:Reason ->
+ close_read(R2),
+ {error, {Reason, erlang:get_stacktrace()}}
+ end
+ catch throw:{error,_} = Err ->
+ Err
end.
iter(R, Header, Schema, Fun, Acc, []) ->
@@ -116,7 +113,7 @@ safe_apply(R, write, [_, Items]) when Items =:= [] ->
safe_apply(R, What, Args) ->
Abort = fun(Re) -> abort_restore(R, What, Args, Re) end,
Mod = R#restore.bup_module,
- case catch apply(Mod, What, Args) of
+ try apply(Mod, What, Args) of
{ok, Opaque, Items} when What =:= read ->
{R#restore{bup_data = Opaque}, Items};
{ok, Opaque} when What =/= read->
@@ -125,16 +122,19 @@ safe_apply(R, What, Args) ->
Abort(Re);
Re ->
Abort(Re)
+ catch _:Re ->
+ Abort(Re)
end.
-abort_restore(R, What, Args, Reason) ->
- Mod = R#restore.bup_module,
- Opaque = R#restore.bup_data,
+abort_restore(R = #restore{bup_module=Mod}, What, Args, Reason) ->
dbg_out("Restore aborted. ~p:~p~p -> ~p~n",
[Mod, What, Args, Reason]),
- catch apply(Mod, close_read, [Opaque]),
+ close_read(R),
throw({error, Reason}).
+close_read(#restore{bup_module=Mod, bup_data=Opaque}) ->
+ ?SAFE(Mod:close_read(Opaque)).
+
fallback_to_schema() ->
Fname = fallback_bup(),
fallback_to_schema(Fname).
@@ -145,40 +145,30 @@ fallback_to_schema(Fname) ->
{error, Reason} ->
{error, Reason};
Schema ->
- case catch lookup_schema(schema, Schema) of
- {error, _} ->
- {error, "No schema in fallback"};
- List ->
- {ok, fallback, List}
+ try lookup_schema(schema, Schema) of
+ List -> {ok, fallback, List}
+ catch throw:_ ->
+ {error, "No schema in fallback"}
end
end.
%% Opens Opaque reads schema and then close
read_schema(Mod, Opaque) ->
R = #restore{bup_module = Mod, bup_data = Opaque},
- case catch read_schema_section(R) of
- {error, Reason} ->
- {error, Reason};
- {R2, {_Header, Schema, _}} ->
- catch safe_apply(R2, close_read, [R2#restore.bup_data]),
- Schema
+ try read_schema_section(R) of
+ {_, {_Header, Schema, _}} -> Schema
+ catch throw:{error,_} = Error ->
+ Error
+ after close_read(R)
end.
%% Open backup media and extract schema
%% rewind backup media and leave it open
%% Returns {R, {Header, Schema}}
read_schema_section(R) ->
- case catch do_read_schema_section(R) of
- {'EXIT', Reason} ->
- catch safe_apply(R, close_read, [R#restore.bup_data]),
- {error, {'EXIT', Reason}};
- {error, Reason} ->
- catch safe_apply(R, close_read, [R#restore.bup_data]),
- {error, Reason};
- {R2, {H, Schema, Rest}} ->
- Schema2 = convert_schema(H#log_header.log_version, Schema),
- {R2, {H, Schema2, Rest}}
- end.
+ {R2, {H, Schema, Rest}} = do_read_schema_section(R),
+ Schema2 = convert_schema(H#log_header.log_version, Schema),
+ {R2, {H, Schema2, Rest}}.
do_read_schema_section(R) ->
R2 = safe_apply(R, open_read, [R#restore.bup_data]),
@@ -201,7 +191,7 @@ do_read_schema_section(R, {ok, B, _C, Rest}, Acc) ->
{R, {B, Acc, Rest}};
do_read_schema_section(_R, {error, Reason}, _Acc) ->
- {error, Reason}.
+ throw({error, Reason}).
verify_header([H | RawSchema]) when is_record(H, log_header) ->
Current = mnesia_log:backup_log_header(),
@@ -218,7 +208,7 @@ verify_header([H | RawSchema]) when is_record(H, log_header) ->
{error, {"Bad kind of header. Cannot be used as backup.", H}}
end;
verify_header(RawSchema) ->
- {error, {"Missing header. Cannot be used as backup.", catch hd(RawSchema)}}.
+ {error, {"Missing header. Cannot be used as backup.", ?CATCH(hd(RawSchema))}}.
refresh_cookie(Schema, NewCookie) ->
case lists:keysearch(schema, 2, Schema) of
@@ -345,7 +335,7 @@ create_schema(Ns, ok) ->
Str = mk_str(),
File = mnesia_lib:dir(Str),
file:delete(File),
- case catch make_initial_backup(Ns, File, Mod) of
+ try make_initial_backup(Ns, File, Mod) of
{ok, _Res} ->
case do_install_fallback(File, Mod) of
ok ->
@@ -353,8 +343,8 @@ create_schema(Ns, ok) ->
ok;
{error, Reason} ->
{error, Reason}
- end;
- {error, Reason} ->
+ end
+ catch throw:{error, Reason} ->
{error, Reason}
end
end
@@ -368,7 +358,7 @@ create_schema(_Ns, Reason) ->
{error, Reason}.
mk_str() ->
- Now = [integer_to_list(I) || I <- tuple_to_list(now())],
+ Now = integer_to_list(erlang:unique_integer([positive])),
lists:concat([node()] ++ Now ++ ".TMP").
make_initial_backup(Ns, Opaque, Mod) ->
@@ -384,10 +374,11 @@ make_initial_backup(Ns, Opaque, Mod) ->
do_apply(_, write, [_, Items], Opaque) when Items =:= [] ->
Opaque;
do_apply(Mod, What, Args, _Opaque) ->
- case catch apply(Mod, What, Args) of
+ try apply(Mod, What, Args) of
{ok, Opaque2} -> Opaque2;
- {error, Reason} -> throw({error, Reason});
- {'EXIT', Reason} -> throw({error, {'EXIT', Reason}})
+ {error, Reason} -> throw({error, Reason})
+ catch _:Reason ->
+ throw({error, {'EXIT', Reason}})
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -425,11 +416,11 @@ do_install_fallback(_Opaque, Args) ->
{error, {badarg, Args}}.
check_fallback_args([Arg | Tail], FA) ->
- case catch check_fallback_arg_type(Arg, FA) of
- {'EXIT', _Reason} ->
- {error, {badarg, Arg}};
+ try check_fallback_arg_type(Arg, FA) of
FA2 ->
check_fallback_args(Tail, FA2)
+ catch error:_ ->
+ {error, {badarg, Arg}}
end;
check_fallback_args([], FA) ->
{ok, FA}.
@@ -484,7 +475,7 @@ install_fallback_master(ClientPid, FA) ->
State = {start, FA},
Opaque = FA#fallback_args.opaque,
Mod = FA#fallback_args.module,
- Res = (catch iterate(Mod, fun restore_recs/4, Opaque, State)),
+ Res = iterate(Mod, fun restore_recs/4, Opaque, State),
unlink(ClientPid),
ClientPid ! {self(), Res},
exit(shutdown).
@@ -496,9 +487,7 @@ restore_recs(Recs, Header, Schema, {start, FA}) ->
%% No records in backup
Schema2 = convert_schema(Header#log_header.log_version, Schema),
CreateList = lookup_schema(schema, Schema2),
- case catch mnesia_schema:list2cs(CreateList) of
- {'EXIT', Reason} ->
- throw({error, {"Bad schema in restore_recs", Reason}});
+ try mnesia_schema:list2cs(CreateList) of
Cs ->
Ns = get_fallback_nodes(FA, Cs#cstruct.disc_copies),
global:set_lock({{mnesia_table_lock, schema}, self()}, Ns, infinity),
@@ -508,6 +497,8 @@ restore_recs(Recs, Header, Schema, {start, FA}) ->
Res = restore_recs(Recs, Header, Schema2, Pids),
global:del_lock({{mnesia_table_lock, schema}, self()}, Ns),
Res
+ catch _:Reason ->
+ throw({error, {"Bad schema in restore_recs", Reason}})
end;
restore_recs([], _Header, _Schema, Pids) ->
@@ -579,45 +570,46 @@ fallback_tmp_name() -> "FALLBACK.TMP".
fallback_receiver(Master, FA) ->
process_flag(trap_exit, true),
- case catch register(mnesia_fallback, self()) of
- {'EXIT', _} ->
- Reason = {already_exists, node()},
- local_fallback_error(Master, Reason);
- true ->
- FA2 = check_fallback_dir(Master, FA),
- Bup = FA2#fallback_args.fallback_bup,
- case mnesia_lib:exists(Bup) of
- true ->
- Reason2 = {already_exists, node()},
- local_fallback_error(Master, Reason2);
- false ->
- Mod = mnesia_backup,
- Tmp = FA2#fallback_args.fallback_tmp,
- R = #restore{mode = replace,
- bup_module = Mod,
- bup_data = Tmp},
- file:delete(Tmp),
- case catch fallback_receiver_loop(Master, R, FA2, schema) of
- {error, Reason} ->
- local_fallback_error(Master, Reason);
- Other ->
- exit(Other)
- end
- end
- end.
+ Res = try
+ register(mnesia_fallback, self()),
+ FA2 = check_fallback_dir(FA),
+ Bup = FA2#fallback_args.fallback_bup,
+ false = mnesia_lib:exists(Bup),
+ Mod = mnesia_backup,
+ Tmp = FA2#fallback_args.fallback_tmp,
+ R = #restore{mode = replace,
+ bup_module = Mod,
+ bup_data = Tmp},
+ file:delete(Tmp),
+ fallback_receiver_loop(Master, R, FA2, schema)
+ catch
+ error:_ ->
+ Reason = {already_exists, node()},
+ local_fallback_error(Master, Reason);
+ throw:{error, Reason} ->
+ local_fallback_error(Master, Reason)
+ end,
+ exit(Res).
local_fallback_error(Master, Reason) ->
Master ! {self(), {error, Reason}},
unlink(Master),
exit(Reason).
+
check_fallback_dir(Master, FA) ->
+ try check_fallback_dir(FA)
+ catch throw:{error,Reason} ->
+ local_fallback_error(Master, Reason)
+ end.
+
+check_fallback_dir(FA) ->
case mnesia:system_info(schema_location) of
ram ->
Reason = {has_no_disc, node()},
- local_fallback_error(Master, Reason);
+ throw({error, Reason});
_ ->
- Dir = check_fallback_dir_arg(Master, FA),
+ Dir = check_fallback_dir_arg(FA),
Bup = filename:join([Dir, fallback_name()]),
Tmp = filename:join([Dir, fallback_tmp_name()]),
FA#fallback_args{fallback_bup = Bup,
@@ -625,22 +617,20 @@ check_fallback_dir(Master, FA) ->
mnesia_dir = Dir}
end.
-check_fallback_dir_arg(Master, FA) ->
+check_fallback_dir_arg(FA) ->
case FA#fallback_args.use_default_dir of
true ->
mnesia_lib:dir();
false when FA#fallback_args.scope =:= local ->
Dir = FA#fallback_args.mnesia_dir,
- case catch mnesia_monitor:do_check_type(dir, Dir) of
- {'EXIT', _R} ->
+ try mnesia_monitor:do_check_type(dir, Dir)
+ catch _:_ ->
Reason = {badarg, {dir, Dir}, node()},
- local_fallback_error(Master, Reason);
- AbsDir->
- AbsDir
- end;
+ throw({error, Reason})
+ end;
false when FA#fallback_args.scope =:= global ->
Reason = {combine_error, global, dir, node()},
- local_fallback_error(Master, Reason)
+ throw({error, Reason})
end.
fallback_receiver_loop(Master, R, FA, State) ->
@@ -666,7 +656,7 @@ fallback_receiver_loop(Master, R, FA, State) ->
Bup = FA#fallback_args.fallback_bup,
Tmp = FA#fallback_args.fallback_tmp,
throw_bad_res(ok, file:rename(Tmp, Bup)),
- catch mnesia_lib:set(active_fallback, true),
+ ?SAFE(mnesia_lib:set(active_fallback, true)),
?eval_debug_fun({?MODULE, fallback_receiver_loop, post_swap}, []),
Master ! {self(), ok},
fallback_receiver_loop(Master, R, FA, stop);
@@ -697,7 +687,7 @@ throw_bad_res(_Expected, Actual) -> throw({error, Actual}).
tm_fallback_start(IgnoreFallback) ->
mnesia_schema:lock_schema(),
Res = do_fallback_start(fallback_exists(), IgnoreFallback),
- mnesia_schema: unlock_schema(),
+ mnesia_schema:unlock_schema(),
case Res of
ok -> ok;
{error, Reason} -> exit(Reason)
@@ -715,9 +705,9 @@ do_fallback_start(true, false) ->
BupFile = fallback_bup(),
Mod = mnesia_backup,
LocalTabs = ?ets_new_table(mnesia_local_tables, [set, public, {keypos, 2}]),
- case catch iterate(Mod, fun restore_tables/4, BupFile, {start, LocalTabs}) of
+ case iterate(Mod, fun restore_tables/4, BupFile, {start, LocalTabs}) of
{ok, _Res} ->
- catch dets:close(schema),
+ ?SAFE(dets:close(schema)),
TmpSchema = mnesia_lib:tab2tmp(schema),
DatSchema = mnesia_lib:tab2dat(schema),
AllLT = ?ets_match_object(LocalTabs, '_'),
@@ -733,8 +723,6 @@ do_fallback_start(true, false) ->
{error, {"Cannot start from fallback. Rename error.", Reason}}
end;
{error, Reason} ->
- {error, {"Cannot start from fallback", Reason}};
- {'EXIT', Reason} ->
{error, {"Cannot start from fallback", Reason}}
end.
@@ -996,10 +984,10 @@ uninstall_fallback_master(ClientPid, FA) ->
case fallback_to_schema(Bup) of
{ok, fallback, List} ->
Cs = mnesia_schema:list2cs(List),
- case catch get_fallback_nodes(FA, Cs#cstruct.disc_copies) of
+ try get_fallback_nodes(FA, Cs#cstruct.disc_copies) of
Ns when is_list(Ns) ->
- do_uninstall(ClientPid, Ns, FA);
- {error, Reason} ->
+ do_uninstall(ClientPid, Ns, FA)
+ catch throw:{error, Reason} ->
local_fallback_error(ClientPid, Reason)
end;
{error, Reason} ->
@@ -1042,13 +1030,13 @@ local_uninstall_fallback(Master, FA) ->
%% Don't trap exit
register(mnesia_fallback, self()), % May exit
- FA2 = check_fallback_dir(Master, FA), % May exit
+ FA2 = check_fallback_dir(Master, FA), % May exit
Master ! {self(), started},
receive
{Master, do_uninstall} ->
?eval_debug_fun({?MODULE, uninstall_fallback2, pre_delete}, []),
- catch mnesia_lib:set(active_fallback, false),
+ ?SAFE(mnesia_lib:set(active_fallback, false)),
Tmp = FA2#fallback_args.fallback_tmp,
Bup = FA2#fallback_args.fallback_bup,
file:delete(Tmp),
@@ -1071,10 +1059,8 @@ rec_uninstall(ClientPid, [Pid | Pids], AccRes) ->
{Pid, BadRes} ->
rec_uninstall(ClientPid, Pids, BadRes)
end;
-rec_uninstall(ClientPid, [], Res) ->
- ClientPid ! {self(), Res},
- unlink(ClientPid),
- exit(normal).
+rec_uninstall(_, [], Res) ->
+ Res.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Backup traversal
@@ -1125,12 +1111,11 @@ do_traverse_backup(ClientPid, Source, SourceMod, Target, TargetMod, Fun, Acc) ->
Iter =
if
TargetMod =/= read_only ->
- case catch do_apply(TargetMod, open_write, [Target], Target) of
- {error, Error} ->
+ try do_apply(TargetMod, open_write, [Target], Target)
+ catch throw:{error, Error} ->
unlink(ClientPid),
ClientPid ! {iter_done, self(), {error, Error}},
- exit(Error);
- Else -> Else
+ exit(Error)
end;
true ->
ignore
@@ -1139,16 +1124,16 @@ do_traverse_backup(ClientPid, Source, SourceMod, Target, TargetMod, Fun, Acc) ->
Res =
case iterate(SourceMod, fun trav_apply/4, Source, A) of
{ok, {iter, _, Acc2, _, Iter2}} when TargetMod =/= read_only ->
- case catch do_apply(TargetMod, commit_write, [Iter2], Iter2) of
- {error, Reason} ->
- {error, Reason};
- _ ->
- {ok, Acc2}
+ try
+ do_apply(TargetMod, commit_write, [Iter2], Iter2),
+ {ok, Acc2}
+ catch throw:{error, Reason} ->
+ {error, Reason}
end;
{ok, {iter, _, Acc2, _, _}} ->
{ok, Acc2};
{error, Reason} when TargetMod =/= read_only->
- catch do_apply(TargetMod, abort_write, [Iter], Iter),
+ ?CATCH(do_apply(TargetMod, abort_write, [Iter], Iter)),
{error, {"Backup traversal failed", Reason}};
{error, Reason} ->
{error, {"Backup traversal failed", Reason}}
diff --git a/lib/mnesia/src/mnesia_checkpoint.erl b/lib/mnesia/src/mnesia_checkpoint.erl
index 173e3be2f5..0a3ea8d769 100644
--- a/lib/mnesia/src/mnesia_checkpoint.erl
+++ b/lib/mnesia/src/mnesia_checkpoint.erl
@@ -68,12 +68,12 @@
-import(mnesia_lib, [add/2, del/2, set/2, unset/1]).
-import(mnesia_lib, [dbg_out/2]).
--record(checkpoint_args, {name = {now(), node()},
+-record(checkpoint_args, {name = {erlang:unique_integer([positive]), node()},
allow_remote = true,
ram_overrides_dump = false,
nodes = [],
node = node(),
- now = now(),
+ now, %% unused
cookie = ?unique_cookie,
min = [],
max = [],
@@ -128,7 +128,7 @@ tm_enter_pending([], Pending) ->
Pending;
tm_enter_pending([Tab | Tabs], Pending) ->
%% io:format("Add ~p ~p ~p~n",[Tab, Pending, hd(tl(element(2, process_info(self(), current_stacktrace))))]),
- catch ?ets_insert(Tab, Pending),
+ ?SAFE(?ets_insert(Tab, Pending)),
tm_enter_pending(Tabs, Pending).
tm_exit_pending(Tid) ->
@@ -427,22 +427,22 @@ check_tables(Cp) ->
arrange_retainers(Cp, Overriders, AllTabs) ->
R = #retainer{cp_name = Cp#checkpoint_args.name},
- case catch [R#retainer{tab_name = Tab,
- writers = select_writers(Cp, Tab)}
- || Tab <- AllTabs] of
- {'EXIT', Reason} ->
- {error, Reason};
+ try [R#retainer{tab_name = Tab,
+ writers = select_writers(Cp, Tab)}
+ || Tab <- AllTabs] of
Retainers ->
{ok, Cp#checkpoint_args{ram_overrides_dump = Overriders,
- retainers = Retainers,
- nodes = writers(Retainers)}}
+ retainers = Retainers,
+ nodes = writers(Retainers)}}
+ catch throw:Reason ->
+ {error, Reason}
end.
select_writers(Cp, Tab) ->
case filter_remote(Cp, val({Tab, active_replicas})) of
[] ->
- exit({"Cannot prepare checkpoint (replica not available)",
- [Tab, Cp#checkpoint_args.name]});
+ throw({"Cannot prepare checkpoint (replica not available)",
+ [Tab, Cp#checkpoint_args.name]});
Writers ->
This = node(),
case {lists:member(Tab, Cp#checkpoint_args.max),
@@ -492,12 +492,12 @@ check_prep([], Name, Nodes, IgnoreNew) ->
collect_pending(Name, Nodes, IgnoreNew) ->
case rpc:multicall(Nodes, ?MODULE, call, [Name, collect_pending]) of
{Replies, []} ->
- case catch ?ets_new_table(mnesia_union, [bag]) of
- {'EXIT', Reason} -> %% system limit
+ try
+ UnionTab = ?ets_new_table(mnesia_union, [bag]),
+ compute_union(Replies, Nodes, Name, UnionTab, IgnoreNew)
+ catch error:Reason -> %% system limit
Msg = "Cannot create an ets table pending union",
- {error, {system_limit, Msg, Reason}};
- UnionTab ->
- compute_union(Replies, Nodes, Name, UnionTab, IgnoreNew)
+ {error, {system_limit, Msg, Reason}}
end;
{_, BadNodes} ->
deactivate(Nodes, Name),
@@ -1170,7 +1170,7 @@ iterate(Name, Tab, Fun, Acc, Source, Val) ->
{error, Reason};
{ok, Iter, Pid} ->
link(Pid), % We don't want any pending fixtable's
- Res = (catch iter(Fun, Acc, Iter)),
+ Res = ?CATCH(iter(Fun, Acc, Iter)),
unlink(Pid),
call(Name, {iter_end, Iter}),
case Res of
@@ -1246,7 +1246,7 @@ system_code_change(Cp, _Module, _OldVsn, _Extra) ->
val(Var) ->
case ?catch_val(Var) of
- {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
- _VaLuE_ -> _VaLuE_
+ {'EXIT', _} -> mnesia_lib:other_val(Var);
+ _VaLuE_ -> _VaLuE_
end.
diff --git a/lib/mnesia/src/mnesia_controller.erl b/lib/mnesia/src/mnesia_controller.erl
index 5a9bae54da..b9d3779e9a 100644
--- a/lib/mnesia/src/mnesia_controller.erl
+++ b/lib/mnesia/src/mnesia_controller.erl
@@ -51,6 +51,7 @@
force_load_table/1,
async_dump_log/1,
sync_dump_log/1,
+ snapshot_dcd/1,
connect_nodes/1,
connect_nodes/2,
wait_for_schema_commit_lock/0,
@@ -139,7 +140,8 @@ max_loaders() ->
-record(block_controller, {owner}).
-record(dump_log, {initiated_by,
- opt_reply_to
+ opt_reply_to,
+ operation = dump_log
}).
-record(net_load, {table,
@@ -184,7 +186,7 @@ max_loaders() ->
val(Var) ->
case ?catch_val(Var) of
- {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ {'EXIT', _} -> mnesia_lib:other_val(Var);
Value -> Value
end.
@@ -201,6 +203,15 @@ async_dump_log(InitBy) ->
?SERVER_NAME ! {async_dump_log, InitBy},
ok.
+snapshot_dcd(Tables) when is_list(Tables) ->
+ case [T || T <- Tables,
+ mnesia_lib:storage_type_at_node(node(), T) =/= disc_copies] of
+ [] ->
+ call({snapshot_dcd, Tables});
+ BadTabs ->
+ {error, {not_disc_copies, BadTabs}}
+ end.
+
%% Wait for tables to be active
%% If needed, we will wait for Mnesia to start
%% If Mnesia stops, we will wait for Mnesia to restart
@@ -230,9 +241,7 @@ do_wait_for_tables(Tabs, Timeout) ->
end.
reply_wait(Tabs) ->
- case catch mnesia_lib:active_tables() of
- {'EXIT', _} ->
- {error, {node_not_running, node()}};
+ try mnesia_lib:active_tables() of
Active when is_list(Active) ->
case Tabs -- Active of
[] ->
@@ -240,6 +249,7 @@ reply_wait(Tabs) ->
BadTabs ->
{timeout, BadTabs}
end
+ catch exit:_ -> {error, {node_not_running, node()}}
end.
wait_for_tables_init(From, Tabs) ->
@@ -250,13 +260,12 @@ wait_for_tables_init(From, Tabs) ->
exit(normal).
wait_for_init(From, Tabs, Init) ->
- case catch link(Init) of
- {'EXIT', _} ->
- %% Mnesia is not started
- {error, {node_not_running, node()}};
+ try link(Init) of
true when is_pid(Init) ->
cast({sync_tabs, Tabs, self()}),
rec_tabs(Tabs, Tabs, From, Init)
+ catch error:_ -> %% Mnesia is not started
+ {error, {node_not_running, node()}}
end.
sync_reply(Waiter, Tab) ->
@@ -332,7 +341,7 @@ get_network_copy(Tab, Cs) ->
%% might be solved by using monitor in subscr instead.
process_flag(trap_exit, true),
Load = load_table_fun(Work),
- Res = (catch Load()),
+ Res = ?CATCH(Load()),
process_flag(trap_exit, false),
call({del_other, self()}),
case Res of
@@ -581,11 +590,8 @@ call(Msg) ->
end.
remote_call(Node, Func, Args) ->
- case catch gen_server:call({?MODULE, Node}, {Func, Args, self()}, infinity) of
- {'EXIT', Error} ->
- {error, Error};
- Else ->
- Else
+ try gen_server:call({?MODULE, Node}, {Func, Args, self()}, infinity)
+ catch exit:Error -> {error, Error}
end.
multicall(Nodes, Msg) ->
@@ -646,6 +652,15 @@ handle_call({sync_dump_log, InitBy}, From, State) ->
State2 = add_worker(Worker, State),
noreply(State2);
+handle_call({snapshot_dcd, Tables}, From, State) ->
+ Worker = #dump_log{initiated_by = user,
+ opt_reply_to = From,
+ operation = fun() ->
+ mnesia_dumper:snapshot_dcd(Tables)
+ end},
+ State2 = add_worker(Worker, State),
+ noreply(State2);
+
handle_call(wait_for_schema_commit_lock, From, State) ->
Worker = #schema_commit_lock{owner = From},
State2 = add_worker(Worker, State),
@@ -657,7 +672,7 @@ handle_call(block_controller, From, State) ->
noreply(State2);
handle_call({update,Fun}, From, State) ->
- Res = (catch Fun()),
+ Res = ?CATCH(Fun()),
reply(From, Res),
noreply(State);
@@ -1236,7 +1251,7 @@ handle_info(#sender_done{worker_pid=Pid, worker_res=Res}, State) ->
end;
handle_info({'EXIT', Pid, R}, State) when Pid == State#state.supervisor ->
- catch set(mnesia_status, stopping),
+ ?SAFE(set(mnesia_status, stopping)),
case State#state.dumper_pid of
undefined ->
dbg_out("~p was ~p~n", [?SERVER_NAME, R]),
@@ -1460,9 +1475,9 @@ orphan_tables([], _, _, LocalOrphans, RemoteMasters) ->
node_has_tabs([Tab | Tabs], Node, State) when Node /= node() ->
State2 =
- case catch update_whereabouts(Tab, Node, State) of
- State1 = #state{} -> State1;
- {'EXIT', R} -> %% Tab was just deleted?
+ try update_whereabouts(Tab, Node, State) of
+ State1 = #state{} -> State1
+ catch exit:R -> %% Tab was just deleted?
case ?catch_val({Tab, cstruct}) of
{'EXIT', _} -> State; % yes
_ -> erlang:error(R)
@@ -1748,22 +1763,17 @@ change_table_majority(Cs) ->
update_where_to_wlock(Tab) ->
WNodes = val({Tab, where_to_write}),
- Majority = case catch val({Tab, majority}) of
- true -> true;
- _ -> false
- end,
+ Majority = ?catch_val({Tab, majority}) == true,
set({Tab, where_to_wlock}, {WNodes, Majority}).
%% node To now has tab loaded, but this must be undone
%% This code is rpc:call'ed from the tab_copier process
%% when it has *not* released its table lock
unannounce_add_table_copy(Tab, To) ->
- catch del_active_replica(Tab, To),
- case catch val({Tab , where_to_read}) of
- To ->
- mnesia_lib:set_remote_where_to_read(Tab);
- _ ->
- ignore
+ ?SAFE(del_active_replica(Tab, To)),
+ try To = val({Tab , where_to_read}),
+ mnesia_lib:set_remote_where_to_read(Tab)
+ catch _:_ -> ignore
end.
user_sync_tab(Tab) ->
@@ -2089,7 +2099,12 @@ start_remote_sender(Node, Tab, Receiver, Storage) ->
dump_and_reply(ReplyTo, Worker) ->
%% No trap_exit, die intentionally instead
- Res = mnesia_dumper:opt_dump_log(Worker#dump_log.initiated_by),
+ Res = case Worker#dump_log.operation of
+ dump_log ->
+ mnesia_dumper:opt_dump_log(Worker#dump_log.initiated_by);
+ F when is_function(F, 0) ->
+ F()
+ end,
ReplyTo ! #dumper_done{worker_pid = self(),
worker_res = Res},
unlink(ReplyTo),
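A minimal sketch of how the new snapshot_dcd/1 entry point above is meant to be driven (illustration only, not code from the patch; 'foo' is a placeholder table name, and the 'dumped' return value is assumed from the dump worker's result):

    %% Hypothetical caller of the new API added in this commit.
    snapshot_foo() ->
        case mnesia_controller:snapshot_dcd([foo]) of
            dumped ->
                ok;                                %% table written to foo.DCD
            {error, {not_disc_copies, Bad}} ->
                {error, Bad}                       %% rejected before queueing
        end.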
diff --git a/lib/mnesia/src/mnesia_dumper.erl b/lib/mnesia/src/mnesia_dumper.erl
index 14665797a0..693f20dbc2 100644
--- a/lib/mnesia/src/mnesia_dumper.erl
+++ b/lib/mnesia/src/mnesia_dumper.erl
@@ -34,11 +34,13 @@
-export([
get_log_writes/0,
incr_log_writes/0,
+ needs_dump_ets/1,
raw_dump_table/2,
raw_named_dump_table/2,
start_regulator/0,
opt_dump_log/1,
- update/3
+ update/3,
+ snapshot_dcd/1
]).
%% Internal stuff
@@ -99,6 +101,19 @@ opt_dump_log(InitBy) ->
end,
perform_dump(InitBy, Reg).
+snapshot_dcd(Tables) ->
+ lists:foreach(
+ fun(Tab) ->
+ case mnesia_lib:storage_type_at_node(node(), Tab) of
+ disc_copies ->
+ mnesia_log:ets2dcd(Tab);
+ _ ->
+ %% Storage type was checked before queueing the op, though
+ skip
+ end
+ end, Tables),
+ dumped.
+
%% Scan for decisions
perform_dump(InitBy, Regulator) when InitBy == scan_decisions ->
?eval_debug_fun({?MODULE, perform_dump}, [InitBy]),
@@ -122,7 +137,7 @@ perform_dump(InitBy, Regulator) ->
U = mnesia_monitor:get_env(dump_log_update_in_place),
Cont = mnesia_log:init_log_dump(),
mnesia_recover:sync(),
- case catch do_perform_dump(Cont, U, InitBy, Regulator, undefined) of
+ try do_perform_dump(Cont, U, InitBy, Regulator, undefined) of
ok ->
?eval_debug_fun({?MODULE, post_dump}, [InitBy]),
case mnesia_monitor:use_dir() of
@@ -133,17 +148,15 @@ perform_dump(InitBy, Regulator) ->
end,
mnesia_recover:allow_garb(),
%% And now to the crucial point...
- mnesia_log:confirm_log_dump(Diff);
- {error, Reason} ->
- {error, Reason};
- {'EXIT', {Desc, Reason}} ->
+ mnesia_log:confirm_log_dump(Diff)
+ catch exit:Reason when Reason =/= fatal ->
case mnesia_monitor:get_env(auto_repair) of
true ->
- mnesia_lib:important(Desc, Reason),
+ mnesia_lib:important(error, Reason),
%% Ignore rest of the log
mnesia_log:confirm_log_dump(Diff);
false ->
- fatal(Desc, Reason)
+ fatal(error, Reason)
end
end;
{error, Reason} ->
@@ -161,24 +174,25 @@ scan_decisions(Fname, InitBy, Regulator) ->
mnesia_log:open_log(Name, Header, Fname, Exists,
mnesia_monitor:get_env(auto_repair), read_only),
Cont = start,
- Res = (catch do_perform_dump(Cont, false, InitBy, Regulator, undefined)),
- mnesia_log:close_log(Name),
- case Res of
- ok -> ok;
- {'EXIT', Reason} -> {error, Reason}
+ try
+ do_perform_dump(Cont, false, InitBy, Regulator, undefined)
+ catch exit:Reason when Reason =/= fatal ->
+ {error, Reason}
+ after mnesia_log:close_log(Name)
end
end.
do_perform_dump(Cont, InPlace, InitBy, Regulator, OldVersion) ->
case mnesia_log:chunk_log(Cont) of
{C2, Recs} ->
- case catch insert_recs(Recs, InPlace, InitBy, Regulator, OldVersion) of
- {'EXIT', R} ->
- Reason = {"Transaction log dump error: ~p~n", [R]},
- close_files(InPlace, {error, Reason}, InitBy),
- exit(Reason);
+ try insert_recs(Recs, InPlace, InitBy, Regulator, OldVersion) of
Version ->
do_perform_dump(C2, InPlace, InitBy, Regulator, Version)
+ catch _:R when R =/= fatal ->
+ ST = erlang:get_stacktrace(),
+ Reason = {"Transaction log dump error: ~p~n", [{R, ST}]},
+ close_files(InPlace, {error, Reason}, InitBy),
+ exit(Reason)
end;
eof ->
close_files(InPlace, ok, InitBy),
@@ -288,17 +302,16 @@ perform_update(Tid, SchemaOps, _DumperMode, _UseDir) ->
InitBy = fast_schema_update,
InPlace = mnesia_monitor:get_env(dump_log_update_in_place),
- ?eval_debug_fun({?MODULE, dump_schema_op}, [InitBy]),
- case catch insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy,
- mnesia_log:version()) of
- {'EXIT', Reason} ->
- Error = {error, {"Schema update error", Reason}},
+ try insert_ops(Tid, schema_ops, SchemaOps, InPlace, InitBy,
+ mnesia_log:version()),
+ ?eval_debug_fun({?MODULE, post_dump}, [InitBy]),
+ close_files(InPlace, ok, InitBy),
+ ok
+ catch _:Reason when Reason =/= fatal ->
+ ST = erlang:get_stacktrace(),
+ Error = {error, {"Schema update error", {Reason, ST}}},
close_files(InPlace, Error, InitBy),
- fatal("Schema update error ~p ~p", [Reason, SchemaOps]);
- _ ->
- ?eval_debug_fun({?MODULE, post_dump}, [InitBy]),
- close_files(InPlace, ok, InitBy),
- ok
+ fatal("Schema update error ~p ~p", [{Reason,ST}, SchemaOps])
end.
insert_ops(_Tid, _Storage, [], _InPlace, _InitBy, _) -> ok;
@@ -347,13 +360,11 @@ dets_insert(Op,Tab,Key,Val) ->
case dets_incr_counter(Tab,Key) of
true ->
{RecName, Incr} = Val,
- case catch dets:update_counter(Tab, Key, Incr) of
- CounterVal when is_integer(CounterVal) ->
- ok;
- _ when Incr < 0 ->
+ try _ = dets:update_counter(Tab, Key, Incr)
+ catch error:_ when Incr < 0 ->
Zero = {RecName, Key, 0},
ok = dets:insert(Tab, Zero);
- _ ->
+ error:_ ->
Init = {RecName, Key, Incr},
ok = dets:insert(Tab, Init)
end;
@@ -771,7 +782,7 @@ insert_op(Tid, _, {op, clear_table, TabDef}, InPlace, InitBy) ->
end,
%% Need to catch this, it crashes on ram_copies if
%% the op comes before table is loaded at startup.
- catch insert(Tid, Storage, Tab, '_', Oid, clear_table, InPlace, InitBy)
+ ?CATCH(insert(Tid, Storage, Tab, '_', Oid, clear_table, InPlace, InitBy))
end;
insert_op(Tid, _, {op, merge_schema, TabDef}, InPlace, InitBy) ->
@@ -981,28 +992,10 @@ open_files(_Tab, _Storage, _UpdateInPlace, _InitBy) ->
false.
open_disc_copies(Tab, InitBy) ->
- DclF = mnesia_lib:tab2dcl(Tab),
- DumpEts =
- case file:read_file_info(DclF) of
- {error, enoent} ->
- false;
- {ok, DclInfo} ->
- DcdF = mnesia_lib:tab2dcd(Tab),
- case file:read_file_info(DcdF) of
- {error, Reason} ->
- mnesia_lib:dbg_out("File ~p info_error ~p ~n",
- [DcdF, Reason]),
- true;
- {ok, DcdInfo} ->
- Mul = case ?catch_val(dc_dump_limit) of
- {'EXIT', _} -> ?DumpToEtsMultiplier;
- Val -> Val
- end,
- DcdInfo#file_info.size =< (DclInfo#file_info.size * Mul)
- end
- end,
+ DumpEts = needs_dump_ets(Tab),
if
DumpEts == false; InitBy == startup ->
+ DclF = mnesia_lib:tab2dcl(Tab),
mnesia_log:open_log({?MODULE,Tab},
mnesia_log:dcl_log_header(),
DclF,
@@ -1017,6 +1010,27 @@ open_disc_copies(Tab, InitBy) ->
false
end.
+needs_dump_ets(Tab) ->
+ DclF = mnesia_lib:tab2dcl(Tab),
+ case file:read_file_info(DclF) of
+ {error, enoent} ->
+ false;
+ {ok, DclInfo} ->
+ DcdF = mnesia_lib:tab2dcd(Tab),
+ case file:read_file_info(DcdF) of
+ {error, Reason} ->
+ mnesia_lib:dbg_out("File ~p info_error ~p ~n",
+ [DcdF, Reason]),
+ true;
+ {ok, DcdInfo} ->
+ Mul = case ?catch_val(dc_dump_limit) of
+ {'EXIT', _} -> ?DumpToEtsMultiplier;
+ Val -> Val
+ end,
+ DcdInfo#file_info.size =< (DclInfo#file_info.size * Mul)
+ end
+ end.
+
%% Always opens the dcl file for writing overriding already_dumped
%% mechanism, used for schema transactions.
open_dcl(Tab) ->
@@ -1042,14 +1056,13 @@ prepare_open(Tab, UpdateInPlace) ->
Dat;
false ->
Tmp = mnesia_lib:tab2tmp(Tab),
- case catch mnesia_lib:copy_file(Dat, Tmp) of
- ok ->
- Tmp;
- Error ->
+ try ok = mnesia_lib:copy_file(Dat, Tmp)
+ catch error:Error ->
fatal("Cannot copy dets file ~p to ~p: ~p~n",
[Dat, Tmp, Error])
- end
- end.
+ end,
+ Tmp
+ end.
del_opened_tab(Tab) ->
erase({?MODULE, Tab}).
@@ -1171,18 +1184,16 @@ raw_named_dump_table(Tab, Ftype) ->
Storage = ram_copies,
mnesia_lib:db_fixtable(Storage, Tab, true),
- case catch raw_dump_table(TabRef, Tab) of
- {'EXIT', Reason} ->
- mnesia_lib:db_fixtable(Storage, Tab, false),
- mnesia_lib:dets_sync_close(Tab),
- file:delete(TmpFname),
- mnesia_lib:unlock_table(Tab),
- exit({"Dump of table to disc failed", Reason});
- ok ->
- mnesia_lib:db_fixtable(Storage, Tab, false),
- mnesia_lib:dets_sync_close(Tab),
- mnesia_lib:unlock_table(Tab),
- ok = file:rename(TmpFname, Fname)
+ try
+ ok = raw_dump_table(TabRef, Tab),
+ ok = file:rename(TmpFname, Fname)
+ catch _:Reason ->
+ ?SAFE(file:delete(TmpFname)),
+ exit({"Dump of table to disc failed", Reason})
+ after
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ mnesia_lib:dets_sync_close(Tab),
+ mnesia_lib:unlock_table(Tab)
end;
{error, Reason} ->
mnesia_lib:unlock_table(Tab),
@@ -1248,6 +1259,6 @@ regulate(RegulatorPid) ->
val(Var) ->
case ?catch_val(Var) of
- {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ {'EXIT', _} -> mnesia_lib:other_val(Var);
Value -> Value
end.
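Most hunks in this patch apply the same transformation; the sketch below shows its general shape with ets:match_object/2 as a stand-in (illustration only, not code from the patch):

    %% Before: a plain catch turns errors, exits and throws alike into terms,
    %% so a crash cannot be told apart from a legitimate {'EXIT', _} value.
    old_style(Tab, Pat) ->
        case catch ets:match_object(Tab, Pat) of
            {'EXIT', _Reason} -> [];
            Objs -> Objs
        end.

    %% After: only the intended error class is caught, throws propagate
    %% unchanged, and the success branch needs no extra matching.
    new_style(Tab, Pat) ->
        try ets:match_object(Tab, Pat)
        catch error:badarg -> []
        end.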
diff --git a/lib/mnesia/src/mnesia_event.erl b/lib/mnesia/src/mnesia_event.erl
index 67ec9d7399..8a4be88e9a 100644
--- a/lib/mnesia/src/mnesia_event.erl
+++ b/lib/mnesia/src/mnesia_event.erl
@@ -235,8 +235,7 @@ report_fatal(Format, Args, BinaryCore, CoreDumped) ->
end.
core_file(CoreDir,BinaryCore,Format,Args) ->
- %% Integers = tuple_to_list(date()) ++ tuple_to_list(time()),
- Integers = tuple_to_list(now()),
+ Integers = tuple_to_list(erlang:timestamp()),
Fun = fun(I) when I < 10 -> ["_0",I];
(I) -> ["_",I]
end,
diff --git a/lib/mnesia/src/mnesia_frag.erl b/lib/mnesia/src/mnesia_frag.erl
index 66fc20913c..6036ac4e8f 100644
--- a/lib/mnesia/src/mnesia_frag.erl
+++ b/lib/mnesia/src/mnesia_frag.erl
@@ -406,10 +406,11 @@ verify_numbers(FH,MatchSpec) ->
VerifyFun = fun(F) when is_integer(F), F >= 1, F =< N -> false;
(_F) -> true
end,
- case catch lists:filter(VerifyFun, FragNumbers) of
- [] ->
- FragNumbers;
- BadFrags ->
+ try
+ Frags = lists:filter(VerifyFun, FragNumbers),
+ Frags == [] orelse error(Frags),
+ FragNumbers
+ catch error:BadFrags ->
mnesia:abort({"match_spec_to_frag_numbers: Fragment numbers out of range",
BadFrags, {range, 1, N}})
end.
@@ -437,7 +438,7 @@ remote_select(ReplyTo, Ref, NameNodes, MatchSpec) ->
do_remote_select(ReplyTo, Ref, [{Name, Node} | NameNodes], MatchSpec) ->
if
Node == node() ->
- Res = (catch {ok, mnesia:dirty_select(Name, MatchSpec)}),
+ Res = ?CATCH({ok, mnesia:dirty_select(Name, MatchSpec)}),
ReplyTo ! {remote_select, Ref, Node, Res},
do_remote_select(ReplyTo, Ref, NameNodes, MatchSpec);
true ->
@@ -886,17 +887,19 @@ adjust_before_split(FH) ->
HashMod:add_frag(HashState)
end,
N = FH#frag_state.n_fragments + 1,
- FromFrags2 = (catch lists:sort(FromFrags)),
- UnionFrags = (catch lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags))),
VerifyFun = fun(F) when is_integer(F), F >= 1, F =< N -> false;
(_F) -> true
end,
- case catch lists:filter(VerifyFun, UnionFrags) of
- [] ->
- FH2 = FH#frag_state{n_fragments = N,
- hash_state = HashState2},
- {FH2, FromFrags2, UnionFrags};
- BadFrags ->
+ try
+ FromFrags2 = lists:sort(FromFrags),
+ UnionFrags = lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags)),
+
+ Frags = lists:filter(VerifyFun, UnionFrags),
+ Frags == [] orelse error(Frags),
+ FH2 = FH#frag_state{n_fragments = N,
+ hash_state = HashState2},
+ {FH2, FromFrags2, UnionFrags}
+ catch error:BadFrags ->
mnesia:abort({"add_frag: Fragment numbers out of range",
BadFrags, {range, 1, N}})
end.
@@ -981,22 +984,24 @@ adjust_before_merge(FH) ->
HashMod:del_frag(HashState)
end,
N = FH#frag_state.n_fragments,
- FromFrags2 = (catch lists:sort(FromFrags)),
- UnionFrags = (catch lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags))),
VerifyFun = fun(F) when is_integer(F), F >= 1, F =< N -> false;
(_F) -> true
end,
- case catch lists:filter(VerifyFun, UnionFrags) of
- [] ->
- case lists:member(N, FromFrags2) of
- true ->
- FH2 = FH#frag_state{n_fragments = N - 1,
- hash_state = HashState2},
- {FH2, FromFrags2, UnionFrags};
+ try
+ FromFrags2 = lists:sort(FromFrags),
+ UnionFrags = lists:merge(FromFrags2, lists:sort(AdditionalWriteFrags)),
+
+ Frags = lists:filter(VerifyFun, UnionFrags),
+ [] == Frags orelse error(Frags),
+ case lists:member(N, FromFrags2) of
+ true ->
+ FH2 = FH#frag_state{n_fragments = N - 1,
+ hash_state = HashState2},
+ {FH2, FromFrags2, UnionFrags};
false ->
- mnesia:abort({"del_frag: Last fragment number not included", N})
- end;
- BadFrags ->
+ mnesia:abort({"del_frag: Last fragment number not included", N})
+ end
+ catch error:BadFrags ->
mnesia:abort({"del_frag: Fragment numbers out of range",
BadFrags, {range, 1, N}})
end.
@@ -1141,8 +1146,8 @@ remove_node(Node, Cs) ->
val(Var) ->
case ?catch_val(Var) of
- {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
- Value -> Value
+ {'EXIT', _} -> mnesia_lib:other_val(Var);
+ Value -> Value
end.
set_frag_hash(Tab, Props) ->
diff --git a/lib/mnesia/src/mnesia_index.erl b/lib/mnesia/src/mnesia_index.erl
index 87cb58dae1..6a7c964fce 100644
--- a/lib/mnesia/src/mnesia_index.erl
+++ b/lib/mnesia/src/mnesia_index.erl
@@ -45,21 +45,11 @@
del_transient/3,
del_index_table/3]).
--import(mnesia_lib, [verbose/2]).
+-import(mnesia_lib, [val/1, verbose/2]).
-include("mnesia.hrl").
-record(index, {setorbag, pos_list}).
-val(Var) ->
- case ?catch_val(Var) of
- {'EXIT', _ReASoN_} ->
- case mnesia_lib:other_val(Var) of
- error -> mnesia_lib:pr_other(Var, _ReASoN_);
- Val -> Val
- end;
- _VaLuE_ -> _VaLuE_
- end.
-
%% read an object list through its index table
%% we assume that table Tab has index on attribute number Pos
diff --git a/lib/mnesia/src/mnesia_late_loader.erl b/lib/mnesia/src/mnesia_late_loader.erl
index d09de3ca66..9a113c6306 100644
--- a/lib/mnesia/src/mnesia_late_loader.erl
+++ b/lib/mnesia/src/mnesia_late_loader.erl
@@ -36,17 +36,19 @@
-define(SERVER_NAME, ?MODULE).
+-include("mnesia.hrl").
+
-record(state, {supervisor}).
async_late_disc_load(_, [], _) -> ok;
async_late_disc_load(Node, Tabs, Reason) ->
Msg = {async_late_disc_load, Tabs, Reason},
- catch ({?SERVER_NAME, Node} ! {self(), Msg}).
+ ?SAFE({?SERVER_NAME, Node} ! {self(), Msg}).
maybe_async_late_disc_load(_, [], _) -> ok;
maybe_async_late_disc_load(Node, Tabs, Reason) ->
Msg = {maybe_async_late_disc_load, Tabs, Reason},
- catch ({?SERVER_NAME, Node} ! {self(), Msg}).
+ ?SAFE({?SERVER_NAME, Node} ! {self(), Msg}).
start() ->
mnesia_monitor:start_proc(?SERVER_NAME, ?MODULE, init, [self()]).
diff --git a/lib/mnesia/src/mnesia_lib.erl b/lib/mnesia/src/mnesia_lib.erl
index a32c69c59e..fc7362a31d 100644
--- a/lib/mnesia/src/mnesia_lib.erl
+++ b/lib/mnesia/src/mnesia_lib.erl
@@ -114,9 +114,7 @@
lock_table/1,
mkcore/1,
not_active_here/1,
- other_val/2,
other_val/1,
- pr_other/2,
overload_read/0,
overload_read/1,
overload_set/2,
@@ -380,8 +378,8 @@ search_key(_Key, []) ->
val(Var) ->
case ?catch_val(Var) of
- {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
- _VaLuE_ -> _VaLuE_
+ {'EXIT', _} -> other_val(Var);
+ _VaLuE_ -> _VaLuE_
end.
set(Var, Val) ->
@@ -390,13 +388,13 @@ set(Var, Val) ->
unset(Var) ->
?ets_delete(mnesia_gvar, Var).
-other_val(Var, Other) ->
- case other_val(Var) of
- error -> pr_other(Var, Other);
+other_val(Var) ->
+ case other_val_1(Var) of
+ error -> pr_other(Var);
Val -> Val
end.
-other_val(Var) ->
+other_val_1(Var) ->
case Var of
{_, where_to_read} -> nowhere;
{_, where_to_write} -> [];
@@ -404,21 +402,16 @@ other_val(Var) ->
_ -> error
end.
-pr_other(Var, Other) ->
- Why =
+pr_other(Var) ->
+ Why =
case is_running() of
no -> {node_not_running, node()};
_ -> {no_exists, Var}
end,
- verbose("~p (~p) val(mnesia_gvar, ~w) -> ~p ~p ~n",
+ verbose("~p (~p) val(mnesia_gvar, ~w) -> ~p ~n",
[self(), process_info(self(), registered_name),
- Var, Other, Why]),
- case Other of
- {badarg, [{ets, lookup_element, _, _}|_]} ->
- exit(Why);
- _ ->
- erlang:error(Why)
- end.
+ Var, Why]),
+ mnesia:abort(Why).
%% Some functions for list valued variables
add(Var, Val) ->
@@ -905,7 +898,7 @@ dirty_rpc_error_tag(Reason) ->
end.
fatal(Format, Args) ->
- catch set(mnesia_status, stopping),
+ ?SAFE(catch set(mnesia_status, stopping)),
Core = mkcore({crashinfo, {Format, Args}}),
report_fatal(Format, Args, Core),
timer:sleep(10000), % Enough to write the core dump to disc?
@@ -917,7 +910,7 @@ report_fatal(Format, Args) ->
report_fatal(Format, Args, Core) ->
report_system_event({mnesia_fatal, Format, Args, Core}),
- catch exit(whereis(mnesia_monitor), fatal).
+ ?SAFE(exit(whereis(mnesia_monitor), fatal)).
%% We sleep longer and longer the more we try
%% Made some testing and came up with the following constants
@@ -930,8 +923,9 @@ random_time(Retries, _Counter0) ->
case get(random_seed) of
undefined ->
- {X, Y, Z} = erlang:now(), %% time()
- _ = random:seed(X, Y, Z),
+ _ = random:seed(erlang:unique_integer(),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
Time = Dup + random:uniform(MaxIntv),
%% dbg_out("---random_test rs ~w max ~w val ~w---~n", [Retries, MaxIntv, Time]),
Time;
@@ -1013,7 +1007,7 @@ dbg_out(Format, Args) ->
%% Keep the last 10 debug print outs
save(DbgInfo) ->
- catch save2(DbgInfo).
+ ?SAFE(save2(DbgInfo)).
save2(DbgInfo) ->
Key = {'$$$_report', current_pos},
@@ -1089,35 +1083,29 @@ db_match_object(Tab, Pat) ->
db_match_object(val({Tab, storage_type}), Tab, Pat).
db_match_object(Storage, Tab, Pat) ->
db_fixtable(Storage, Tab, true),
- Res = catch_match_object(Storage, Tab, Pat),
- db_fixtable(Storage, Tab, false),
- case Res of
- {'EXIT', Reason} -> exit(Reason);
- _ -> Res
+ try
+ case Storage of
+ disc_only_copies -> dets:match_object(Tab, Pat);
+ _ -> ets:match_object(Tab, Pat)
+ end
+ after
+ db_fixtable(Storage, Tab, false)
end.
-catch_match_object(disc_only_copies, Tab, Pat) ->
- catch dets:match_object(Tab, Pat);
-catch_match_object(_, Tab, Pat) ->
- catch ets:match_object(Tab, Pat).
-
db_select(Tab, Pat) ->
db_select(val({Tab, storage_type}), Tab, Pat).
db_select(Storage, Tab, Pat) ->
db_fixtable(Storage, Tab, true),
- Res = catch_select(Storage, Tab, Pat),
- db_fixtable(Storage, Tab, false),
- case Res of
- {'EXIT', Reason} -> exit(Reason);
- _ -> Res
+ try
+ case Storage of
+ disc_only_copies -> dets:select(Tab, Pat);
+ _ -> ets:select(Tab, Pat)
+ end
+ after
+ db_fixtable(Storage, Tab, false)
end.
-catch_select(disc_only_copies, Tab, Pat) ->
- catch dets:select(Tab, Pat);
-catch_select(_, Tab, Pat) ->
- catch ets:select(Tab, Pat).
-
db_select_init(disc_only_copies, Tab, Pat, Limit) ->
dets:select(Tab, Pat, Limit);
db_select_init(_, Tab, Pat, Limit) ->
@@ -1261,7 +1249,7 @@ dets_sync_open(Tab, Args) ->
end.
dets_sync_close(Tab) ->
- catch dets:close(Tab),
+ ?SAFE(dets:close(Tab)),
unlock_table(Tab),
ok.
@@ -1297,7 +1285,7 @@ readable_indecies(Tab) ->
scratch_debug_fun() ->
dbg_out("scratch_debug_fun(): ~p~n", [?DEBUG_TAB]),
- (catch ?ets_delete_table(?DEBUG_TAB)),
+ ?SAFE(?ets_delete_table(?DEBUG_TAB)),
?ets_new_table(?DEBUG_TAB, [set, public, named_table, {keypos, 2}]).
activate_debug_fun(FunId, Fun, InitialContext, File, Line) ->
@@ -1310,43 +1298,45 @@ activate_debug_fun(FunId, Fun, InitialContext, File, Line) ->
update_debug_info(Info).
update_debug_info(Info) ->
- case catch ?ets_insert(?DEBUG_TAB, Info) of
- {'EXIT', _} ->
+ try ?ets_insert(?DEBUG_TAB, Info),
+ ok
+ catch error:_ ->
scratch_debug_fun(),
- ?ets_insert(?DEBUG_TAB, Info);
- _ ->
- ok
+ ?ets_insert(?DEBUG_TAB, Info)
end,
dbg_out("update_debug_info(~p)~n", [Info]),
ok.
deactivate_debug_fun(FunId, _File, _Line) ->
- catch ?ets_delete(?DEBUG_TAB, FunId),
+ ?SAFE(?ets_delete(?DEBUG_TAB, FunId)),
ok.
eval_debug_fun(FunId, EvalContext, EvalFile, EvalLine) ->
- case catch ?ets_lookup(?DEBUG_TAB, FunId) of
- [] ->
- ok;
- [Info] ->
- OldContext = Info#debug_info.context,
- dbg_out("~s(~p): ~w "
- "activated in ~s(~p)~n "
- "eval_debug_fun(~w, ~w)~n",
- [filename:basename(EvalFile), EvalLine, Info#debug_info.id,
- filename:basename(Info#debug_info.file), Info#debug_info.line,
- OldContext, EvalContext]),
- Fun = Info#debug_info.function,
- NewContext = Fun(OldContext, EvalContext),
-
- case catch ?ets_lookup(?DEBUG_TAB, FunId) of
- [Info] when NewContext /= OldContext ->
- NewInfo = Info#debug_info{context = NewContext},
- update_debug_info(NewInfo);
- _ ->
- ok
- end;
- {'EXIT', _} -> ok
+ try
+ case ?ets_lookup(?DEBUG_TAB, FunId) of
+ [] ->
+ ok;
+ [Info] ->
+ OldContext = Info#debug_info.context,
+ dbg_out("~s(~p): ~w "
+ "activated in ~s(~p)~n "
+ "eval_debug_fun(~w, ~w)~n",
+ [filename:basename(EvalFile), EvalLine, Info#debug_info.id,
+ filename:basename(Info#debug_info.file), Info#debug_info.line,
+ OldContext, EvalContext]),
+ Fun = Info#debug_info.function,
+ NewContext = Fun(OldContext, EvalContext),
+
+ case ?ets_lookup(?DEBUG_TAB, FunId) of
+ [Info] when NewContext /= OldContext ->
+ NewInfo = Info#debug_info{context = NewContext},
+ update_debug_info(NewInfo);
+ _ ->
+ ok
+ end
+ end
+ catch error:_ ->
+ ok
end.
-ifdef(debug).
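The ?SAFE and ?CATCH macros used throughout these files are defined in mnesia.hrl, which is not part of this excerpt; the definitions below are only an assumed sketch of their shape:

    %% Assumed definitions; the real macros in mnesia.hrl may use other
    %% variable names to avoid clashing with bindings at the call site.
    -define(SAFE(Expr),  try Expr catch _:_ -> ok end).
    -define(CATCH(Expr), try Expr catch _:Reason -> {'EXIT', Reason} end).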
diff --git a/lib/mnesia/src/mnesia_loader.erl b/lib/mnesia/src/mnesia_loader.erl
index 530317bcdd..65ea743fd3 100644
--- a/lib/mnesia/src/mnesia_loader.erl
+++ b/lib/mnesia/src/mnesia_loader.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1998-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1998-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -35,7 +35,7 @@
val(Var) ->
case ?catch_val(Var) of
- {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ {'EXIT', _} -> mnesia_lib:other_val(Var);
Value -> Value
end.
@@ -69,9 +69,10 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
ignore;
_ ->
mnesia_monitor:mktab(Tab, Args),
- Count = mnesia_log:dcd2ets(Tab, Repair),
- case ets:info(Tab, size) of
- X when X < Count * 4 ->
+ _Count = mnesia_log:dcd2ets(Tab, Repair),
+ case mnesia_monitor:get_env(dump_disc_copies_at_startup)
+ andalso mnesia_dumper:needs_dump_ets(Tab) of
+ true ->
ok = mnesia_log:ets2dcd(Tab);
_ ->
ignore
@@ -331,7 +332,7 @@ wait_on_load_complete(Pid) ->
{Pid, Res} ->
Res;
{'EXIT', Pid, Reason} ->
- exit(Reason);
+ error(Reason);
Else ->
Pid ! Else,
wait_on_load_complete(Pid)
@@ -441,18 +442,18 @@ init_table(Tab, disc_only_copies, Fun, DetsInfo,Sender) ->
ErtsVer = erlang:system_info(version),
case DetsInfo of
{ErtsVer, DetsData} ->
- Res = (catch dets:is_compatible_bchunk_format(Tab, DetsData)),
- case Res of
- {'EXIT',{undef,[{dets,_,_,_}|_]}} ->
- Sender ! {self(), {old_protocol, Tab}},
- dets:init_table(Tab, Fun); %% Old dets version
- {'EXIT', What} ->
- exit(What);
+ try dets:is_compatible_bchunk_format(Tab, DetsData) of
false ->
Sender ! {self(), {old_protocol, Tab}},
dets:init_table(Tab, Fun); %% Old dets version
true ->
dets:init_table(Tab, Fun, [{format, bchunk}])
+ catch
+ error:{undef,[{dets,_,_,_}|_]} ->
+ Sender ! {self(), {old_protocol, Tab}},
+ dets:init_table(Tab, Fun); %% Old dets version
+ error:What ->
+ What
end;
Old when Old /= false ->
Sender ! {self(), {old_protocol, Tab}},
@@ -461,10 +462,10 @@ init_table(Tab, disc_only_copies, Fun, DetsInfo,Sender) ->
dets:init_table(Tab, Fun)
end;
init_table(Tab, _, Fun, _DetsInfo,_) ->
- case catch ets:init_table(Tab, Fun) of
- true ->
- ok;
- {'EXIT', Else} -> Else
+ try
+ true = ets:init_table(Tab, Fun),
+ ok
+ catch _:Else -> {Else, erlang:get_stacktrace()}
end.
@@ -571,9 +572,9 @@ handle_last({ram_copies, Tab}, _Type, DatBin) ->
down(Tab, Storage) ->
case Storage of
ram_copies ->
- catch ?ets_delete_table(Tab);
+ ?SAFE(?ets_delete_table(Tab));
disc_copies ->
- catch ?ets_delete_table(Tab);
+ ?SAFE(?ets_delete_table(Tab));
disc_only_copies ->
TmpFile = mnesia_lib:tab2tmp(Tab),
mnesia_lib:dets_sync_close(Tab),
@@ -657,26 +658,23 @@ send_table(Pid, Tab, RemoteS) ->
{Init, Chunk} = reader_funcs(UseDetsChunk, Tab, Storage, KeysPerTransfer),
SendIt = fun() ->
- prepare_copy(Pid, Tab, Storage),
+ {atomic, ok} = prepare_copy(Pid, Tab, Storage),
send_more(Pid, 1, Chunk, Init(), Tab),
finish_copy(Pid, Tab, Storage, RemoteS)
end,
- case catch SendIt() of
- receiver_died ->
+ try SendIt() of
+ {_, receiver_died} -> ok;
+ {atomic, no_more} -> ok
+ catch
+ throw:receiver_died ->
cleanup_tab_copier(Pid, Storage, Tab),
- unlink(whereis(mnesia_tm)),
ok;
- {_, receiver_died} ->
- unlink(whereis(mnesia_tm)),
- ok;
- {atomic, no_more} ->
- unlink(whereis(mnesia_tm)),
- ok;
- Reason ->
+ error:Reason -> %% Prepare failed
cleanup_tab_copier(Pid, Storage, Tab),
- unlink(whereis(mnesia_tm)),
- {error, Reason}
+ {error, {tab_copier, Tab, {Reason, erlang:get_stacktrace()}}}
+ after
+ unlink(whereis(mnesia_tm))
end
end.
@@ -689,12 +687,7 @@ prepare_copy(Pid, Tab, Storage) ->
mnesia_lib:db_fixtable(Storage, Tab, true),
ok
end,
- case mnesia:transaction(Trans) of
- {atomic, ok} ->
- ok;
- {aborted, Reason} ->
- exit({tab_copier_prepare, Tab, Reason})
- end.
+ mnesia:transaction(Trans).
update_where_to_write(Tab, Node) ->
case val({Tab, access_mode}) of
@@ -827,6 +820,6 @@ dat2bin(_Tab, _LocalS, _RemoteS) ->
nobin.
handle_exit(Pid, Reason) when node(Pid) == node() ->
- exit(Reason);
+ error(Reason);
handle_exit(_Pid, _Reason) -> %% Not from our node, this will be handled by
ignore. %% mnesia_down soon.
diff --git a/lib/mnesia/src/mnesia_locker.erl b/lib/mnesia/src/mnesia_locker.erl
index e27396731f..6658dbeacb 100644
--- a/lib/mnesia/src/mnesia_locker.erl
+++ b/lib/mnesia/src/mnesia_locker.erl
@@ -98,7 +98,7 @@ init(Parent) ->
val(Var) ->
case ?catch_val(Var) of
- {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ {'EXIT', _} -> mnesia_lib:other_val(Var);
_VaLuE_ -> _VaLuE_
end.
@@ -982,8 +982,14 @@ sticky_flush(Ns=[Node | Tail], Store) ->
flush_remaining([], _SkipNode, Res) ->
del_debug(),
exit(Res);
-flush_remaining([SkipNode | Tail ], SkipNode, Res) ->
- flush_remaining(Tail, SkipNode, Res);
+flush_remaining(Ns=[SkipNode | Tail ], SkipNode, Res) ->
+ add_debug(Ns),
+ receive
+ {?MODULE, SkipNode, _} ->
+ flush_remaining(Tail, SkipNode, Res)
+ after 0 ->
+ flush_remaining(Tail, SkipNode, Res)
+ end;
flush_remaining(Ns=[Node | Tail], SkipNode, Res) ->
add_debug(Ns),
receive
@@ -995,13 +1001,11 @@ flush_remaining(Ns=[Node | Tail], SkipNode, Res) ->
opt_lookup_in_client(lookup_in_client, Oid, Lock) ->
{Tab, Key} = Oid,
- case catch mnesia_lib:db_get(Tab, Key) of
- {'EXIT', _} ->
+ try mnesia_lib:db_get(Tab, Key)
+ catch error:_ ->
%% Table has been deleted from this node,
%% restart the transaction.
- #cyclic{op = read, lock = Lock, oid = Oid, lucky = nowhere};
- Val ->
- Val
+ #cyclic{op = read, lock = Lock, oid = Oid, lucky = nowhere}
end;
opt_lookup_in_client(Val, _Oid, _Lock) ->
Val.
@@ -1133,11 +1137,10 @@ send_requests([], _X) ->
rec_requests([Node | Nodes], Oid, Store) ->
Res = l_req_rec(Node, Store),
- case catch rlock_get_reply(Node, Store, Oid, Res) of
- {'EXIT', Reason} ->
- flush_remaining(Nodes, Node, Reason);
- _ ->
- rec_requests(Nodes, Oid, Store)
+ try rlock_get_reply(Node, Store, Oid, Res) of
+ _ -> rec_requests(Nodes, Oid, Store)
+ catch _:Reason ->
+ flush_remaining(Nodes, Node, Reason)
end;
rec_requests([], _Oid, _Store) ->
ok.
diff --git a/lib/mnesia/src/mnesia_log.erl b/lib/mnesia/src/mnesia_log.erl
index d2fd04a60b..21ad0ffdb6 100644
--- a/lib/mnesia/src/mnesia_log.erl
+++ b/lib/mnesia/src/mnesia_log.erl
@@ -200,7 +200,7 @@ log_header(Kind, Version) ->
log_kind=Kind,
mnesia_version=mnesia:system_info(version),
node=node(),
- now=now()}.
+ now=erlang:timestamp()}.
version() -> "4.3".
@@ -462,7 +462,7 @@ chunk_log(Cont) ->
chunk_log(_Log, eof) ->
eof;
chunk_log(Log, Cont) ->
- case catch disk_log:chunk(Log, Cont) of
+ case disk_log:chunk(Log, Cont) of
{error, Reason} ->
fatal("Possibly truncated ~p file: ~p~n",
[Log, Reason]);
@@ -647,11 +647,11 @@ backup_checkpoint(Name, Opaque, Args) when is_list(Args) ->
end.
check_backup_args([Arg | Tail], B) ->
- case catch check_backup_arg_type(Arg, B) of
- {'EXIT', _Reason} ->
- {error, {badarg, Arg}};
+ try check_backup_arg_type(Arg, B) of
B2 ->
check_backup_args(Tail, B2)
+ catch error:_ ->
+ {error, {badarg, Arg}}
end;
check_backup_args([], B) ->
@@ -674,11 +674,11 @@ check_backup_arg_type(Arg, B) ->
backup_master(ClientPid, B) ->
process_flag(trap_exit, true),
- case catch do_backup_master(B) of
- {'EXIT', Reason} ->
- ClientPid ! {self(), ClientPid, {error, {'EXIT', Reason}}};
+ try do_backup_master(B) of
Res ->
ClientPid ! {self(), ClientPid, Res}
+ catch _:Reason ->
+ ClientPid ! {self(), ClientPid, {error, {'EXIT', Reason}}}
end,
unlink(ClientPid),
exit(normal).
@@ -736,10 +736,10 @@ safe_apply(B, What, Args) ->
{'EXIT', Pid, R} -> Abort({'EXIT', Pid, R})
after 0 ->
Mod = B#backup_args.module,
- case catch apply(Mod, What, Args) of
+ try apply(Mod, What, Args) of
{ok, Opaque} -> B#backup_args{opaque=Opaque};
- {error, R} -> Abort(R);
- R -> Abort(R)
+ {error, R} -> Abort(R)
+ catch _:R -> Abort(R)
end
end.
@@ -748,10 +748,9 @@ abort_write(B, What, Args, Reason) ->
Opaque = B#backup_args.opaque,
dbg_out("Failed to perform backup. M=~p:F=~p:A=~p -> ~p~n",
[Mod, What, Args, Reason]),
- case catch apply(Mod, abort_write, [Opaque]) of
- {ok, _Res} ->
- throw({error, Reason});
- Other ->
+ try apply(Mod, abort_write, [Opaque]) of
+ {ok, _Res} -> throw({error, Reason})
+ catch _:Other ->
error("Failed to abort backup. ~p:~p~p -> ~p~n",
[Mod, abort_write, [Opaque], Other]),
throw({error, Reason})
@@ -892,10 +891,8 @@ tab_receiver(Pid, B, Tab, RecName, Slot) ->
end.
rec_filter(B, schema, _RecName, Recs) ->
- case catch mnesia_bup:refresh_cookie(Recs, B#backup_args.cookie) of
- Recs2 when is_list(Recs2) ->
- Recs2;
- {error, _Reason} ->
+ try mnesia_bup:refresh_cookie(Recs, B#backup_args.cookie)
+ catch throw:{error, _Reason} ->
%% No schema table cookie
Recs
end;
@@ -1006,13 +1003,14 @@ add_recs([{{Tab, _Key}, Val, delete_object} | Rest], N) ->
add_recs(Rest, N+1);
add_recs([{{Tab, Key}, Val, update_counter} | Rest], N) ->
{RecName, Incr} = Val,
- case catch ets:update_counter(Tab, Key, Incr) of
- CounterVal when is_integer(CounterVal) ->
- ok;
- _ when Incr < 0 ->
+ try
+ CounterVal = ets:update_counter(Tab, Key, Incr),
+ true = (CounterVal >= 0)
+ catch
+ error:_ when Incr < 0 ->
Zero = {RecName, Key, 0},
true = ets:insert(Tab, Zero);
- _ ->
+ error:_ ->
Zero = {RecName, Key, Incr},
true = ets:insert(Tab, Zero)
end,
diff --git a/lib/mnesia/src/mnesia_monitor.erl b/lib/mnesia/src/mnesia_monitor.erl
index 6fc1a394a6..14b1ab5c1a 100644
--- a/lib/mnesia/src/mnesia_monitor.erl
+++ b/lib/mnesia/src/mnesia_monitor.erl
@@ -268,7 +268,7 @@ init([Parent]) ->
set(version, Version),
dbg_out("Version: ~p~n", [Version]),
- case catch process_config_args(env()) of
+ try process_config_args(env()) of
ok ->
mnesia_lib:set({'$$$_report', current_pos}, 0),
Level = mnesia_lib:val(debug),
@@ -288,8 +288,8 @@ init([Parent]) ->
set(pending_checkpoints, []),
set(pending_checkpoint_pids, []),
- {ok, #state{supervisor = Parent}};
- {'EXIT', Reason} ->
+ {ok, #state{supervisor = Parent}}
+ catch _:Reason ->
mnesia_lib:report_fatal("Bad configuration: ~p~n", [Reason]),
{stop, {bad_config, Reason}}
end.
@@ -323,25 +323,24 @@ non_empty_dir() ->
%%----------------------------------------------------------------------
handle_call({mktab, Tab, Args}, _From, State) ->
- case catch ?ets_new_table(Tab, Args) of
- {'EXIT', ExitReason} ->
+ try ?ets_new_table(Tab, Args) of
+ Reply ->
+ {reply, Reply, State}
+ catch error:ExitReason ->
Msg = "Cannot create ets table",
Reason = {system_limit, Msg, Tab, Args, ExitReason},
fatal("~p~n", [Reason]),
- {noreply, State};
- Reply ->
- {reply, Reply, State}
+ {noreply, State}
end;
handle_call({unsafe_mktab, Tab, Args}, _From, State) ->
- case catch ?ets_new_table(Tab, Args) of
- {'EXIT', ExitReason} ->
- {reply, {error, ExitReason}, State};
+ try ?ets_new_table(Tab, Args) of
Reply ->
{reply, Reply, State}
+ catch error:ExitReason ->
+ {reply, {error, ExitReason}, State}
end;
-
handle_call({open_dets, Tab, Args}, _From, State) ->
case mnesia_lib:dets_sync_open(Tab, Args) of
{ok, Tab} ->
@@ -546,7 +545,7 @@ handle_info({'EXIT', Pid, fatal}, State) when node(Pid) == node() ->
%% is in progress
%% exit(State#state.supervisor, shutdown),
%% It is better to kill an innocent process
- catch exit(whereis(mnesia_locker), kill),
+ ?SAFE(exit(whereis(mnesia_locker), kill)),
{noreply, State};
handle_info(Msg = {'EXIT',Pid,_}, State) ->
@@ -664,6 +663,7 @@ env() ->
backup_module,
debug,
dir,
+ dump_disc_copies_at_startup,
dump_log_load_regulation,
dump_log_time_threshold,
dump_log_update_in_place,
@@ -692,6 +692,8 @@ default_env(debug) ->
default_env(dir) ->
Name = lists:concat(["Mnesia.", node()]),
filename:absname(Name);
+default_env(dump_disc_copies_at_startup) ->
+ true;
default_env(dump_log_load_regulation) ->
false;
default_env(dump_log_time_threshold) ->
@@ -724,11 +726,8 @@ default_env(send_compressed) ->
0.
check_type(Env, Val) ->
- case catch do_check_type(Env, Val) of
- {'EXIT', _Reason} ->
- exit({bad_config, Env, Val});
- NewVal ->
- NewVal
+ try do_check_type(Env, Val)
+ catch error:_ -> exit({bad_config, Env, Val})
end.
do_check_type(access_module, A) when is_atom(A) -> A;
@@ -741,6 +740,7 @@ do_check_type(debug, trace) -> trace;
do_check_type(debug, true) -> debug;
do_check_type(debug, verbose) -> verbose;
do_check_type(dir, V) -> filename:absname(V);
+do_check_type(dump_disc_copies_at_startup, B) -> bool(B);
do_check_type(dump_log_load_regulation, B) -> bool(B);
do_check_type(dump_log_time_threshold, I) when is_integer(I), I > 0 -> I;
do_check_type(dump_log_update_in_place, B) -> bool(B);
@@ -777,12 +777,12 @@ media(opt_disc) -> opt_disc;
media(ram) -> ram.
patch_env(Env, Val) ->
- case catch do_check_type(Env, Val) of
- {'EXIT', _Reason} ->
- {error, {bad_type, Env, Val}};
+ try do_check_type(Env, Val) of
NewVal ->
application_controller:set_env(mnesia, Env, NewVal),
NewVal
+ catch error:_ ->
+ {error, {bad_type, Env, Val}}
end.
detect_partitioned_network(Mon, Node) ->
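The dump_disc_copies_at_startup parameter registered above is an ordinary Mnesia application environment key (defaulting to true); it controls whether the loader rewrites a table's .DCD file at startup, while needs_dump_ets/1 decides, from the relative .DCL and .DCD sizes, whether a rewrite is worthwhile. A hedged example of disabling it before Mnesia starts (illustration only):

    %% In the Erlang shell or init code, before mnesia:start():
    ok = application:set_env(mnesia, dump_disc_copies_at_startup, false).
    %% Or as a VM flag: erl -mnesia dump_disc_copies_at_startup false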
diff --git a/lib/mnesia/src/mnesia_recover.erl b/lib/mnesia/src/mnesia_recover.erl
index b6492707e2..aa567a23cb 100644
--- a/lib/mnesia/src/mnesia_recover.erl
+++ b/lib/mnesia/src/mnesia_recover.erl
@@ -178,11 +178,8 @@ log_decision(D) ->
val(Var) ->
case ?catch_val(Var) of
- {'EXIT', Reason} ->
- case mnesia_lib:other_val(Var) of
- error -> mnesia_lib:pr_other(Var, Reason);
- Val -> Val
- end;
+ {'EXIT', _Reason} ->
+ mnesia_lib:other_val(Var);
Value -> Value
end.
@@ -373,11 +370,8 @@ log_master_nodes2([], _UseDir, IsRunning, WorstRes) ->
get_master_node_info() ->
Tab = mnesia_decision,
Pat = {master_nodes, '_', '_'},
- case catch mnesia_lib:db_match_object(ram_copies,Tab, Pat) of
- {'EXIT', _} ->
- [];
- Masters ->
- Masters
+ try mnesia_lib:db_match_object(ram_copies,Tab, Pat)
+ catch error:_ -> []
end.
get_master_node_tables() ->
@@ -385,9 +379,8 @@ get_master_node_tables() ->
[Tab || {master_nodes, Tab, _Nodes} <- Masters].
get_master_nodes(Tab) ->
- case catch ?ets_lookup_element(mnesia_decision, Tab, 3) of
- {'EXIT', _} -> [];
- Nodes -> Nodes
+ try ?ets_lookup_element(mnesia_decision, Tab, 3)
+ catch error:_ -> []
end.
%% Determine what has happened to the transaction
@@ -485,8 +478,6 @@ load_decision_tab() ->
load_decision_tab(Cont, load_decision_tab),
mnesia_log:close_decision_tab().
-load_decision_tab(eof, _InitBy) ->
- ok;
load_decision_tab(Cont, InitBy) ->
case mnesia_log:chunk_decision_tab(Cont) of
{Cont2, Decisions} ->
@@ -519,8 +510,6 @@ dump_decision_log(InitBy) ->
Cont = mnesia_log:prepare_decision_log_dump(),
perform_dump_decision_log(Cont, InitBy).
-perform_dump_decision_log(eof, _InitBy) ->
- confirm_decision_log_dump();
perform_dump_decision_log(Cont, InitBy) when InitBy == startup ->
case mnesia_log:chunk_decision_log(Cont) of
{Cont2, Decisions} ->
@@ -689,12 +678,29 @@ handle_call({connect_nodes, Ns}, From, State) ->
%% called from handle_info
gen_server:reply(From, {[], AlreadyConnected}),
{noreply, State};
- GoodNodes ->
+ ProbablyGoodNodes ->
%% Now we have agreed upon a protocol with some new nodes
- %% and we may use them when we recover transactions
+ %% and we may use them when we recover transactions.
+ %%
+ %% Just in case Mnesia was stopped on some of those nodes
+ %% between the protocol negotiation and now, we check one
+ %% more time the state of Mnesia.
+ %%
+ %% Of course, there is still a chance that mnesia_down
+ %% events occur during this check and we miss them. To
+ %% prevent it, handle_cast({mnesia_down, ...}, ...) removes
+ %% the down node again, in addition to mnesia_down/1.
+ %%
+ %% See a comment in handle_cast({mnesia_down, ...}, ...).
+ Verify = fun(N) ->
+ Run = mnesia_lib:is_running(N),
+ Run =:= yes orelse Run =:= starting
+ end,
+ GoodNodes = [N || N <- ProbablyGoodNodes, Verify(N)],
+
mnesia_lib:add_list(recover_nodes, GoodNodes),
cast({announce_all, GoodNodes}),
- case get_master_nodes(schema) of
+ case get_master_nodes(schema) of
[] ->
Context = starting_partitioned_network,
mnesia_monitor:detect_inconcistency(GoodNodes, Context);
@@ -842,6 +848,14 @@ handle_cast({what_decision, Node, OtherD}, State) ->
{noreply, State};
handle_cast({mnesia_down, Node}, State) ->
+ %% The node was already removed from recover_nodes in mnesia_down/1,
+ %% but we do it again here in the mnesia_recover process, in case
+ %% another event incorrectly added it back. This can happen during
+ %% Mnesia startup, which takes time between the connection, the
+ %% protocol negotiation and the merge of the schema.
+ %%
+ %% See a comment in handle_call({connect_nodes, ...), ...).
+ mnesia_lib:del(recover_nodes, Node),
case State#state.unclear_decision of
undefined ->
{noreply, State};
@@ -999,7 +1013,7 @@ decision(Tid) ->
decision(Tid, tabs()).
decision(Tid, [Tab | Tabs]) ->
- case catch ?ets_lookup(Tab, Tid) of
+ try ?ets_lookup(Tab, Tid) of
[D] when is_record(D, decision) ->
D;
[C] when is_record(C, transient_decision) ->
@@ -1009,8 +1023,8 @@ decision(Tid, [Tab | Tabs]) ->
ram_nodes = []
};
[] ->
- decision(Tid, Tabs);
- {'EXIT', _} ->
+ decision(Tid, Tabs)
+ catch error:_ ->
%% Recently switched transient decision table
decision(Tid, Tabs)
end;
@@ -1021,11 +1035,8 @@ outcome(Tid, Default) ->
outcome(Tid, Default, tabs()).
outcome(Tid, Default, [Tab | Tabs]) ->
- case catch ?ets_lookup_element(Tab, Tid, 3) of
- {'EXIT', _} ->
- outcome(Tid, Default, Tabs);
- Val ->
- Val
+ try ?ets_lookup_element(Tab, Tid, 3)
+ catch error:_ -> outcome(Tid, Default, Tabs)
end;
outcome(_Tid, Default, []) ->
Default.
diff --git a/lib/mnesia/src/mnesia_schema.erl b/lib/mnesia/src/mnesia_schema.erl
index 6e43052fb0..4c8234bbc7 100644
--- a/lib/mnesia/src/mnesia_schema.erl
+++ b/lib/mnesia/src/mnesia_schema.erl
@@ -151,7 +151,7 @@ exit_on_error(GoodRes) ->
val(Var) ->
case ?catch_val(Var) of
- {'EXIT', Reason} -> mnesia_lib:other_val(Var, Reason);
+ {'EXIT', _} -> mnesia_lib:other_val(Var);
Value -> Value
end.
@@ -262,7 +262,7 @@ incr_version(Cs) ->
[] -> {Major + 1, 0}; % All replicas are active
_ -> {Major, Minor + 1} % Some replicas are inactive
end,
- Cs#cstruct{version = {V, {node(), now()}}}.
+ Cs#cstruct{version = {V, {node(), erlang:timestamp()}}}.
%% Returns table name
insert_cstruct(Tid, Cs, KeepWhereabouts) ->
@@ -2151,14 +2151,14 @@ prepare_op(_Tid, {op, transform, Fun, TabDef}, _WaitFor) ->
mnesia_lib:db_fixtable(Storage, Tab, true),
Key = mnesia_lib:db_first(Tab),
Op = {op, transform, Fun, TabDef},
- case catch transform_objs(Fun, Tab, RecName,
- Key, NewArity, Storage, Type, [Op]) of
- {'EXIT', Reason} ->
- mnesia_lib:db_fixtable(Storage, Tab, false),
- exit({"Bad transform function", Tab, Fun, node(), Reason});
+ try transform_objs(Fun, Tab, RecName, Key,
+ NewArity, Storage, Type, [Op]) of
Objs ->
mnesia_lib:db_fixtable(Storage, Tab, false),
{true, Objs, mandatory}
+ catch _:Reason ->
+ mnesia_lib:db_fixtable(Storage, Tab, false),
+ exit({"Bad transform function", Tab, Fun, node(), Reason})
end
end;
@@ -2342,7 +2342,7 @@ undo_prepare_commit(Tid, Commit) ->
ignore;
Ops ->
%% Catch to allow failure mnesia_controller may not be started
- catch mnesia_controller:release_schema_commit_lock(),
+ ?SAFE(mnesia_controller:release_schema_commit_lock()),
undo_prepare_ops(Tid, Ops)
end,
Commit.
@@ -2489,14 +2489,14 @@ ram_delete_table(Tab, Storage) ->
%% delete possible index files and data .....
%% Got to catch this, since it crashes if no info has been set
%% in mnesia_gvar
- catch mnesia_index:del_transient(Tab, Storage),
+ ?CATCH(mnesia_index:del_transient(Tab, Storage)),
case ?catch_val({Tab, {index, snmp}}) of
{'EXIT', _} ->
ignore;
Etab ->
- catch mnesia_snmp_hook:delete_table(Tab, Etab)
+ ?SAFE(mnesia_snmp_hook:delete_table(Tab, Etab))
end,
- catch ?ets_delete_table(Tab)
+ ?SAFE(?ets_delete_table(Tab))
end.
purge_dir(Dir, KeepFiles) ->
@@ -2584,10 +2584,7 @@ info2(_, []) ->
io:format("~n", []).
get_table_properties(Tab) ->
- case catch mnesia_lib:db_match_object(ram_copies,
- mnesia_gvar, {{Tab, '_'}, '_'}) of
- {'EXIT', _} ->
- mnesia:abort({no_exists, Tab, all});
+ try mnesia_lib:db_match_object(ram_copies, mnesia_gvar, {{Tab, '_'}, '_'}) of
RawGvar ->
case [{Item, Val} || {{_Tab, Item}, Val} <- RawGvar] of
[] ->
@@ -2598,6 +2595,8 @@ get_table_properties(Tab) ->
Master = {master_nodes, mnesia:table_info(Tab, master_nodes)},
lists:sort([Size, Memory, Master | Gvar])
end
+ catch error:_ ->
+ mnesia:abort({no_exists, Tab, all})
end.
%%%%%%%%%%% RESTORE %%%%%%%%%%%
@@ -2620,15 +2619,15 @@ restore(_Opaque, BadArg) ->
{aborted, {badarg, BadArg}}.
restore(Opaque, Args, Module) when is_list(Args), is_atom(Module) ->
InitR = #r{opaque = Opaque, module = Module},
- case catch lists:foldl(fun check_restore_arg/2, InitR, Args) of
+ try lists:foldl(fun check_restore_arg/2, InitR, Args) of
R when is_record(R, r) ->
case mnesia_bup:read_schema(R#r.module, Opaque) of
{error, Reason} ->
{aborted, Reason};
BupSchema ->
schema_transaction(fun() -> do_restore(R, BupSchema) end)
- end;
- {'EXIT', Reason} ->
+ end
+ catch exit:Reason ->
{aborted, Reason}
end;
restore(_Opaque, Args, Module) ->
@@ -3073,15 +3072,13 @@ do_make_merge_schema(Node, NeedsConv, RemoteCs = #cstruct{}) ->
%% Returns a new cstruct or issues a fatal error
merge_cstructs(Cs, RemoteCs, Force) ->
verify_cstruct(Cs),
- case catch do_merge_cstructs(Cs, RemoteCs, Force) of
- {'EXIT', {aborted, _Reason}} when Force == true ->
- Cs;
- {'EXIT', Reason} ->
- exit(Reason);
+ try do_merge_cstructs(Cs, RemoteCs, Force) of
MergedCs when is_record(MergedCs, cstruct) ->
- MergedCs;
- Other ->
- throw(Other)
+ MergedCs
+ catch exit:{aborted, _Reason} when Force == true ->
+ Cs;
+ exit:Reason -> exit(Reason);
+ error:Reason -> exit(Reason)
end.
do_merge_cstructs(Cs, RemoteCs, Force) ->
diff --git a/lib/mnesia/src/mnesia_snmp_hook.erl b/lib/mnesia/src/mnesia_snmp_hook.erl
index 256f83b029..c76cf89ebb 100644
--- a/lib/mnesia/src/mnesia_snmp_hook.erl
+++ b/lib/mnesia/src/mnesia_snmp_hook.erl
@@ -30,15 +30,6 @@
-include("mnesia.hrl").
-val(Var) ->
- case ?catch_val(Var) of
- {'EXIT', _ReASoN_} ->
- case mnesia_lib:other_val(Var) of
- error -> mnesia_lib:pr_other(Var, _ReASoN_);
- Val -> Val
- end;
- _VaLuE_ -> _VaLuE_
- end.
check_ustruct([]) ->
true; %% default value, not SNMP'ified
@@ -85,12 +76,12 @@ delete_table(_MnesiaTab, Tree) ->
%%-----------------------------------------------------------------
update({clear_table, MnesiaTab}) ->
- Tree = val({MnesiaTab, {index, snmp}}),
+ Tree = mnesia_lib:val({MnesiaTab, {index, snmp}}),
b_clear(Tree),
ok;
update({Op, MnesiaTab, MnesiaKey, SnmpKey}) ->
- Tree = val({MnesiaTab, {index, snmp}}),
+ Tree = mnesia_lib:val({MnesiaTab, {index, snmp}}),
update(Op, Tree, MnesiaKey, SnmpKey).
update(Op, Tree, MnesiaKey, SnmpKey) ->
@@ -120,7 +111,7 @@ update(Op, Tree, MnesiaKey, SnmpKey) ->
%%-----------------------------------------------------------------
key_to_oid(Tab,Key) ->
- Types = val({Tab,snmp}),
+ Types = mnesia_lib:val({Tab,snmp}),
key_to_oid(Tab, Key, Types).
key_to_oid(Tab, Key, [{key, Types}]) ->
@@ -144,7 +135,7 @@ keys_to_oid(N, Key, Oid, Types) ->
%% This can be looked up in the tree, but that might be on a remote node.
%% It's probably faster to look it up, but use this when it might be remote
oid_to_key(Oid, Tab) ->
- [{key, Types}] = val({Tab,snmp}),
+ [{key, Types}] = mnesia_lib:val({Tab,snmp}),
oid_to_key_1(Types, Oid).
oid_to_key_1(integer, [Key]) -> Key;
diff --git a/lib/mnesia/src/mnesia_subscr.erl b/lib/mnesia/src/mnesia_subscr.erl
index 866a57e370..c39edea9e3 100644
--- a/lib/mnesia/src/mnesia_subscr.erl
+++ b/lib/mnesia/src/mnesia_subscr.erl
@@ -186,11 +186,11 @@ patch_record(Tab, Obj) ->
end.
what(Tab, Tid, {RecName, Key}, delete, undefined) ->
- case catch mnesia_lib:db_get(Tab, Key) of
- Old when is_list(Old) -> %% Op only allowed for set table.
- {mnesia_table_event, {delete, Tab, {RecName, Key}, Old, Tid}};
- _ ->
- %% Record just deleted by a dirty_op or
+ try mnesia_lib:db_get(Tab, Key) of
+ Old -> %% Op only allowed for set table.
+ {mnesia_table_event, {delete, Tab, {RecName, Key}, Old, Tid}}
+ catch error:_ ->
+ %% Record just deleted by a dirty_op or
%% the whole table has been deleted
ignore
end;
@@ -199,10 +199,10 @@ what(Tab, Tid, Obj, delete, Old) ->
what(Tab, Tid, Obj, delete_object, _Old) ->
{mnesia_table_event, {delete, Tab, Obj, [Obj], Tid}};
what(Tab, Tid, Obj, write, undefined) ->
- case catch mnesia_lib:db_get(Tab, element(2, Obj)) of
- Old when is_list(Old) ->
- {mnesia_table_event, {write, Tab, Obj, Old, Tid}};
- {'EXIT', _} ->
+ try mnesia_lib:db_get(Tab, element(2, Obj)) of
+ Old ->
+ {mnesia_table_event, {write, Tab, Obj, Old, Tid}}
+ catch error:_ ->
ignore
end;
what(Tab, Tid, Obj, write, Old) ->
@@ -386,12 +386,12 @@ activate(ClientPid, What, Var, OldSubscribers, SubscrTab) ->
case lists:member(ClientPid, Old) of
false ->
%% Don't care about checking old links
- case catch link(ClientPid) of
+ try link(ClientPid) of
true ->
?ets_insert(SubscrTab, {ClientPid, What}),
add_subscr(Var, What, ClientPid),
- {ok, node()};
- {'EXIT', _Reason} ->
+ {ok, node()}
+ catch error:_ ->
{error, {no_exists, ClientPid}}
end;
true ->
@@ -443,11 +443,10 @@ add_subscr({Tab, commit_work}, What, Pid) ->
deactivate(ClientPid, What, Var, SubscrTab) ->
?ets_match_delete(SubscrTab, {ClientPid, What}),
- case catch ?ets_lookup_element(SubscrTab, ClientPid, 1) of
- List when is_list(List) ->
- ignore;
- {'EXIT', _} ->
- unlink(ClientPid)
+ try
+ ?ets_lookup_element(SubscrTab, ClientPid, 1),
+ ignore
+ catch error:_ -> unlink(ClientPid)
end,
try
del_subscr(Var, What, ClientPid),
diff --git a/lib/mnesia/src/mnesia_text.erl b/lib/mnesia/src/mnesia_text.erl
index 0906d18da9..794e633238 100644
--- a/lib/mnesia/src/mnesia_text.erl
+++ b/lib/mnesia/src/mnesia_text.erl
@@ -84,8 +84,12 @@ validate_tab({Tabname, RecName, List}) ->
validate_tab(_) -> error(badtab).
make_tabs([{Tab, Def} | Tail]) ->
- case catch mnesia:table_info(Tab, where_to_read) of
- {'EXIT', _} -> %% non-existing table
+ try mnesia:table_info(Tab, where_to_read) of
+ Node ->
+ io:format("** Table ~w already exists on ~p, just entering data~n",
+ [Tab, Node]),
+ make_tabs(Tail)
+ catch exit:_ -> %% non-existing table
case mnesia:create_table(Tab, Def) of
{aborted, Reason} ->
io:format("** Failed to create table ~w ~n"
@@ -95,11 +99,7 @@ make_tabs([{Tab, Def} | Tail]) ->
_ ->
io:format("New table ~w~n", [Tab]),
make_tabs(Tail)
- end;
- Node ->
- io:format("** Table ~w already exists on ~p, just entering data~n",
- [Tab, Node]),
- make_tabs(Tail)
+ end
end;
make_tabs([]) ->
@@ -118,11 +118,9 @@ load_data(L) ->
parse(File) ->
case file(File) of
{ok, Terms} ->
- case catch collect(Terms) of
- {error, X} ->
- {error, X};
- Other ->
- {ok, Other}
+ try collect(Terms) of
+ Other -> {ok, Other}
+ catch throw:Error -> Error
end;
Other ->
Other
diff --git a/lib/mnesia/src/mnesia_tm.erl b/lib/mnesia/src/mnesia_tm.erl
index af658150da..b4b46228e9 100644
--- a/lib/mnesia/src/mnesia_tm.erl
+++ b/lib/mnesia/src/mnesia_tm.erl
@@ -51,6 +51,7 @@
]).
-include("mnesia.hrl").
+
-import(mnesia_lib, [set/2]).
-import(mnesia_lib, [fatal/2, verbose/2, dbg_out/2]).
@@ -119,7 +120,7 @@ init(Parent) ->
val(Var) ->
case ?catch_val(Var) of
- {'EXIT', _ReASoN_} -> mnesia_lib:other_val(Var, _ReASoN_);
+ {'EXIT', _} -> mnesia_lib:other_val(Var);
_VaLuE_ -> _VaLuE_
end.
@@ -224,11 +225,7 @@ doit_loop(#state{coordinators=Coordinators,participants=Participants,supervisor=
end;
{From, start_outer} -> %% Create and associate ets_tab with Tid
- case catch ?ets_new_table(mnesia_trans_store, [bag, public]) of
- {'EXIT', Reason} -> %% system limit
- Msg = "Cannot create an ets table for the "
- "local transaction store",
- reply(From, {error, {system_limit, Msg, Reason}}, State);
+ try ?ets_new_table(mnesia_trans_store, [bag, public]) of
Etab ->
tmlink(From),
C = mnesia_recover:incr_trans_tid_serial(),
@@ -237,6 +234,10 @@ doit_loop(#state{coordinators=Coordinators,participants=Participants,supervisor=
A2 = gb_trees:insert(Tid,[Etab],Coordinators),
S2 = State#state{coordinators = A2},
reply(From, {new_tid, Tid, Etab}, S2)
+ catch error:Reason -> %% system limit
+ Msg = "Cannot create an ets table for the "
+ "local transaction store",
+ reply(From, {error, {system_limit, Msg, Reason}}, State)
end;
{From, {ask_commit, Protocol, Tid, Commit, DiscNs, RamNs}} ->
@@ -339,15 +340,15 @@ doit_loop(#state{coordinators=Coordinators,participants=Participants,supervisor=
end;
{From, {add_store, Tid}} -> %% new store for nested transaction
- case catch ?ets_new_table(mnesia_trans_store, [bag, public]) of
- {'EXIT', Reason} -> %% system limit
- Msg = "Cannot create an ets table for a nested "
- "local transaction store",
- reply(From, {error, {system_limit, Msg, Reason}}, State);
+ try ?ets_new_table(mnesia_trans_store, [bag, public]) of
Etab ->
A2 = add_coord_store(Coordinators, Tid, Etab),
reply(From, {new_store, Etab},
State#state{coordinators = A2})
+ catch error:Reason -> %% system limit
+ Msg = "Cannot create an ets table for a nested "
+ "local transaction store",
+ reply(From, {error, {system_limit, Msg, Reason}}, State)
end;
{From, {del_store, Tid, Current, Obsolete, PropagateStore}} ->
@@ -471,13 +472,13 @@ doit_loop(#state{coordinators=Coordinators,participants=Participants,supervisor=
do_sync_dirty(From, Tid, Commit, _Tab) ->
?eval_debug_fun({?MODULE, sync_dirty, pre}, [{tid, Tid}]),
- Res = (catch do_dirty(Tid, Commit)),
+ Res = do_dirty(Tid, Commit),
?eval_debug_fun({?MODULE, sync_dirty, post}, [{tid, Tid}]),
From ! {?MODULE, node(), {dirty_res, Res}}.
do_async_dirty(Tid, Commit, _Tab) ->
?eval_debug_fun({?MODULE, async_dirty, pre}, [{tid, Tid}]),
- catch do_dirty(Tid, Commit),
+ do_dirty(Tid, Commit),
?eval_debug_fun({?MODULE, async_dirty, post}, [{tid, Tid}]).
@@ -501,7 +502,7 @@ process_dirty_queue(_Tab, []) ->
[].
prepare_pending_coordinators([{Tid, [Store | _Etabs]} | Coords], IgnoreNew) ->
- case catch ?ets_lookup(Store, pending) of
+ try ?ets_lookup(Store, pending) of
[] ->
prepare_pending_coordinators(Coords, IgnoreNew);
[Pending] ->
@@ -511,8 +512,8 @@ prepare_pending_coordinators([{Tid, [Store | _Etabs]} | Coords], IgnoreNew) ->
true ->
ignore
end,
- prepare_pending_coordinators(Coords, IgnoreNew);
- {'EXIT', _} ->
+ prepare_pending_coordinators(Coords, IgnoreNew)
+ catch error:_ ->
prepare_pending_coordinators(Coords, IgnoreNew)
end;
prepare_pending_coordinators([], _IgnoreNew) ->
@@ -573,11 +574,7 @@ recover_coordinator(Tid, Etabs) ->
Store = hd(Etabs),
CheckNodes = get_elements(nodes,Store),
TellNodes = CheckNodes -- [node()],
- case catch arrange(Tid, Store, async) of
- {'EXIT', Reason} ->
- dbg_out("Recovery of coordinator ~p failed:~n", [Tid, Reason]),
- Protocol = asym_trans,
- tell_outcome(Tid, Protocol, node(), CheckNodes, TellNodes);
+ try arrange(Tid, Store, async) of
{_N, Prep} ->
%% Tell the participants about the outcome
Protocol = Prep#prep.protocol,
@@ -596,6 +593,11 @@ recover_coordinator(Tid, Etabs) ->
false -> %% When killed before the store has been copied
ok %% to the new nested trans store.
end
+ catch _:Reason ->
+ dbg_out("Recovery of coordinator ~p failed: ~p~n",
+ [Tid, {Reason, erlang:get_stacktrace()}]),
+ Protocol = asym_trans,
+ tell_outcome(Tid, Protocol, node(), CheckNodes, TellNodes)
end,
erase_ets_tabs(Etabs),
transaction_terminated(Tid),
@@ -724,33 +726,25 @@ non_transaction(OldState={_,_,Trans}, Fun, Args, ActivityKind, Mod)
_ -> async
end,
case transaction(OldState, Fun, Args, infinity, Mod, Kind) of
- {atomic, Res} ->
- Res;
- {aborted,Res} ->
- exit(Res)
+ {atomic, Res} -> Res;
+ {aborted,Res} -> exit(Res)
end;
non_transaction(OldState, Fun, Args, ActivityKind, Mod) ->
Id = {ActivityKind, self()},
NewState = {Mod, Id, non_transaction},
put(mnesia_activity_state, NewState),
- %% I Want something uniqe here, references are expensive
- Ref = mNeSia_nOn_TrAnSacTioN,
- RefRes = (catch {Ref, apply(Fun, Args)}),
- case OldState of
- undefined -> erase(mnesia_activity_state);
- _ -> put(mnesia_activity_state, OldState)
- end,
- case RefRes of
- {Ref, Res} ->
- case Res of
- {'EXIT', Reason} -> exit(Reason);
- {aborted, Reason} -> mnesia:abort(Reason);
- _ -> Res
- end;
- {'EXIT', Reason} ->
- exit(Reason);
- Throw ->
- throw(Throw)
+ try apply(Fun, Args) of
+ {'EXIT', Reason} -> exit(Reason);
+ {aborted, Reason} -> mnesia:abort(Reason);
+ Res -> Res
+ catch
+ throw:Throw -> throw(Throw);
+ _:Reason -> exit(Reason)
+ after
+ case OldState of
+ undefined -> erase(mnesia_activity_state);
+ _ -> put(mnesia_activity_state, OldState)
+ end
end.
transaction(OldTidTs, Fun, Args, Retries, Mod, Type) ->
@@ -810,23 +804,28 @@ insert_objs([], _Tab) ->
ok.
execute_transaction(Fun, Args, Factor, Retries, Type) ->
- case catch apply_fun(Fun, Args, Type) of
- {'EXIT', Reason} ->
- check_exit(Fun, Args, Factor, Retries, Reason, Type);
+ try apply_fun(Fun, Args, Type) of
{atomic, Value} ->
mnesia_lib:incr_counter(trans_commits),
erase(mnesia_activity_state),
%% no need to clear locks, already done by commit ...
%% Flush any un processed mnesia_down messages we might have
flush_downs(),
- catch unlink(whereis(?MODULE)),
+ ?SAFE(unlink(whereis(?MODULE))),
{atomic, Value};
+ {do_abort, Reason} ->
+ check_exit(Fun, Args, Factor, Retries, {aborted, Reason}, Type);
{nested_atomic, Value} ->
mnesia_lib:incr_counter(trans_commits),
- {atomic, Value};
- Value -> %% User called throw
+ {atomic, Value}
+ catch throw:Value -> %% User called throw
Reason = {aborted, {throw, Value}},
- return_abort(Fun, Args, Reason)
+ return_abort(Fun, Args, Reason);
+ error:Reason ->
+ ST = erlang:get_stacktrace(),
+ check_exit(Fun, Args, Factor, Retries, {Reason,ST}, Type);
+ _:Reason ->
+ check_exit(Fun, Args, Factor, Retries, Reason, Type)
end.
apply_fun(Fun, Args, Type) ->
@@ -836,10 +835,10 @@ apply_fun(Fun, Args, Type) ->
{atomic, Result};
do_commit_nested ->
{nested_atomic, Result};
- {do_abort, {aborted, Reason}} ->
- {'EXIT', {aborted, Reason}};
- {do_abort, Reason} ->
- {'EXIT', {aborted, Reason}}
+ {do_abort, {aborted, Reason}} ->
+ {do_abort, Reason};
+ {do_abort, _} = Abort ->
+ Abort
end.
check_exit(Fun, Args, Factor, Retries, Reason, Type) ->
@@ -943,7 +942,7 @@ return_abort(Fun, Args, Reason) ->
OldStore = Ts#tidstore.store,
Nodes = get_elements(nodes, OldStore),
intercept_friends(Tid, Ts),
- catch mnesia_lib:incr_counter(trans_failures),
+ ?SAFE(mnesia_lib:incr_counter(trans_failures)),
Level = Ts#tidstore.level,
if
Level == 1 ->
@@ -951,7 +950,7 @@ return_abort(Fun, Args, Reason) ->
?MODULE ! {delete_transaction, Tid},
erase(mnesia_activity_state),
flush_downs(),
- catch unlink(whereis(?MODULE)),
+ ?SAFE(unlink(whereis(?MODULE))),
{aborted, mnesia_lib:fix_error(Reason)};
true ->
%% Nested transaction
@@ -1005,11 +1004,11 @@ erase_activity_id() ->
erase(mnesia_activity_state).
get_elements(Type,Store) ->
- case catch ?ets_lookup(Store, Type) of
+ try ?ets_lookup(Store, Type) of
[] -> [];
[{_,Val}] -> [Val];
- {'EXIT', _} -> [];
Vals -> [Val|| {_,Val} <- Vals]
+ catch error:_ -> []
end.
opt_propagate_store(_Current, _Obsolete, false) ->
@@ -1032,7 +1031,7 @@ intercept_friends(_Tid, Ts) ->
intercept_best_friend([],_) -> ok;
intercept_best_friend([{stop,Fun} | R],Ignore) ->
- catch Fun(),
+ ?CATCH(Fun()),
intercept_best_friend(R,Ignore);
intercept_best_friend([Pid | R],false) ->
Pid ! {activity_ended, undefined, self()},
@@ -1046,25 +1045,12 @@ wait_for_best_friend(Pid, Timeout) ->
{'EXIT', Pid, _} -> ok;
{activity_ended, _, Pid} -> ok
after Timeout ->
- case my_process_is_alive(Pid) of
+ case erlang:is_process_alive(Pid) of
true -> wait_for_best_friend(Pid, 1000);
false -> ok
end
end.
-my_process_is_alive(Pid) ->
- case catch erlang:is_process_alive(Pid) of % New BIF in R5
- true ->
- true;
- false ->
- false;
- {'EXIT', _} -> % Pre R5 backward compatibility
- case process_info(Pid, message_queue_len) of
- undefined -> false;
- _ -> true
- end
- end.
-
dirty(Protocol, Item) ->
{{Tab, Key}, _Val, _Op} = Item,
Tid = {dirty, self()},
@@ -1144,18 +1130,8 @@ arrange(Tid, Store, Type) ->
async -> #prep{protocol = sym_trans, records = Recs};
sync -> #prep{protocol = sync_sym_trans, records = Recs}
end,
- case catch do_arrange(Tid, Store, Key, Prep, N) of
- {'EXIT', Reason} ->
- dbg_out("do_arrange failed ~p ~p~n", [Reason, Tid]),
- case Reason of
- {aborted, R} ->
- mnesia:abort(R);
- _ ->
- mnesia:abort(Reason)
- end;
- {New, Prepared} ->
- {New, Prepared#prep{records = reverse(Prepared#prep.records)}}
- end.
+ {New, Prepared} = do_arrange(Tid, Store, Key, Prep, N),
+ {New, Prepared#prep{records = reverse(Prepared#prep.records)}}.
reverse([]) ->
[];
@@ -1522,7 +1498,7 @@ multi_commit(asym_trans, Majority, Tid, CR, Store) ->
Pending = mnesia_checkpoint:tm_enter_pending(Tid, DiscNs, RamNs),
?ets_insert(Store, Pending),
{WaitFor, Local} = ask_commit(asym_trans, Tid, CR2, DiscNs, RamNs),
- SchemaPrep = (catch mnesia_schema:prepare_commit(Tid, Local, {coord, WaitFor})),
+ SchemaPrep = ?CATCH(mnesia_schema:prepare_commit(Tid, Local, {coord, WaitFor})),
{Votes, Pids} = rec_all(WaitFor, Tid, do_commit, []),
?eval_debug_fun({?MODULE, multi_commit_asym_got_votes},
@@ -1589,7 +1565,7 @@ rec_acc_pre_commit([Pid | Tail], Tid, Store, Commit, Res, DumperMode,
GoodPids, SchemaAckPids);
{mnesia_down, Node} when Node == node(Pid) ->
AbortRes = {do_abort, {bad_commit, Node}},
- catch Pid ! {Tid, AbortRes}, %% Tell him that he has died
+ ?SAFE(Pid ! {Tid, AbortRes}), %% Tell him that he has died
rec_acc_pre_commit(Tail, Tid, Store, Commit, AbortRes, DumperMode,
GoodPids, SchemaAckPids)
end;
@@ -1666,7 +1642,7 @@ commit_participant(Coord, Tid, C = #commit{}, DiscNs, RamNs) ->
commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) ->
?eval_debug_fun({?MODULE, commit_participant, pre}, [{tid, Tid}]),
- case catch mnesia_schema:prepare_commit(Tid, C0, {part, Coord}) of
+ try mnesia_schema:prepare_commit(Tid, C0, {part, Coord}) of
{Modified, C = #commit{}, DumperMode} ->
%% If we can not find any local unclear decision
%% we should presume abort at startup recovery
@@ -1742,9 +1718,8 @@ commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) ->
reply(Coord, {do_abort, Tid, self(), {bad_commit,internal}}),
verbose("** ERROR ** commit_participant ~p, got unexpected msg: ~p~n",
[Tid, Msg])
- end;
-
- {'EXIT', Reason} ->
+ end
+ catch _:Reason ->
?eval_debug_fun({?MODULE, commit_participant, vote_no},
[{tid, Tid}]),
reply(Coord, {vote_no, Tid, Reason}),
@@ -1790,22 +1765,20 @@ do_commit(Tid, C, DumperMode) ->
%% Update the items
do_update(Tid, Storage, [Op | Ops], OldRes) ->
- case catch do_update_op(Tid, Storage, Op) of
- ok ->
- do_update(Tid, Storage, Ops, OldRes);
- {'EXIT', Reason} ->
+ try do_update_op(Tid, Storage, Op) of
+ ok -> do_update(Tid, Storage, Ops, OldRes);
+ NewRes -> do_update(Tid, Storage, Ops, NewRes)
+ catch _:Reason ->
%% This may only happen when we recently have
%% deleted our local replica, changed storage_type
%% or transformed table
%% BUGBUG: Updates may be lost if storage_type is changed.
%% Determine actual storage type and try again.
%% BUGBUG: Updates may be lost if table is transformed.
-
+ ST = erlang:get_stacktrace(),
verbose("do_update in ~w failed: ~p -> {'EXIT', ~p}~n",
- [Tid, Op, Reason]),
- do_update(Tid, Storage, Ops, OldRes);
- NewRes ->
- do_update(Tid, Storage, Ops, NewRes)
+ [Tid, Op, {Reason, ST}]),
+ do_update(Tid, Storage, Ops, OldRes)
end;
do_update(_Tid, _Storage, [], Res) ->
Res.
@@ -1821,14 +1794,15 @@ do_update_op(Tid, Storage, {{Tab, K}, Val, delete}) ->
do_update_op(Tid, Storage, {{Tab, K}, {RecName, Incr}, update_counter}) ->
{NewObj, OldObjs} =
- case catch mnesia_lib:db_update_counter(Storage, Tab, K, Incr) of
- NewVal when is_integer(NewVal), NewVal >= 0 ->
- {{RecName, K, NewVal}, [{RecName, K, NewVal - Incr}]};
- _ when Incr > 0 ->
+ try
+ NewVal = mnesia_lib:db_update_counter(Storage, Tab, K, Incr),
+ true = is_integer(NewVal) andalso (NewVal >= 0),
+ {{RecName, K, NewVal}, [{RecName, K, NewVal - Incr}]}
+ catch error:_ when Incr > 0 ->
New = {RecName, K, Incr},
mnesia_lib:db_put(Storage, Tab, New),
{New, []};
- _ ->
+ error:_ ->
Zero = {RecName, K, 0},
mnesia_lib:db_put(Storage, Tab, Zero),
{Zero, []}
@@ -1913,16 +1887,14 @@ commit_clear([H|R], Tid, Tab, K, Obj)
do_snmp(_, []) -> ok;
do_snmp(Tid, [Head | Tail]) ->
- case catch mnesia_snmp_hook:update(Head) of
- {'EXIT', Reason} ->
+ try mnesia_snmp_hook:update(Head)
+ catch _:Reason ->
%% This should only happen when we recently have
%% deleted our local replica or recently detached
%% the snmp table
-
+ ST = erlang:get_stacktrace(),
verbose("do_snmp in ~w failed: ~p -> {'EXIT', ~p}~n",
- [Tid, Head, Reason]);
- ok ->
- ignore
+ [Tid, Head, {Reason, ST}])
end,
do_snmp(Tid, Tail).
@@ -2093,7 +2065,7 @@ rec_all([Node | Tail], Tid, Res, Pids) ->
%% Make sure that mnesia_tm knows it has died
%% it may have been restarted
Abort = {do_abort, {bad_commit, Node}},
- catch {?MODULE, Node} ! {Tid, Abort},
+ ?SAFE({?MODULE, Node} ! {Tid, Abort}),
rec_all(Tail, Tid, Abort, Pids)
end;
rec_all([], _Tid, Res, Pids) ->
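The mnesia_tm hunks above systematically replace "case catch Expr of ... {'EXIT', Reason} -> ..." with "try ... of ... catch", so that only real exceptions are caught, thrown terms are no longer confused with ordinary return values, and the stacktrace can be logged. A minimal sketch of the idiom, mirroring the get_elements/2 change; the function names below are illustrative only and not part of the patch:

    %% Old style: any exception collapses into the {'EXIT', _} clause,
    %% and a successful result shaped like {'EXIT', _} would be misread.
    get_elements_old(Type, Store) ->
        case catch ets:lookup(Store, Type) of
            [] -> [];
            [{_, Val}] -> [Val];
            {'EXIT', _} -> [];
            Vals -> [Val || {_, Val} <- Vals]
        end.

    %% New style: only genuine errors are caught; throws from the
    %% wrapped call are no longer silently swallowed.
    get_elements_new(Type, Store) ->
        try ets:lookup(Store, Type) of
            [] -> [];
            [{_, Val}] -> [Val];
            Vals -> [Val || {_, Val} <- Vals]
        catch
            error:_ -> []
        end.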
diff --git a/lib/mnesia/test/mnesia_config_backup.erl b/lib/mnesia/test/mnesia_config_backup.erl
index 0916e255e2..a7d8c04a45 100644
--- a/lib/mnesia/test/mnesia_config_backup.erl
+++ b/lib/mnesia/test/mnesia_config_backup.erl
@@ -90,7 +90,8 @@ open_read(Name) ->
List = lists:reverse(ReverseList),
{ok, #backup{name = Name, mode = read, items = List}};
{error, Reason} ->
- {error, {open_read, Reason}}
+ %% {error, {open_read, Reason}}
+ {Reason, error} %% Testing error handling in mnesia
end.
read(Opaque) when Opaque#backup.mode == read ->
diff --git a/lib/mnesia/test/mnesia_config_test.erl b/lib/mnesia/test/mnesia_config_test.erl
index c495bce63f..a8fb93b28e 100644
--- a/lib/mnesia/test/mnesia_config_test.erl
+++ b/lib/mnesia/test/mnesia_config_test.erl
@@ -37,7 +37,6 @@
dump_log_update_in_place/1,
event_module/1,
- ignore_fallback_at_startup/1,
inconsistent_database/1,
max_wait_for_decision/1,
send_compressed/1,
@@ -104,7 +103,7 @@ all() ->
[access_module, auto_repair, backup_module, debug, dir,
dump_log_load_regulation, {group, dump_log_thresholds},
dump_log_update_in_place,
- event_module, ignore_fallback_at_startup,
+ event_module,
inconsistent_database, max_wait_for_decision,
send_compressed, app_test, {group, schema_config},
unknown_config].
@@ -317,11 +316,17 @@ backup_module(Config) when is_list(Config) ->
?match([], mnesia_test_lib:start_mnesia(Nodes, [test_table, test_table2])),
%% Now check newly started tables
- ?match({atomic, [1,2]},
+ ?match({atomic, [1,2]},
mnesia:transaction(fun() -> lists:sort(mnesia:all_keys(test_table)) end)),
- ?match({atomic, [3,4]},
+ ?match({atomic, [3,4]},
mnesia:transaction(fun() -> lists:sort(mnesia:all_keys(test_table2)) end)),
-
+
+ %% Test some error cases
+ mnesia:set_debug_level(debug),
+ ?match({error, _}, mnesia:install_fallback("NonExisting.FILE")),
+ ?match({error, _}, mnesia:install_fallback(filename:join(mnesia_lib:dir(), "LATEST.LOG"))),
+
+ %% Cleanup
file:delete(File),
?verify_mnesia(Nodes, []),
?cleanup(1, Config),
@@ -609,13 +614,6 @@ dump_log_load_regulation(Config) when is_list(Config) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-ignore_fallback_at_startup(doc) ->
- ["Start Mnesia without rollback of the database to the fallback. ",
- "Once Mnesia has been (re)started the installed fallback should",
- "be handled as a normal active fallback.",
- "Install a customized event module which disables the termination",
- "of Mnesia when mnesia_down occurrs with an active fallback."].
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
max_wait_for_decision(doc) ->
diff --git a/lib/mnesia/test/mnesia_evil_backup.erl b/lib/mnesia/test/mnesia_evil_backup.erl
index 9e0a8db1ae..68efa3f6ea 100644
--- a/lib/mnesia/test/mnesia_evil_backup.erl
+++ b/lib/mnesia/test/mnesia_evil_backup.erl
@@ -142,6 +142,9 @@ restore_errors(Config) when is_list(Config) ->
?match({aborted, {badarg, _}}, mnesia:restore(notAfile, [{skip_tables, xxx}])),
?match({aborted, {badarg, _}}, mnesia:restore(notAfile, [{recreate_tables, [schema]}])),
?match({aborted, {badarg, _}}, mnesia:restore(notAfile, [{default_op, asdklasd}])),
+ MnesiaDir = mnesia_lib:dir(),
+ ?match({aborted, {not_a_log_file, _}}, mnesia:restore(filename:join(MnesiaDir, "schema.DAT"), [])),
+ ?match({aborted, _}, mnesia:restore(filename:join(MnesiaDir, "LATEST.LOG"), [])),
ok.
restore_clear(suite) -> [];
@@ -488,6 +491,14 @@ install_fallback(Config) when is_list(Config) ->
mnesia_test_lib:kill_mnesia([Node1, Node2]),
timer:sleep(timer:seconds(1)), % Let it die!
+ ok = mnesia:start([{ignore_fallback_at_startup, true}]),
+ ok = mnesia:wait_for_tables([Tab, Tab2, Tab3], 10000),
+ ?match([{Tab, 6, test_nok}], mnesia:dirty_read({Tab, 6})),
+ mnesia_test_lib:kill_mnesia([Node1]),
+ application:set_env(mnesia, ignore_fallback_at_startup, false),
+
+ timer:sleep(timer:seconds(1)), % Let it die!
+
?match([], mnesia_test_lib:start_mnesia([Node1, Node2], [Tab, Tab2, Tab3])),
% Verify
@@ -510,6 +521,13 @@ install_fallback(Config) when is_list(Config) ->
file:delete(File3),
?match({error, _}, mnesia:install_fallback(File3)),
?match({error, _}, mnesia:install_fallback(File2, mnesia_badmod)),
+ ?match({error, _}, mnesia:install_fallback(File2, {foo, foo})),
+ ?match({error, _}, mnesia:install_fallback(File2, [{foo, foo}])),
+ ?match({error, {badarg, {skip_tables, _}}},
+ mnesia:install_fallback(File2, [{default_op, skip_tables},
+ {default_op, keep_tables},
+ {keep_tables, [Tab, Tab2, Tab3]},
+ {skip_tables, [foo,{asd}]}])),
?match(ok, mnesia:install_fallback(File2, mnesia_backup)),
?match(ok, file:delete(File)),
?match(ok, file:delete(File2)),
@@ -535,6 +553,7 @@ uninstall_fallback(Config) when is_list(Config) ->
?match(ok, mnesia:install_fallback(File2)),
?match(ok, file:delete(File)),
?match(ok, file:delete(File2)),
+ ?match({error, _}, mnesia:uninstall_fallback([foobar])),
?match(ok, mnesia:uninstall_fallback()),
mnesia_test_lib:kill_mnesia([Node1, Node2]),
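The install_fallback test above now also exercises the ignore_fallback_at_startup configuration parameter: Mnesia is started without rolling back to the installed fallback, the current data is inspected, and the parameter is then reset so the next restart activates the fallback as usual. A short sketch of that usage, following the test flow; Tabs and the timeout are placeholders:

    %% Start Mnesia but leave the installed fallback untouched.
    ok = mnesia:start([{ignore_fallback_at_startup, true}]),
    ok = mnesia:wait_for_tables(Tabs, 10000),
    %% ... read the current, non-rolled-back data here ...
    %% Restore the default so the fallback is activated on the next start.
    application:set_env(mnesia, ignore_fallback_at_startup, false).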
diff --git a/lib/mnesia/test/mnesia_evil_coverage_test.erl b/lib/mnesia/test/mnesia_evil_coverage_test.erl
index 2d1623b6ca..430c1f1d84 100644
--- a/lib/mnesia/test/mnesia_evil_coverage_test.erl
+++ b/lib/mnesia/test/mnesia_evil_coverage_test.erl
@@ -1338,11 +1338,11 @@ user_properties(Config) when is_list(Config) ->
?match([], mnesia:table_info(Tab2, user_properties)),
?match([], mnesia:table_info(Tab3, user_properties)),
- ?match({'EXIT', {no_exists, {Tab1, user_property, PropKey}}},
+ ?match({'EXIT', {aborted, {no_exists, {Tab1, user_property, PropKey}}}},
mnesia:read_table_property(Tab1, PropKey)),
- ?match({'EXIT', {no_exists, {Tab2, user_property, PropKey}}},
+ ?match({'EXIT', {aborted, {no_exists, {Tab2, user_property, PropKey}}}},
mnesia:read_table_property(Tab2, PropKey)),
- ?match({'EXIT', {no_exists, {Tab3, user_property, PropKey}}},
+ ?match({'EXIT', {aborted, {no_exists, {Tab3, user_property, PropKey}}}},
mnesia:read_table_property(Tab3, PropKey)),
?match({atomic, ok}, mnesia:write_table_property(Tab1, Prop)),
diff --git a/lib/mnesia/test/mnesia_recovery_test.erl b/lib/mnesia/test/mnesia_recovery_test.erl
index 0d0ad32fb0..946a9f97ba 100644
--- a/lib/mnesia/test/mnesia_recovery_test.erl
+++ b/lib/mnesia/test/mnesia_recovery_test.erl
@@ -320,7 +320,9 @@ read_during_down(Op, Config) when is_list(Config) ->
?log("W2R ~p~n", [W2R]),
loop_and_kill_mnesia(10, hd(W2R), Tabs),
[Pid ! self() || Pid <- Readers],
- ?match([ok, ok, ok], [receive ok -> ok after 1000 -> {Pid, mnesia_lib:dist_coredump()} end || Pid <- Readers]),
+ ?match([ok, ok, ok],
+ [receive ok -> ok after 5000 -> {Pid, mnesia_lib:dist_coredump()} end
+ || Pid <- Readers]),
?verify_mnesia(Ns, []).
reader(Tab, OP) ->
@@ -338,8 +340,12 @@ reader(Tab, OP) ->
?error("Expected ~p Got ~p ~n", [[{Tab, key, val}], Else]),
erlang:error(test_failed)
end,
- receive Pid ->
- Pid ! ok
+ receive
+ Pid when is_pid(Pid) ->
+ Pid ! ok;
+ Other ->
+ io:format("Msg: ~p~n", [Other]),
+ error(Other)
after 50 ->
reader(Tab, OP)
end.
@@ -1537,6 +1543,7 @@ disc_less(Config) when is_list(Config) ->
timer:sleep(500),
?match(ok, rpc:call(Node3, mnesia, start, [[{extra_db_nodes, [Node1, Node2]}]])),
?match(ok, rpc:call(Node3, mnesia, wait_for_tables, [[Tab1, Tab2, Tab3], 20000])),
+ ?match(ok, rpc:call(Node1, mnesia, wait_for_tables, [[Tab1, Tab2, Tab3], 20000])),
?match(ok, rpc:call(Node3, ?MODULE, verify_data, [Tab1, 100])),
?match(ok, rpc:call(Node3, ?MODULE, verify_data, [Tab2, 100])),
diff --git a/lib/mnesia/test/mnesia_test_lib.hrl b/lib/mnesia/test/mnesia_test_lib.hrl
index 94a195f01f..cd76377df6 100644
--- a/lib/mnesia/test/mnesia_test_lib.hrl
+++ b/lib/mnesia/test/mnesia_test_lib.hrl
@@ -66,12 +66,14 @@
?verbose("ok, ~n Result as expected:~p~n",[_AR_2]),
{success,_AR_2};
_AR_2 ->
- ?error("Not Matching Actual result was:~n ~p~n", [_AR_2]),
+ ?error("Not Matching Actual result was:~n ~p~n ~p~n",
+ [_AR_2, erlang:get_stacktrace()]),
{fail,_AR_2}
end;
- _:_AR_1 ->
- ?error("Not Matching Actual result was:~n ~p~n", [_AR_1]),
- {fail,_AR_1}
+ _T1_:_AR_1 ->
+ ?error("Not Matching Actual result was:~n ~p~n ~p~n",
+ [{_T1_,_AR_1}, erlang:get_stacktrace()]),
+ {fail,{_T1_,_AR_1}}
end
end()).
diff --git a/lib/mnesia/test/mnesia_trans_access_test.erl b/lib/mnesia/test/mnesia_trans_access_test.erl
index 237984978e..f906670296 100644
--- a/lib/mnesia/test/mnesia_trans_access_test.erl
+++ b/lib/mnesia/test/mnesia_trans_access_test.erl
@@ -930,20 +930,20 @@ index_update_bag(Config)when is_list(Config) ->
[IPos] = mnesia_lib:val({Tab,index}),
ITab = mnesia_lib:val({index_test,{index, IPos}}),
io:format("~n Index ~p @ ~p => ~p ~n~n",[IPos,ITab, ets:tab2list(ITab)]),
- ?match([{2,1},{2,2},{12,1}], ets:tab2list(ITab)),
+ ?match([{2,1},{2,2},{12,1}], lists:keysort(1,ets:tab2list(ITab))),
?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec5) end)),
{atomic, R60} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
?match([Rec1,Rec5,Rec2], lists:sort(R60)),
- ?match([{2,1},{2,2},{12,1}], ets:tab2list(ITab)),
+ ?match([{2,1},{2,2},{12,1}], lists:keysort(1,ets:tab2list(ITab))),
?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:delete_object(Rec3) end)),
{atomic, R61} = mnesia:transaction(fun() -> mnesia:index_read(Tab, 2, ValPos) end),
?match([Rec1,Rec5,Rec2], lists:sort(R61)),
{atomic, R62} = mnesia:transaction(fun() -> mnesia:index_read(Tab,12, ValPos) end),
?match([], lists:sort(R62)),
- ?match([{2,1},{2,2}], ets:tab2list(ITab)),
+ ?match([{2,1},{2,2}], lists:keysort(1,ets:tab2list(ITab))),
%% reset for rest of testcase
?match({atomic, ok}, mnesia:transaction(fun() -> mnesia:write(Rec3) end)),
diff --git a/lib/mnesia/vsn.mk b/lib/mnesia/vsn.mk
index 94eb360591..79dd495c4b 100644
--- a/lib/mnesia/vsn.mk
+++ b/lib/mnesia/vsn.mk
@@ -1 +1 @@
-MNESIA_VSN = 4.12.4
+MNESIA_VSN = 4.13
diff --git a/lib/observer/doc/src/crashdump_ug.xml b/lib/observer/doc/src/crashdump_ug.xml
index d22fb4cc40..ccd4d8a5b3 100644
--- a/lib/observer/doc/src/crashdump_ug.xml
+++ b/lib/observer/doc/src/crashdump_ug.xml
@@ -228,20 +228,17 @@
<p>The <em>ETS Tables</em> panel shows all ETS table information
found in the dump. The 'Id' is the same as the 'Table' field found
in the raw crashdump, and 'Memory' is the 'Words' field from the
- raw crashdump translated into bytes. 'Type' is the type of table,
- and it can be either "hash" or "tree". For tree tables there will
- be no value in the 'Bucket' field.</p>
+ raw crashdump translated into bytes. For tree tables there will
+ be no value in the 'Objects' field.</p>
+
+ <p>To open the detailed information page about the table, double
+ click or right click the row and select "Properties for
+ 'Identifier'".</p>
<p>To open the detailed information page about the owner process
of an ETS table, right click the row and select "Properties for
&lt;pid&gt;".</p>
- <p>Double clicking a row in the ETS Tables panel has no
- effect.</p>
-
- <p>From the left hand menu you can also select to see internal ETS
- tables.</p>
-
<p>
<seealso marker="erts:crash_dump#ets_tables">
More...</seealso>
@@ -267,6 +264,22 @@
</section>
<section>
+ <marker id="schedulers"/>
+ <title>Schedulers</title>
+
+ <p>The <em>Schedulers</em> panel shows all scheduler information
+ found in the dump.</p>
+
+ <p>To open the detailed information page about the scheduler,
+ double click or right click the row and select "Properties for
+ 'Identifier'".</p>
+
+ <p>
+ <seealso marker="erts:crash_dump">More...</seealso>
+ </p>
+ </section>
+
+ <section>
<marker id="funs"/>
<title>Funs</title>
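The new Schedulers panel documented above is populated from the "=scheduler" sections of the crash dump, in the same way as the existing Processes, Ports and ETS Tables panels. A minimal sketch of inspecting a dump with the viewer; the file name is only an example:

    %% In an Erlang shell on a node with the observer application available:
    crashdump_viewer:start().
    %% Browse to the dump file, e.g. "erl_crash.dump", and select the
    %% Schedulers tab to see per-scheduler run queue lengths and the
    %% current process/port of each scheduler.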
diff --git a/lib/observer/doc/src/notes.xml b/lib/observer/doc/src/notes.xml
index 11729078c2..a9ec68fc9e 100644
--- a/lib/observer/doc/src/notes.xml
+++ b/lib/observer/doc/src/notes.xml
@@ -31,6 +31,21 @@
<p>This document describes the changes made to the Observer
application.</p>
+<section><title>Observer 2.0.4</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fix crash when opening a process information window.</p>
+ <p>
+ Own Id: OTP-12634</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Observer 2.0.3</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/observer/doc/src/observer_ug.xml b/lib/observer/doc/src/observer_ug.xml
index 62f99c5210..fcb42f6c31 100644
--- a/lib/observer/doc/src/observer_ug.xml
+++ b/lib/observer/doc/src/observer_ug.xml
@@ -104,6 +104,29 @@
<note>
<p><em>Reds</em> can be presented as accumulated values or as values since last update.</p>
</note>
<p><c>Process info</c> opens a detailed information window on the selected process.
+ <taglist>
+ <tag>Process Information</tag>
+ <item>Shows the process information.</item>
+ <tag>Messages</tag>
+ <item>Shows the process messages.</item>
+ <tag>Dictionary</tag>
+ <item>Shows the process dictionary.</item>
+ <tag>Stack Trace</tag>
+ <item>Shows the process current stack trace.</item>
+ <tag>State</tag>
+ <item>Shows the process state.</item>
+ <tag>Log</tag>
+ <item>If enabled and available, shows the process SASL log entries.</item>
+ </taglist>
+ <note>
+ <p><c>Log</c> requires the SASL application to be started on the observed node, with log_mf_h as the log handler.
+ The observed node must be R16B02 or later.
+ The <c>rb</c> server must not be started on the observed node when the menu item 'Log/Toggle log view' is selected.
+ The <c>rb</c> server will be stopped on the observed node when Observer exits or when the observed node is changed.
+ </p>
+ </note>
+ </p>
<p><c>Trace Processes</c> will add the selected process identifiers to the <c>Trace Overview</c> view and the
node the processes reside on will be added as well.
<c>Trace Named Processes</c> will add the registered name of processes. This can be useful
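The Log tab described above relies on SASL's multi-file error report handler (log_mf_h) being active on the observed node. A sketch of enabling it before SASL is started; the directory and size limits are examples only:

    %% In the observed node's sys.config:
    %%   {sasl, [{error_logger_mf_dir, "/tmp/sasl_logs"},
    %%           {error_logger_mf_maxbytes, 1048576},
    %%           {error_logger_mf_maxfiles, 5}]}
    %% or equivalently at runtime, before SASL is started:
    application:load(sasl),
    ok = application:set_env(sasl, error_logger_mf_dir, "/tmp/sasl_logs"),
    ok = application:set_env(sasl, error_logger_mf_maxbytes, 1048576),
    ok = application:set_env(sasl, error_logger_mf_maxfiles, 5),
    ok = application:start(sasl).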
diff --git a/lib/observer/src/Makefile b/lib/observer/src/Makefile
index c120865213..8c6606d0a6 100644
--- a/lib/observer/src/Makefile
+++ b/lib/observer/src/Makefile
@@ -51,6 +51,7 @@ MODULES= \
cdv_multi_wx \
cdv_port_cb \
cdv_proc_cb \
+ cdv_sched_cb \
cdv_table_wx \
cdv_term_cb \
cdv_timer_cb \
@@ -61,6 +62,7 @@ MODULES= \
etop_txt \
observer \
observer_app_wx \
+ observer_alloc_wx \
observer_html_lib \
observer_lib \
observer_perf_wx \
diff --git a/lib/observer/src/cdv_bin_cb.erl b/lib/observer/src/cdv_bin_cb.erl
index d5fbceff1e..8b427e92b7 100644
--- a/lib/observer/src/cdv_bin_cb.erl
+++ b/lib/observer/src/cdv_bin_cb.erl
@@ -17,14 +17,14 @@
%% %CopyrightEnd%
-module(cdv_bin_cb).
--export([get_details/1,
+-export([get_details/2,
detail_pages/0]).
%% Callbacks for cdv_detail_wx
-get_details({Type, {T,Key}}) ->
+get_details({Type, {T,Key}}, _) ->
[{Key,Term}] = ets:lookup(T,Key),
{ok,{"Expanded Binary", {Type, Term}, []}};
-get_details({cdv, Id}) ->
+get_details({cdv, Id}, _) ->
{ok,Bin} = crashdump_viewer:expand_binary(Id),
{ok,{"Expanded Binary", {cvd, Bin}, []}}.
diff --git a/lib/observer/src/cdv_detail_wx.erl b/lib/observer/src/cdv_detail_wx.erl
index dc93507a36..ec0d877d87 100644
--- a/lib/observer/src/cdv_detail_wx.erl
+++ b/lib/observer/src/cdv_detail_wx.erl
@@ -19,7 +19,7 @@
-behaviour(wx_object).
--export([start_link/3]).
+-export([start_link/4]).
-export([init/1, handle_event/2, handle_cast/2, terminate/2, code_change/3,
handle_call/3, handle_info/2]).
@@ -38,13 +38,13 @@
-define(ID_NOTEBOOK, 604).
%% Detail view
-start_link(Id, ParentFrame, Callback) ->
- wx_object:start_link(?MODULE, [Id, ParentFrame, Callback, self()], []).
+start_link(Id, Data, ParentFrame, Callback) ->
+ wx_object:start_link(?MODULE, [Id, Data, ParentFrame, Callback, self()], []).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-init([Id, ParentFrame, Callback, Parent]) ->
- case Callback:get_details(Id) of
+init([Id, Data, ParentFrame, Callback, Parent]) ->
+ case Callback:get_details(Id, Data) of
{ok,Details} ->
init(Id,ParentFrame,Callback,Parent,Details);
{yes_no, Info, Fun} ->
diff --git a/lib/observer/src/cdv_dist_cb.erl b/lib/observer/src/cdv_dist_cb.erl
index f7e6c9aded..f45fb1f524 100644
--- a/lib/observer/src/cdv_dist_cb.erl
+++ b/lib/observer/src/cdv_dist_cb.erl
@@ -21,7 +21,7 @@
col_spec/0,
get_info/1,
get_detail_cols/1,
- get_details/1,
+ get_details/2,
detail_pages/0,
format/1]).
@@ -55,10 +55,10 @@ get_info(_) ->
{Info,TW}.
get_detail_cols(_) ->
- {[?COL_CH,?COL_CTRL],true}.
+ {[{node, ?COL_CH},{port,?COL_CTRL}],true}.
%% Callbacks for cdv_detail_wx
-get_details(Id) ->
+get_details(Id, _) ->
case crashdump_viewer:node_info(Id) of
{ok,Info,TW} ->
Proplist = crashdump_viewer:to_proplist(record_info(fields,nod),Info),
diff --git a/lib/observer/src/cdv_ets_cb.erl b/lib/observer/src/cdv_ets_cb.erl
index 2a5c170e58..371c7f0b32 100644
--- a/lib/observer/src/cdv_ets_cb.erl
+++ b/lib/observer/src/cdv_ets_cb.erl
@@ -20,7 +20,10 @@
-export([col_to_elem/1,
col_spec/0,
get_info/1,
- get_detail_cols/1]).
+ get_details/2,
+ get_detail_cols/1,
+ detail_pages/0
+ ]).
-include_lib("wx/include/wx.hrl").
-include("crashdump_viewer.hrl").
@@ -41,7 +44,7 @@ col_to_elem(?COL_ID) -> #ets_table.id;
col_to_elem(?COL_NAME) -> #ets_table.name;
col_to_elem(?COL_SLOT) -> #ets_table.slot;
col_to_elem(?COL_OWNER) -> #ets_table.pid;
-col_to_elem(?COL_TYPE) -> #ets_table.type;
+col_to_elem(?COL_TYPE) -> #ets_table.data_type;
col_to_elem(?COL_BUCK) -> #ets_table.buckets;
col_to_elem(?COL_OBJ) -> #ets_table.size;
col_to_elem(?COL_MEM) -> #ets_table.memory.
@@ -50,18 +53,68 @@ col_spec() ->
[{"Id", ?wxLIST_FORMAT_LEFT, 200},
{"Name", ?wxLIST_FORMAT_LEFT, 200},
{"Slot", ?wxLIST_FORMAT_RIGHT, 50},
- {"Owner", ?wxLIST_FORMAT_CENTRE, 90},
- {"Buckets", ?wxLIST_FORMAT_RIGHT, 50},
- {"Objects", ?wxLIST_FORMAT_RIGHT, 50},
- {"Memory", ?wxLIST_FORMAT_RIGHT, 80},
- {"Type", ?wxLIST_FORMAT_LEFT, 50}
+ {"Owner", ?wxLIST_FORMAT_CENTRE, 120},
+ {"Objects", ?wxLIST_FORMAT_RIGHT, 80},
+ {"Memory", ?wxLIST_FORMAT_RIGHT, 80}
+% {"Type", ?wxLIST_FORMAT_LEFT, 50}
].
get_info(Owner) ->
{ok,Info,TW} = crashdump_viewer:ets_tables(Owner),
{Info,TW}.
+%% Callbacks for cdv_detail_wx
+get_details(_Id, not_found) ->
+ Info = "The table you are searching for could not be found.",
+ {info,Info};
+get_details(Id, Data) ->
+ Proplist = crashdump_viewer:to_proplist(record_info(fields,ets_table),Data),
+ {ok,{"Table:" ++ Id,Proplist,""}}.
+
get_detail_cols(all) ->
- {[?COL_OWNER],false};
-get_detail_cols(_) ->
- {[],false}.
+ {[{ets, ?COL_ID}, {process, ?COL_OWNER}],true};
+get_detail_cols(_W) ->
+ {[],true}.
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%
+
+detail_pages() ->
+ [{"Table Information", fun init_gen_page/2}].
+
+init_gen_page(Parent, Info0) ->
+ Fields = info_fields(),
+ Details = proplists:get_value(details, Info0),
+ Info = if is_map(Details) -> Info0 ++ maps:to_list(Details);
+ true -> Info0
+ end,
+ cdv_info_wx:start_link(Parent,{Fields,Info,[]}).
+
+%%% Internal
+info_fields() ->
+ [{"Overview",
+ [{"Id", id},
+ {"Name", name},
+ {"Slot", slot},
+ {"Owner", owner},
+ {"Data Structure", data_type}
+ ]},
+ {"Settings",
+ [{"Type", type},
+ {"Protection", protection},
+ {"Compressed", compressed},
+ {"Fixed", fixed},
+ {"Lock write concurrency", write_c},
+ {"Lock read concurrency", read_c}
+ ]},
+ {"Memory Usage",
+ [{"Buckets", buckets},
+ {"Size", size},
+ {"Memory", memory},
+ {"Min Chain Length", chain_min},
+ {"Avg Chain Length", chain_avg},
+ {"Max Chain Length", chain_max},
+ {"Chain Length Std Dev", chain_stddev},
+ {"Chain Length Expected Std Dev", chain_exp_stddev}
+ ]}
+ ].
diff --git a/lib/observer/src/cdv_fun_cb.erl b/lib/observer/src/cdv_fun_cb.erl
index 689ef0e3bb..067377254a 100644
--- a/lib/observer/src/cdv_fun_cb.erl
+++ b/lib/observer/src/cdv_fun_cb.erl
@@ -55,4 +55,4 @@ get_info(_) ->
{Info,TW}.
get_detail_cols(_) ->
- {[?COL_MOD],false}.
+ {[{module, ?COL_MOD}],false}.
diff --git a/lib/observer/src/cdv_gen_cb.erl b/lib/observer/src/cdv_gen_cb.erl
index 6be717d76d..aa5e7c5182 100644
--- a/lib/observer/src/cdv_gen_cb.erl
+++ b/lib/observer/src/cdv_gen_cb.erl
@@ -42,4 +42,6 @@ info_fields() ->
{"Processes",num_procs},
{"ETS tables",num_ets},
{"Timers",num_timers},
- {"Funs",num_fun}]}].
+ {"Funs",num_fun},
+ {"Calling Thread", thread}
+ ]}].
diff --git a/lib/observer/src/cdv_html_wx.erl b/lib/observer/src/cdv_html_wx.erl
index b79c647f63..6d19589f5d 100644
--- a/lib/observer/src/cdv_html_wx.erl
+++ b/lib/observer/src/cdv_html_wx.erl
@@ -126,7 +126,7 @@ expand(Id,Callback,#state{expand_wins=Opened0}=State) ->
Opened =
case lists:keyfind(Id,1,Opened0) of
false ->
- EW = cdv_detail_wx:start_link(Id,State#state.panel,Callback),
+ EW = cdv_detail_wx:start_link(Id,[],State#state.panel,Callback),
wx_object:get_pid(EW) ! active,
[{Id,EW}|Opened0];
{_,EW} ->
diff --git a/lib/observer/src/cdv_mod_cb.erl b/lib/observer/src/cdv_mod_cb.erl
index e829ff4fca..8d33f9da9d 100644
--- a/lib/observer/src/cdv_mod_cb.erl
+++ b/lib/observer/src/cdv_mod_cb.erl
@@ -21,7 +21,7 @@
col_spec/0,
get_info/1,
get_detail_cols/1,
- get_details/1,
+ get_details/2,
detail_pages/0,
format/1]).
@@ -49,10 +49,10 @@ get_info(_) ->
{Info,TW}.
get_detail_cols(_) ->
- {[?COL_ID],true}.
+ {[{module, ?COL_ID}],true}.
%% Callbacks for cdv_detail_wx
-get_details(Id) ->
+get_details(Id, _) ->
{ok,Info,TW} = crashdump_viewer:loaded_mod_details(Id),
Proplist = crashdump_viewer:to_proplist(record_info(fields,loaded_mod),Info),
Title = io_lib:format("~s",[Info#loaded_mod.mod]),
diff --git a/lib/observer/src/cdv_port_cb.erl b/lib/observer/src/cdv_port_cb.erl
index 08488d3e34..409431218b 100644
--- a/lib/observer/src/cdv_port_cb.erl
+++ b/lib/observer/src/cdv_port_cb.erl
@@ -21,7 +21,7 @@
col_spec/0,
get_info/1,
get_detail_cols/1,
- get_details/1,
+ get_details/2,
detail_pages/0,
format/1]).
@@ -57,10 +57,10 @@ get_info(_) ->
{Info,TW}.
get_detail_cols(_) ->
- {[?COL_ID,?COL_CONN],true}.
+ {[{port, ?COL_ID},{process, ?COL_CONN}],true}.
%% Callbacks for cdv_detail_wx
-get_details(Id) ->
+get_details(Id, _Data) ->
case crashdump_viewer:port(Id) of
{ok,Info,TW} ->
Proplist =
@@ -70,7 +70,7 @@ get_details(Id) ->
Info = "The port you are searching for was residing on "
"a remote node. No port information is available. "
"Show information about the remote node?",
- Fun = fun() -> cdv_virtual_list_wx:start_detail_win(NodeId) end,
+ Fun = fun() -> cdv_virtual_list_wx:start_detail_win(NodeId, node) end,
{yes_no, Info, Fun};
{error,not_found} ->
Info = "The port you are searching for could not be found.",
diff --git a/lib/observer/src/cdv_proc_cb.erl b/lib/observer/src/cdv_proc_cb.erl
index dfc2df9c4c..0af6a9c235 100644
--- a/lib/observer/src/cdv_proc_cb.erl
+++ b/lib/observer/src/cdv_proc_cb.erl
@@ -21,7 +21,7 @@
col_spec/0,
get_info/1,
get_detail_cols/1,
- get_details/1,
+ get_details/2,
detail_pages/0]).
-include_lib("wx/include/wx.hrl").
@@ -57,10 +57,10 @@ get_info(_) ->
{Info,TW}.
get_detail_cols(_) ->
- {[?COL_ID],true}.
+ {[{process, ?COL_ID}],true}.
%% Callbacks for cdv_detail_wx
-get_details(Id) ->
+get_details(Id, _) ->
case crashdump_viewer:proc_details(Id) of
{ok,Info,TW} ->
%% The following table is used by observer_html_lib
@@ -76,7 +76,7 @@ get_details(Id) ->
Info = "The process you are searching for was residing on "
"a remote node. No process information is available. "
"Show information about the remote node?",
- Fun = fun() -> cdv_virtual_list_wx:start_detail_win(NodeId) end,
+ Fun = fun() -> cdv_virtual_list_wx:start_detail_win(NodeId, port) end,
{yes_no, Info, Fun};
{error,not_found} ->
Info = "The process you are searching for could not be found.",
@@ -126,10 +126,13 @@ info_fields() ->
{dynamic, current_func},
{"Registered Name", name},
{"Status", state},
+ {"Internal State", int_state},
{"Started", start_time},
{"Parent", {click,parent}},
{"Message Queue Len",msg_q_len},
+ {"Run queue", run_queue},
{"Reductions", reds},
+
{"Program counter", prog_count},
{"Continuation pointer",cp},
{"Arity",arity}]},
diff --git a/lib/observer/src/cdv_sched_cb.erl b/lib/observer/src/cdv_sched_cb.erl
new file mode 100644
index 0000000000..6ef4886c5e
--- /dev/null
+++ b/lib/observer/src/cdv_sched_cb.erl
@@ -0,0 +1,117 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+-module(cdv_sched_cb).
+
+-export([col_to_elem/1,
+ col_spec/0,
+ get_info/1,
+ get_details/2,
+ get_detail_cols/1,
+ detail_pages/0
+ ]).
+
+-include_lib("wx/include/wx.hrl").
+-include("crashdump_viewer.hrl").
+
+%% Columns
+-define(COL_ID, 0).
+-define(COL_PROC, ?COL_ID+1).
+-define(COL_PORT, ?COL_PROC+1).
+-define(COL_RQL, ?COL_PORT+1).
+-define(COL_PQL, ?COL_RQL+1).
+
+%% Callbacks for cdv_virtual_list_wx
+col_to_elem(id) -> col_to_elem(?COL_ID);
+col_to_elem(?COL_ID) -> #sched.name;
+col_to_elem(?COL_PROC) -> #sched.process;
+col_to_elem(?COL_PORT) -> #sched.port;
+col_to_elem(?COL_RQL) -> #sched.run_q;
+col_to_elem(?COL_PQL) -> #sched.port_q.
+
+col_spec() ->
+ [{"Id", ?wxLIST_FORMAT_RIGHT, 50},
+ {"Current Process", ?wxLIST_FORMAT_CENTER, 130},
+ {"Current Port", ?wxLIST_FORMAT_CENTER, 130},
+ {"Run Queue Length", ?wxLIST_FORMAT_RIGHT, 180},
+ {"Port Queue Length", ?wxLIST_FORMAT_RIGHT, 180}].
+
+get_info(_) ->
+ {ok,Info,TW} = crashdump_viewer:schedulers(),
+ {Info,TW}.
+
+get_details(_Id, not_found) ->
+ Info = "The scheduler you are searching for could not be found.",
+ {info,Info};
+get_details(Id, Data) ->
+ Proplist = crashdump_viewer:to_proplist(record_info(fields,sched),Data),
+ {ok,{"Scheduler: " ++ Id,Proplist,""}}.
+
+get_detail_cols(all) ->
+ {[{sched, ?COL_ID}, {process, ?COL_PROC}, {process, ?COL_PORT}],true};
+get_detail_cols(_) ->
+ {[],false}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%
+
+detail_pages() ->
+ [{"Scheduler Information", fun init_gen_page/2}].
+
+init_gen_page(Parent, Info0) ->
+ Fields = info_fields(),
+ Details = proplists:get_value(details, Info0),
+ Info = if is_map(Details) -> Info0 ++ maps:to_list(Details);
+ true -> Info0
+ end,
+ cdv_info_wx:start_link(Parent,{Fields,Info,[]}).
+
+%%% Internal
+info_fields() ->
+ [{"Scheduler Overview",
+ [{"Id", id},
+ {"Current Process",process},
+ {"Current Port", port},
+ {"Sleep Info Flags", sleep_info},
+ {"Sleep Aux Work", sleep_aux}
+ ]},
+ {"Run Queues",
+ [{"Flags", runq_flags},
+ {"Priority Max Length", runq_max},
+ {"Priority High Length", runq_high},
+ {"Priority Normal Length", runq_norm},
+ {"Priority Low Length", runq_low},
+ {"Port Length", port_q}
+ ]},
+ {"Current Process",
+ [{"State", currp_state},
+ {"Internal State", currp_int_state},
+ {"Program Counter", currp_prg_cnt},
+ {"CP", currp_cp},
+ {"Stack", {currp_stack, 0}},
+ {" ", {currp_stack, 1}},
+ {" ", {currp_stack, 2}},
+ {" ", {currp_stack, 3}},
+ {" ", {currp_stack, 4}},
+ {" ", {currp_stack, 5}},
+ {" ", {currp_stack, 6}},
+ {" ", {currp_stack, 7}},
+ {" ", {currp_stack, 8}},
+ {" ", {currp_stack, 9}},
+ {" ", {currp_stack, 10}},
+ {" ", {currp_stack, 11}}
+ ]}
+ ].
diff --git a/lib/observer/src/cdv_term_cb.erl b/lib/observer/src/cdv_term_cb.erl
index 4451045012..6db6d54514 100644
--- a/lib/observer/src/cdv_term_cb.erl
+++ b/lib/observer/src/cdv_term_cb.erl
@@ -17,11 +17,11 @@
%% %CopyrightEnd%
-module(cdv_term_cb).
--export([get_details/1,
+-export([get_details/2,
detail_pages/0]).
%% Callbacks for cdv_detail_wx
-get_details({Type, {T,Key}}) ->
+get_details({Type, {T,Key}}, _) ->
[{Key,Term}] = ets:lookup(T,Key),
{ok,{"Expanded Term", {Type,[Term, T]}, []}}.
diff --git a/lib/observer/src/cdv_timer_cb.erl b/lib/observer/src/cdv_timer_cb.erl
index d44592cf18..b4564941ea 100644
--- a/lib/observer/src/cdv_timer_cb.erl
+++ b/lib/observer/src/cdv_timer_cb.erl
@@ -49,6 +49,6 @@ get_info(Owner) ->
{Info,TW}.
get_detail_cols(all) ->
- {[?COL_OWNER],false};
+ {[{process, ?COL_OWNER}],false};
get_detail_cols(_) ->
{[],false}.
diff --git a/lib/observer/src/cdv_virtual_list_wx.erl b/lib/observer/src/cdv_virtual_list_wx.erl
index bfe115a42e..c0bc7018cb 100644
--- a/lib/observer/src/cdv_virtual_list_wx.erl
+++ b/lib/observer/src/cdv_virtual_list_wx.erl
@@ -19,7 +19,8 @@
-behaviour(wx_object).
--export([start_link/2, start_link/3, start_detail_win/1]).
+-export([start_link/2, start_link/3,
+ start_detail_win/1, start_detail_win/2]).
%% wx_object callbacks
-export([init/1, handle_info/2, terminate/2, code_change/3, handle_call/3,
@@ -65,22 +66,31 @@ start_link(ParentWin, Callback, Owner) ->
wx_object:start_link(?MODULE, [ParentWin, Callback, Owner], []).
start_detail_win(Id) ->
- Callback =
- case Id of
- "<"++_ ->
- cdv_proc_cb;
- "#Port"++_ ->
- cdv_port_cb;
- _ ->
- case catch list_to_integer(Id) of
- NodeId when is_integer(NodeId) ->
- cdv_dist_cb;
- _ ->
- cdv_mod_cb
- end
- end,
- start_detail_win(Callback,Id).
-start_detail_win(Callback,Id) ->
+ case Id of
+ "<"++_ ->
+ start_detail_win(Id, process);
+ "#Port"++_ ->
+ start_detail_win(Id, port);
+ _ ->
+ io:format("cdv: unknown identifier: ~p~n",[Id]),
+ ignore
+ end.
+
+start_detail_win(Id, process) ->
+ start_detail_win_2(cdv_proc_cb, Id);
+start_detail_win(Id, port) ->
+ start_detail_win_2(cdv_port_cb, Id);
+start_detail_win(Id, node) ->
+ start_detail_win_2(cdv_dist_cb, Id);
+start_detail_win(Id, module) ->
+ start_detail_win_2(cdv_mod_cb, Id);
+start_detail_win(Id, ets) ->
+ start_detail_win_2(cdv_ets_cb, Id);
+start_detail_win(Id, sched) ->
+ start_detail_win_2(cdv_sched_cb, Id).
+
+
+start_detail_win_2(Callback,Id) ->
wx_object:cast(Callback,{start_detail_win,Id}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -158,15 +168,14 @@ create_list_box(Panel, Holder, Callback, Owner) ->
do_start_detail_win(undefined, State) ->
State;
do_start_detail_win(Id, #state{panel=Panel,detail_wins=Opened,
- callback=Callback}=State) ->
+ holder=Holder,callback=Callback}=State) ->
NewOpened =
case lists:keyfind(Id, 1, Opened) of
false ->
- case cdv_detail_wx:start_link(Id, Panel, Callback) of
- {error, _} ->
- Opened;
- IW ->
- [{Id, IW} | Opened]
+ Data = call(Holder, {get_data, self(), Id}),
+ case cdv_detail_wx:start_link(Id, Data, Panel, Callback) of
+ {error, _} -> Opened;
+ IW -> [{Id, IW} | Opened]
end;
{_, IW} ->
wxFrame:raise(IW),
@@ -247,8 +256,8 @@ handle_event(#wx{id=MenuId,
event=#wxCommand{type = command_menu_selected}},
#state{menu_items=MenuItems} = State) ->
case lists:keyfind(MenuId,1,MenuItems) of
- {MenuId,Id} ->
- start_detail_win(Id);
+ {MenuId,Type,Id} ->
+ start_detail_win(Id, Type);
false ->
ok
end,
@@ -265,7 +274,7 @@ handle_event(#wx{event=#wxList{type=command_list_item_right_click,
Menu = wxMenu:new(),
MenuItems =
lists:flatmap(
- fun(Col) ->
+ fun({Type, Col}) ->
MenuId = ?ID_DETAILS + Col,
ColText = call(Holder, {get_row, self(), Row, Col}),
case ColText of
@@ -273,14 +282,15 @@ handle_event(#wx{event=#wxList{type=command_list_item_right_click,
_ ->
What =
case catch list_to_integer(ColText) of
- NodeId when is_integer(NodeId) ->
+ NodeId when is_integer(NodeId),
+ Type =:= node ->
"node " ++ ColText;
_ ->
ColText
end,
Text = "Properties for " ++ What,
wxMenu:append(Menu, MenuId, Text),
- [{MenuId,ColText}]
+ [{MenuId,Type,ColText}]
end
end,
MenuCols),
@@ -300,9 +310,14 @@ handle_event(#wx{event=#wxList{type=command_list_col_click, col=Col}},
handle_event(#wx{event=#wxList{type=command_list_item_activated,
itemIndex=Row}},
- #state{holder=Holder} = State) ->
- Id = call(Holder, {get_row, self(), Row, id}),
- start_detail_win(Id),
+ #state{holder=Holder, menu_cols=MenuCols} = State) ->
+ case MenuCols of
+ [{Type, _}|_] ->
+ Id = call(Holder, {get_row, self(), Row, id}),
+ start_detail_win(Id, Type);
+ _ ->
+ ignore
+ end,
{noreply, State};
handle_event(Event, State) ->
@@ -346,7 +361,7 @@ init_table_holder(Parent, Attrs, Callback, InfoList0) ->
attrs=Attrs,
callback=Callback}).
-table_holder(#holder{callback=Callback, attrs=Attrs}=S0) ->
+table_holder(#holder{callback=Callback, attrs=Attrs, info=Info}=S0) ->
receive
_M={get_row, From, Row, Col} ->
%% erlang:display(_M),
@@ -360,6 +375,9 @@ table_holder(#holder{callback=Callback, attrs=Attrs}=S0) ->
%% erlang:display(_M),
State = change_sort(Callback:col_to_elem(Col), S0),
table_holder(State);
+ _M={get_data, From, Id} ->
+ search_id(From, Id, Callback, Info),
+ table_holder(S0);
stop ->
ok;
What ->
@@ -367,6 +385,21 @@ table_holder(#holder{callback=Callback, attrs=Attrs}=S0) ->
table_holder(S0)
end.
+search_id(From, Id, Callback, Info) ->
+ Find = fun(_, RowInfo, _) ->
+ search_id(Callback, RowInfo, Id)
+ end,
+ Res = try array:foldl(Find, not_found, Info)
+ catch Data -> Data end,
+ From ! {self(), Res},
+ ok.
+
+search_id(Callback, RowInfo, Id) ->
+ case observer_lib:to_str(get_cell_data(Callback, id, RowInfo)) of
+ Id -> throw(RowInfo);
+ _Str -> not_found
+ end.
+
change_sort(Col, S0=#holder{parent=Parent, info=Info0, sort=Sort0}) ->
NRows = array:size(Info0),
InfoList0 = array:to_list(Info0),
diff --git a/lib/observer/src/cdv_wx.erl b/lib/observer/src/cdv_wx.erl
index 26df60b0a6..ec0c652a27 100644
--- a/lib/observer/src/cdv_wx.erl
+++ b/lib/observer/src/cdv_wx.erl
@@ -44,6 +44,7 @@
-define(PORT_STR, "Ports").
-define(ETS_STR, "ETS Tables").
-define(TIMER_STR, "Timers").
+-define(SCHEDULER_STR, "Schedulers").
-define(FUN_STR, "Funs").
-define(ATOM_STR, "Atoms").
-define(DIST_STR, "Nodes").
@@ -66,6 +67,7 @@
port_panel,
ets_panel,
timer_panel,
+ sched_panel,
fun_panel,
atom_panel,
dist_panel,
@@ -171,6 +173,9 @@ setup(#state{frame=Frame, notebook=Notebook}=State) ->
%% Timer Panel
TimerPanel = add_page(Notebook, ?TIMER_STR, cdv_virtual_list_wx,cdv_timer_cb),
+ %% Scheduler Panel
+ SchedPanel = add_page(Notebook, ?SCHEDULER_STR, cdv_virtual_list_wx, cdv_sched_cb),
+
%% Fun Panel
FunPanel = add_page(Notebook, ?FUN_STR, cdv_virtual_list_wx, cdv_fun_cb),
@@ -202,6 +207,7 @@ setup(#state{frame=Frame, notebook=Notebook}=State) ->
port_panel = PortPanel,
ets_panel = EtsPanel,
timer_panel = TimerPanel,
+ sched_panel = SchedPanel,
fun_panel = FunPanel,
atom_panel = AtomPanel,
dist_panel = DistPanel,
@@ -335,7 +341,8 @@ check_page_title(Notebook) ->
get_active_pid(#state{notebook=Notebook, gen_panel=Gen, pro_panel=Pro,
port_panel=Ports, ets_panel=Ets, timer_panel=Timers,
fun_panel=Funs, atom_panel=Atoms, dist_panel=Dist,
- mod_panel=Mods, mem_panel=Mem, int_panel=Int
+ mod_panel=Mods, mem_panel=Mem, int_panel=Int,
+ sched_panel=Sched
}) ->
Panel = case check_page_title(Notebook) of
?GEN_STR -> Gen;
@@ -343,6 +350,7 @@ get_active_pid(#state{notebook=Notebook, gen_panel=Gen, pro_panel=Pro,
?PORT_STR -> Ports;
?ETS_STR -> Ets;
?TIMER_STR -> Timers;
+ ?SCHEDULER_STR -> Sched;
?FUN_STR -> Funs;
?ATOM_STR -> Atoms;
?DIST_STR -> Dist;
diff --git a/lib/observer/src/crashdump_viewer.erl b/lib/observer/src/crashdump_viewer.erl
index 99329b94e2..007fc74279 100644
--- a/lib/observer/src/crashdump_viewer.erl
+++ b/lib/observer/src/crashdump_viewer.erl
@@ -63,6 +63,7 @@
allocator_info/0,
hash_tables/0,
index_tables/0,
+ schedulers/0,
expand_binary/1]).
%% Library function
@@ -114,6 +115,7 @@
-define(proc_heap,proc_heap).
-define(proc_messages,proc_messages).
-define(proc_stack,proc_stack).
+-define(scheduler,scheduler).
-define(timer,timer).
-define(visible_node,visible_node).
@@ -267,6 +269,8 @@ hash_tables() ->
call(hash_tables).
index_tables() ->
call(index_tables).
+schedulers() ->
+ call(schedulers).
%%%-----------------------------------------------------------------
%%% Called when a link to a process (Pid) is clicked.
@@ -320,6 +324,8 @@ handle_call(general_info,_From,State=#state{file=File}) ->
"Some information might be missing."];
false -> []
end,
+ ets:insert(cdv_reg_proc_table,
+ {cdv_dump_node_name,GenInfo#general_info.node_name}),
{reply,{ok,GenInfo,TW},State#state{wordsize=WS, num_atoms=NumAtoms}};
handle_call({expand_binary,{Offset,Size,Pos}},_From,State=#state{file=File}) ->
Fd = open(File),
@@ -429,7 +435,11 @@ handle_call(hash_tables,_From,State=#state{file=File}) ->
handle_call(index_tables,_From,State=#state{file=File}) ->
IndexTables=index_tables(File),
TW = truncated_warning([?hash_table,?index_table]),
- {reply,{ok,IndexTables,TW},State}.
+ {reply,{ok,IndexTables,TW},State};
+handle_call(schedulers,_From,State=#state{file=File}) ->
+ Schedulers=schedulers(File),
+ TW = truncated_warning([?scheduler]),
+ {reply,{ok,Schedulers,TW},State}.
@@ -675,9 +685,11 @@ skip(Fd,<<>>) ->
val(Fd) ->
+ val(Fd, "-1").
+val(Fd, NoExist) ->
case get_rest_of_line(Fd) of
- {eof,[]} -> "-1";
- [] -> "-1";
+ {eof,[]} -> NoExist;
+ [] -> NoExist;
{eof,Val} -> Val;
Val -> Val
end.
@@ -926,7 +938,7 @@ general_info(File) ->
N;
[] ->
case lookup_index(?no_distribution) of
- [_] -> "nonode@nohost";
+ [_] -> "'nonode@nohost'";
[] -> "unknown"
end
end,
@@ -965,6 +977,8 @@ get_general_info(Fd,GenInfo) ->
get_general_info(Fd,GenInfo#general_info{taints=Val});
"Atoms" ->
get_general_info(Fd,GenInfo#general_info{num_atoms=val(Fd)});
+ "Calling Thread" ->
+ get_general_info(Fd,GenInfo#general_info{thread=val(Fd)});
"=" ++ _next_tag ->
GenInfo;
Other ->
@@ -1131,6 +1145,10 @@ all_procinfo(Fd,Fun,Proc,WS,LineHead) ->
"arity = " ++ Arity ->
%%! Temporary workaround
get_procinfo(Fd,Fun,Proc#proc{arity=Arity--"\r\n"},WS);
+ "Run queue" ->
+ get_procinfo(Fd,Fun,Proc#proc{run_queue=val(Fd)},WS);
+ "Internal State" ->
+ get_procinfo(Fd,Fun,Proc#proc{int_state=val(Fd)},WS);
"=" ++ _next_tag ->
Proc;
Other ->
@@ -1165,6 +1183,19 @@ parse_pid(Str) ->
{Pid,Rest} = parse_link(Str,[]),
{{Pid,Pid},Rest}.
+parse_monitor("{"++Str) ->
+ %% Named process
+ {Name,Node,Rest1} = parse_name_node(Str,[]),
+ Pid = get_pid_from_name(Name,Node),
+ case parse_link(string:strip(Rest1,left,$,),[]) of
+ {Ref,"}"++Rest2} ->
+ %% Bug in break.c - prints an extra "}" for remote
+ %% nodes... thus the strip
+ {{Pid,"{"++Name++","++Node++"} ("++Ref++")"},
+ string:strip(Rest2,left,$})};
+ {Ref,[]} ->
+ {{Pid,"{"++Name++","++Node++"} ("++Ref++")"},[]}
+ end;
parse_monitor(Str) ->
case parse_link(Str,[]) of
{Pid,","++Rest1} ->
@@ -1186,23 +1217,58 @@ parse_link([],Acc) ->
%% truncated
{lists:reverse(Acc),[]}.
+parse_name_node(","++Rest,Name) ->
+ parse_name_node(Rest,Name,[]);
+parse_name_node([H|T],Name) ->
+ parse_name_node(T,[H|Name]);
+parse_name_node([],Name) ->
+ %% truncated
+ {lists:reverse(Name),[],[]}.
+
+parse_name_node("}"++Rest,Name,Node) ->
+ {lists:reverse(Name),lists:reverse(Node),Rest};
+parse_name_node([H|T],Name,Node) ->
+ parse_name_node(T,Name,[H|Node]);
+parse_name_node([],Name,Node) ->
+ %% truncated
+ {lists:reverse(Name),lists:reverse(Node),[]}.
+
+get_pid_from_name(Name,Node) ->
+ case ets:lookup(cdv_reg_proc_table,cdv_dump_node_name) of
+ [{_,Node}] ->
+ case ets:lookup(cdv_reg_proc_table,Name) of
+ [{_,Pid}] when is_pid(Pid) ->
+ pid_to_list(Pid);
+ _ ->
+ "<unkonwn_pid>"
+ end;
+ _ ->
+ "<unknown_pid_other_node>"
+ end.
+
maybe_other_node(Id) ->
Channel =
case split($.,Id) of
{"<" ++ N, _Rest} ->
N;
{"#Port<" ++ N, _Rest} ->
- N
+ N;
+ {_, []} ->
+ not_found
end,
+ maybe_other_node2(Channel).
+
+maybe_other_node2(not_found) -> not_found;
+maybe_other_node2(Channel) ->
Ms = ets:fun2ms(
- fun({{Tag,Start},Ch}) when Tag=:=?visible_node, Ch=:=Channel ->
+ fun({{Tag,Start},Ch}) when Tag=:=?visible_node, Ch=:=Channel ->
{"Visible Node",Start};
({{Tag,Start},Ch}) when Tag=:=?hidden_node, Ch=:=Channel ->
{"Hidden Node",Start};
- ({{Tag,Start},Ch}) when Tag=:=?not_connected, Ch=:=Channel ->
+ ({{Tag,Start},Ch}) when Tag=:=?not_connected, Ch=:=Channel ->
{"Not Connected Node",Start}
end),
-
+
case ets:select(cdv_dump_index_table,Ms) of
[] ->
not_found;
@@ -1457,7 +1523,7 @@ get_ets_tables(File,Pid,WS) ->
end,
lookup_and_parse_index(File,{?ets,Pid},ParseFun,"ets").
-get_etsinfo(Fd,EtsTable,WS) ->
+get_etsinfo(Fd,EtsTable = #ets_table{details=Ds},WS) ->
case line_head(Fd) of
"Slot" ->
get_etsinfo(Fd,EtsTable#ets_table{slot=list_to_integer(val(Fd))},WS);
@@ -1467,7 +1533,7 @@ get_etsinfo(Fd,EtsTable,WS) ->
get_etsinfo(Fd,EtsTable#ets_table{name=val(Fd)},WS);
"Ordered set (AVL tree), Elements" ->
skip_rest_of_line(Fd),
- get_etsinfo(Fd,EtsTable#ets_table{type="tree",buckets="-"},WS);
+ get_etsinfo(Fd,EtsTable#ets_table{data_type="tree"},WS);
"Buckets" ->
%% A bug in erl_db_hash.c prints a space after the buckets
%% - need to strip the string to make list_to_integer/1 happy.
@@ -1482,9 +1548,42 @@ get_etsinfo(Fd,EtsTable,WS) ->
-1 -> -1; % probably truncated
_ -> Words * WS
end,
- get_etsinfo(Fd,EtsTable#ets_table{memory=Bytes},WS);
+ get_etsinfo(Fd,EtsTable#ets_table{memory={bytes,Bytes}},WS);
"=" ++ _next_tag ->
EtsTable;
+ "Chain Length Min" ->
+ Val = val(Fd),
+ get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{chain_min=>Val}},WS);
+ "Chain Length Avg" ->
+ Val = try list_to_float(string:strip(val(Fd))) catch _:_ -> "-" end,
+ get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{chain_avg=>Val}},WS);
+ "Chain Length Max" ->
+ Val = val(Fd),
+ get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{chain_max=>Val}},WS);
+ "Chain Length Std Dev" ->
+ Val = val(Fd),
+ get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{chain_stddev=>Val}},WS);
+ "Chain Length Expected Std Dev" ->
+ Val = val(Fd),
+ get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{chain_exp_stddev=>Val}},WS);
+ "Fixed" ->
+ Val = val(Fd),
+ get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{fixed=>Val}},WS);
+ "Type" ->
+ Val = val(Fd),
+ get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{data_type=>Val}},WS);
+ "Protection" ->
+ Val = val(Fd),
+ get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{protection=>Val}},WS);
+ "Compressed" ->
+ Val = val(Fd),
+ get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{compressed=>Val}},WS);
+ "Write Concurrency" ->
+ Val = val(Fd),
+ get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{write_c=>Val}},WS);
+ "Read Concurrency" ->
+ Val = val(Fd),
+ get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{read_c=>Val}},WS);
Other ->
unexpected(Fd,Other,"ETS info"),
EtsTable
@@ -2224,6 +2323,89 @@ get_indextableinfo1(Fd,IndexTable) ->
IndexTable
end.
+
+%%-----------------------------------------------------------------
+%% Page with scheduler table information
+schedulers(File) ->
+ case lookup_index(?scheduler) of
+ [] ->
+ [];
+ Schedulers ->
+ Fd = open(File),
+ R = lists:map(fun({Name,Start}) ->
+ get_schedulerinfo(Fd,Name,Start)
+ end,
+ Schedulers),
+ close(Fd),
+ R
+ end.
+
+get_schedulerinfo(Fd,Name,Start) ->
+ pos_bof(Fd,Start),
+ get_schedulerinfo1(Fd,#sched{name=Name}).
+
+get_schedulerinfo1(Fd,Sched=#sched{details=Ds}) ->
+ case line_head(Fd) of
+ "Current Process" ->
+ get_schedulerinfo1(Fd,Sched#sched{process=val(Fd, "None")});
+ "Current Port" ->
+ get_schedulerinfo1(Fd,Sched#sched{port=val(Fd, "None")});
+ "Run Queue Max Length" ->
+ RQMax = list_to_integer(val(Fd)),
+ RQ = RQMax + Sched#sched.run_q,
+ get_schedulerinfo1(Fd,Sched#sched{run_q=RQ, details=Ds#{runq_max=>RQMax}});
+ "Run Queue High Length" ->
+ RQHigh = list_to_integer(val(Fd)),
+ RQ = RQHigh + Sched#sched.run_q,
+ get_schedulerinfo1(Fd,Sched#sched{run_q=RQ, details=Ds#{runq_high=>RQHigh}});
+ "Run Queue Normal Length" ->
+ RQNorm = list_to_integer(val(Fd)),
+ RQ = RQNorm + Sched#sched.run_q,
+ get_schedulerinfo1(Fd,Sched#sched{run_q=RQ, details=Ds#{runq_norm=>RQNorm}});
+ "Run Queue Low Length" ->
+ RQLow = list_to_integer(val(Fd)),
+ RQ = RQLow + Sched#sched.run_q,
+ get_schedulerinfo1(Fd,Sched#sched{run_q=RQ, details=Ds#{runq_low=>RQLow}});
+ "Run Queue Port Length" ->
+ RQ = list_to_integer(val(Fd)),
+ get_schedulerinfo1(Fd,Sched#sched{port_q=RQ});
+
+ "Scheduler Sleep Info Flags" ->
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{sleep_info=>val(Fd, "None")}});
+ "Scheduler Sleep Info Aux Work" ->
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{sleep_aux=>val(Fd, "None")}});
+
+ "Run Queue Flags" ->
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{runq_flags=>val(Fd, "None")}});
+
+ "Current Process State" ->
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_state=>val(Fd)}});
+ "Current Process Internal State" ->
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_int_state=>val(Fd)}});
+ "Current Process Program counter" ->
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_prg_cnt=>val(Fd)}});
+ "Current Process CP" ->
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_cp=>val(Fd)}});
+ "Current Process Limited Stack Trace" ->
+ %% If present, this is the last entry in the scheduler information block
+ Sched#sched{details=get_limited_stack(Fd, 0, Ds)};
+ "=" ++ _next_tag ->
+ Sched;
+ Other ->
+ unexpected(Fd,Other,"scheduler information"),
+ Sched
+ end.
+
+get_limited_stack(Fd, N, Ds) ->
+ case val(Fd) of
+ Addr = "0x" ++ _ ->
+ get_limited_stack(Fd, N+1, Ds#{{currp_stack, N} => Addr});
+ "=" ++ _next_tag ->
+ Ds;
+ Line ->
+ get_limited_stack(Fd, N+1, Ds#{{currp_stack, N} => Line})
+ end.
+
%%%-----------------------------------------------------------------
%%% Parse memory in crashdump version 0.1 and newer
%%%
@@ -2526,6 +2708,7 @@ tag_to_atom("proc_dictionary") -> ?proc_dictionary;
tag_to_atom("proc_heap") -> ?proc_heap;
tag_to_atom("proc_messages") -> ?proc_messages;
tag_to_atom("proc_stack") -> ?proc_stack;
+tag_to_atom("scheduler") -> ?scheduler;
tag_to_atom("timer") -> ?timer;
tag_to_atom("visible_node") -> ?visible_node;
tag_to_atom(UnknownTag) ->
diff --git a/lib/observer/src/crashdump_viewer.hrl b/lib/observer/src/crashdump_viewer.hrl
index 0e2eba6dee..9515e74114 100644
--- a/lib/observer/src/crashdump_viewer.hrl
+++ b/lib/observer/src/crashdump_viewer.hrl
@@ -36,7 +36,9 @@
num_fun,
mem_tot,
mem_max,
- instr_info}).
+ instr_info,
+ thread
+ }).
-record(proc,
%% Initial data according to the following:
@@ -85,7 +87,10 @@
old_heap_top,
old_heap_end,
memory,
- stack_dump}).
+ stack_dump,
+ run_queue=?unknown,
+ int_state
+ }).
-record(port,
{id,
@@ -96,15 +101,28 @@
monitors,
controls}).
+-record(sched,
+ {name,
+ process,
+ port,
+ run_q=0,
+ port_q=0,
+ details=#{}
+ }).
+
+
+
-record(ets_table,
{pid,
slot,
id,
name,
- type="hash",
- buckets,
+ data_type="hash",
+ buckets="-",
size,
- memory}).
+ memory,
+ details= #{}
+ }).
-record(timer,
{pid,
diff --git a/lib/observer/src/observer.app.src b/lib/observer/src/observer.app.src
index 97a54cd6f9..c12353f9e1 100644
--- a/lib/observer/src/observer.app.src
+++ b/lib/observer/src/observer.app.src
@@ -37,6 +37,7 @@
cdv_proc_cb,
cdv_table_wx,
cdv_term_cb,
+ cdv_sched_cb,
cdv_timer_cb,
cdv_virtual_list_wx,
cdv_wx,
@@ -44,6 +45,7 @@
etop_tr,
etop_txt,
observer,
+ observer_alloc_wx,
observer_app_wx,
observer_html_lib,
observer_lib,
@@ -63,6 +65,6 @@
{env, []},
{runtime_dependencies, ["wx-1.2","stdlib-2.0","runtime_tools-1.8.14",
"kernel-3.0","inets-5.10","et-1.5",
- "erts-6.0"]}]}.
+ "erts-7.0"]}]}.
diff --git a/lib/observer/src/observer_alloc_wx.erl b/lib/observer/src/observer_alloc_wx.erl
new file mode 100644
index 0000000000..0c4bc9ee4b
--- /dev/null
+++ b/lib/observer/src/observer_alloc_wx.erl
@@ -0,0 +1,256 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+-module(observer_alloc_wx).
+
+-export([start_link/2]).
+
+%% wx_object callbacks
+-export([init/1, handle_info/2, terminate/2, code_change/3, handle_call/3,
+ handle_event/2, handle_sync_event/3, handle_cast/2]).
+
+-behaviour(wx_object).
+-include_lib("wx/include/wx.hrl").
+-include("observer_defs.hrl").
+
+-record(state,
+ {
+ offset = 0.0,
+ active = false,
+ parent,
+ windows,
+ data = {0, queue:new()},
+ panel,
+ paint,
+ appmon,
+ async
+ }).
+
+-define(ALLOC_W, 1).
+-define(UTIL_W, 2).
+
+start_link(Notebook, Parent) ->
+ wx_object:start_link(?MODULE, [Notebook, Parent], []).
+
+init([Notebook, Parent]) ->
+ try
+ Panel = wxPanel:new(Notebook),
+ Main = wxBoxSizer:new(?wxVERTICAL),
+ Style = ?wxFULL_REPAINT_ON_RESIZE bor ?wxCLIP_CHILDREN,
+ Carrier = wxPanel:new(Panel, [{winid, ?ALLOC_W}, {style,Style}]),
+ Utilz = wxPanel:new(Panel, [{winid, ?UTIL_W}, {style,Style}]),
+ BorderFlags = ?wxLEFT bor ?wxRIGHT,
+ wxSizer:add(Main, Carrier, [{flag, ?wxEXPAND bor BorderFlags bor ?wxTOP},
+ {proportion, 1}, {border, 5}]),
+
+ wxSizer:add(Main, Utilz, [{flag, ?wxEXPAND bor BorderFlags},
+ {proportion, 1}, {border, 5}]),
+
+ MemWin = {MemPanel,_} = create_mem_info(Panel),
+ wxSizer:add(Main, MemPanel, [{flag, ?wxEXPAND bor BorderFlags bor ?wxBOTTOM},
+ {proportion, 1}, {border, 5}]),
+ wxWindow:setSizer(Panel, Main),
+
+ PaintInfo = observer_perf_wx:setup_graph_drawing([Carrier, Utilz]),
+ {Panel, #state{parent=Parent,
+ panel =Panel,
+ windows = {Carrier, Utilz, MemWin},
+ paint=PaintInfo}
+ }
+ catch _:Err ->
+ io:format("~p crashed ~p: ~p~n",[?MODULE, Err, erlang:get_stacktrace()]),
+ {stop, Err}
+ end.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+handle_event(#wx{event=#wxCommand{type=command_menu_selected}},
+ State = #state{}) ->
+ {noreply, State};
+
+handle_event(Event, _State) ->
+ error({unhandled_event, Event}).
+
+%%%%%%%%%%
+handle_sync_event(#wx{obj=Panel, event = #wxPaint{}},_,
+ #state{active=Active, offset=Offset, paint=Paint,
+ windows=Windows, data=Data}) ->
+ %% Sigh workaround bug on MacOSX (Id in paint event is always 0)
+ Id = if Panel =:= element(?ALLOC_W, Windows) -> alloc;
+ Panel =:= element(?UTIL_W, Windows) -> utilz
+ end,
+ observer_perf_wx:refresh_panel(Panel, Id, Offset, Data, Active, Paint),
+ ok.
+%%%%%%%%%%
+handle_call(Event, From, _State) ->
+ error({unhandled_call, Event, From}).
+
+handle_cast(Event, _State) ->
+ error({unhandled_cast, Event}).
+%%%%%%%%%%
+
+handle_info({Key, {promise_reply, {badrpc, _}}}, #state{async=Key} = State) ->
+ {noreply, State#state{active=false, appmon=undefined}};
+
+handle_info({Key, {promise_reply, SysInfo}}, #state{async=Key, data=Data} = State) ->
+ Info = alloc_info(SysInfo),
+ update_alloc(State, Info),
+ {noreply, State#state{offset=0.0, data = add_data(Info, Data), async=undefined}};
+
+handle_info({refresh, Seq, Freq, Node}, #state{panel=Panel, appmon=Node, async=Key} = State) ->
+ wxWindow:refresh(Panel),
+ Next = Seq+1,
+ if
+ Next > Freq, Key =:= undefined ->
+ erlang:send_after(trunc(1000 / Freq), self(), {refresh, 1, Freq, Node}),
+ Req = rpc:async_call(Node, observer_backend, sys_info, []),
+ {noreply, State#state{offset=Seq/Freq, async=Req}};
+ true ->
+ erlang:send_after(trunc(1000 / Freq), self(), {refresh, Next, Freq, Node}),
+ {noreply, State#state{offset=Seq/Freq}}
+ end;
+handle_info({refresh, _Seq, _Freq, _Node}, State) ->
+ {noreply, State};
+
+handle_info({active, Node}, State = #state{parent=Parent, panel=Panel, appmon=Old}) ->
+ create_menus(Parent, []),
+ try
+ Node = Old,
+ wxWindow:refresh(Panel),
+ {noreply, State#state{active=true}}
+ catch _:_ ->
+ SysInfo = observer_wx:try_rpc(Node, observer_backend, sys_info, []),
+ Info = alloc_info(SysInfo),
+ Freq = 6,
+ erlang:send_after(trunc(1000 / Freq), self(), {refresh, 1, Freq, Node}),
+ wxWindow:refresh(Panel),
+ {noreply, State#state{active=true, appmon=Node, offset=0.0,
+ data = add_data(Info, {0, queue:new()})}}
+ end;
+
+handle_info(not_active, State = #state{appmon=_Pid}) ->
+ {noreply, State#state{active=false}};
+
+handle_info({'EXIT', Old, _}, State = #state{appmon=Old}) ->
+ {noreply, State#state{active=false, appmon=undefined}};
+
+handle_info(_Event, State) ->
+ %% io:format("~p:~p: ~p~n",[?MODULE,?LINE,_Event]),
+ {noreply, State}.
+
+terminate(_Event, #state{}) ->
+ ok.
+code_change(_, _, State) ->
+ State.
+
+%%%%%%%%%%
+
+add_data(Stats, {N, Q}) when N > 60 ->
+ {N, queue:drop(queue:in(Stats, Q))};
+add_data(Stats, {N, Q}) ->
+ {N+1, queue:in(Stats, Q)}.
+
+update_alloc(#state{windows={_, _, {_, Grid}}}, Fields) ->
+ Max = wxListCtrl:getItemCount(Grid),
+ Update = fun({Name, BS, CS}, Row) ->
+ (Row >= Max) andalso wxListCtrl:insertItem(Grid, Row, ""),
+ wxListCtrl:setItem(Grid, Row, 0, observer_lib:to_str(Name)),
+ wxListCtrl:setItem(Grid, Row, 1, observer_lib:to_str(BS div 1024)),
+ wxListCtrl:setItem(Grid, Row, 2, observer_lib:to_str(CS div 1024)),
+ Row + 1
+ end,
+ lists:foldl(Update, 0, Fields),
+ Fields.
+
+alloc_info(SysInfo) ->
+ AllocInfo = proplists:get_value(alloc_info, SysInfo, []),
+ alloc_info(AllocInfo, [], 0, 0, true).
+
+alloc_info([{Type,Instances}|Allocators],TypeAcc,TotalBS,TotalCS,IncludeTotal) ->
+ {BS,CS,NewTotalBS,NewTotalCS,NewIncludeTotal} =
+ sum_alloc_instances(Instances,0,0,TotalBS,TotalCS),
+ alloc_info(Allocators,[{Type,BS,CS}|TypeAcc],NewTotalBS,NewTotalCS,
+ IncludeTotal andalso NewIncludeTotal);
+alloc_info([],TypeAcc,TotalBS,TotalCS,IncludeTotal) ->
+ Types = [X || X={_,BS,CS} <- TypeAcc, (BS>0 orelse CS>0)],
+ case IncludeTotal of
+ true ->
+ [{total,TotalBS,TotalCS} | lists:reverse(Types)];
+ false ->
+ lists:reverse(Types)
+ end.
+
+sum_alloc_instances(false,BS,CS,TotalBS,TotalCS) ->
+ {BS,CS,TotalBS,TotalCS,false};
+sum_alloc_instances([{_,_,Data}|Instances],BS,CS,TotalBS,TotalCS) ->
+ {NewBS,NewCS,NewTotalBS,NewTotalCS} =
+ sum_alloc_one_instance(Data,BS,CS,TotalBS,TotalCS),
+ sum_alloc_instances(Instances,NewBS,NewCS,NewTotalBS,NewTotalCS);
+sum_alloc_instances([],BS,CS,TotalBS,TotalCS) ->
+ {BS,CS,TotalBS,TotalCS,true}.
+
+sum_alloc_one_instance([{sbmbcs,[{blocks_size,BS,_,_},{carriers_size,CS,_,_}]}|
+ Rest],OldBS,OldCS,TotalBS,TotalCS) ->
+ sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS,TotalCS);
+sum_alloc_one_instance([{_,[{blocks_size,BS,_,_},{carriers_size,CS,_,_}]}|
+ Rest],OldBS,OldCS,TotalBS,TotalCS) ->
+ sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS+BS,TotalCS+CS);
+sum_alloc_one_instance([{_,[{blocks_size,BS},{carriers_size,CS}]}|
+ Rest],OldBS,OldCS,TotalBS,TotalCS) ->
+ sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS+BS,TotalCS+CS);
+sum_alloc_one_instance([_|Rest],BS,CS,TotalBS,TotalCS) ->
+ sum_alloc_one_instance(Rest,BS,CS,TotalBS,TotalCS);
+sum_alloc_one_instance([],BS,CS,TotalBS,TotalCS) ->
+ {BS,CS,TotalBS,TotalCS}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+create_mem_info(Parent) ->
+ Panel = wxPanel:new(Parent),
+ wxWindow:setBackgroundColour(Panel, {255,255,255}),
+ Style = ?wxLC_REPORT bor ?wxLC_SINGLE_SEL bor ?wxLC_HRULES bor ?wxLC_VRULES,
+ Grid = wxListCtrl:new(Panel, [{style, Style}]),
+ Li = wxListItem:new(),
+ AddListEntry = fun({Name, Align, DefSize}, Col) ->
+ wxListItem:setText(Li, Name),
+ wxListItem:setAlign(Li, Align),
+ wxListCtrl:insertColumn(Grid, Col, Li),
+ wxListCtrl:setColumnWidth(Grid, Col, DefSize),
+ Col + 1
+ end,
+ ListItems = [{"Allocator Type", ?wxLIST_FORMAT_LEFT, 200},
+ {"Block size (kB)", ?wxLIST_FORMAT_RIGHT, 150},
+ {"Carrier size (kB)",?wxLIST_FORMAT_RIGHT, 150}],
+ lists:foldl(AddListEntry, 0, ListItems),
+ wxListItem:destroy(Li),
+
+ Sizer = wxBoxSizer:new(?wxVERTICAL),
+ wxSizer:add(Sizer, Grid, [{flag, ?wxEXPAND bor ?wxLEFT bor ?wxRIGHT},
+ {border, 5}, {proportion, 1}]),
+ wxWindow:setSizerAndFit(Panel, Sizer),
+ {Panel, Grid}.
+
+
+create_menus(Parent, _) ->
+ MenuEntries =
+ [{"File",
+ [
+ ]}
+ ],
+ observer_wx:create_menus(Parent, MenuEntries).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
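Note: the new allocator tab above builds its table and graphs from the alloc_info entry of observer_backend:sys_info/0, fetched over rpc. A minimal sketch (not part of the patch) of pulling the same raw data from a shell, assuming Node is a reachable node:

    %% Sketch only: fetch the data that alloc_info/1 above condenses.
    fetch_alloc_info(Node) ->
        SysInfo = rpc:call(Node, observer_backend, sys_info, []),
        %% Each entry is {AllocatorType, Instances}; sum_alloc_instances/5 above
        %% folds the per-instance blocks_size/carriers_size values into one
        %% {Type, BlocksSize, CarriersSize} row per allocator type.
        proplists:get_value(alloc_info, SysInfo, []).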
diff --git a/lib/observer/src/observer_html_lib.erl b/lib/observer/src/observer_html_lib.erl
index c279218707..df0bc05312 100644
--- a/lib/observer/src/observer_html_lib.erl
+++ b/lib/observer/src/observer_html_lib.erl
@@ -60,7 +60,8 @@ expandable_term_body(Heading,[],_Tab) ->
"StackDump" -> "No stack dump was found";
"Dictionary" -> "No dictionary was found";
"ProcState" -> "Information could not be retrieved,"
- " system messages may not be handled by this process."
+ " system messages may not be handled by this process.";
+ "SaslLog" -> "No log entry was found"
end];
expandable_term_body(Heading,Expanded,Tab) ->
Attr = "BORDER=0 CELLPADDING=0 CELLSPACING=1 WIDTH=100%",
@@ -102,7 +103,10 @@ expandable_term_body(Heading,Expanded,Tab) ->
element(1, lists:mapfoldl(fun(Entry, Even) ->
{proc_state(Tab, Entry,Even),
not Even}
- end, true, Expanded))]);
+ end, true, Expanded))]);
+ "SaslLog" ->
+ table(Attr,
+ [tr("BGCOLOR=white",[td("ALIGN=left", pre(href_proc_port(Expanded)))])]) ;
_ ->
table(Attr,
[tr(
@@ -151,7 +155,7 @@ all_or_expand(_Tab,Term,Str,false)
href_proc_port(lists:flatten(Str));
all_or_expand(Tab,Term,Preview,true)
when not is_binary(Term) ->
- Key = {Key1,Key2,Key3} = now(),
+ Key = {Key1,Key2,Key3} = {erlang:unique_integer([positive]),1,2},
ets:insert(Tab,{Key,Term}),
[href_proc_port(lists:flatten(Preview), false), $\n,
href("TARGET=\"expanded\"",
diff --git a/lib/observer/src/observer_lib.erl b/lib/observer/src/observer_lib.erl
index 34c7b127ff..40a3eb8831 100644
--- a/lib/observer/src/observer_lib.erl
+++ b/lib/observer/src/observer_lib.erl
@@ -173,12 +173,17 @@ fill_info([{Str,SubStructure}|Rest], Data) when is_list(SubStructure) ->
[{Str, fill_info(SubStructure, Data)}|fill_info(Rest,Data)];
fill_info([{Str,Attrib,SubStructure}|Rest], Data) ->
[{Str, Attrib, fill_info(SubStructure, Data)}|fill_info(Rest,Data)];
+fill_info([{Str, Key = {K,N}}|Rest], Data) when is_atom(K), is_integer(N) ->
+ case get_value(Key, Data) of
+ undefined -> [undefined | fill_info(Rest, Data)];
+ Value -> [{Str, Value} | fill_info(Rest, Data)]
+ end;
fill_info([], _) -> [].
-get_value(Key, Data) when is_atom(Key) ->
- proplists:get_value(Key,Data);
get_value(Fun, Data) when is_function(Fun) ->
- Fun(Data).
+ Fun(Data);
+get_value(Key, Data) ->
+ proplists:get_value(Key,Data).
update_info([Fields|Fs], [{_Header, SubStructure}| Rest]) ->
update_info2(Fields, SubStructure),
@@ -269,6 +274,8 @@ to_str(Pid) when is_pid(Pid) ->
pid_to_list(Pid);
to_str(No) when is_integer(No) ->
integer_to_list(No);
+to_str(Float) when is_float(Float) ->
+ io_lib:format("~.3f", [Float]);
to_str(Term) ->
io_lib:format("~w", [Term]).
@@ -493,8 +500,11 @@ link_entry2(Panel,{Target,Str},Cursor) ->
wxWindow:setToolTip(TC, ToolTip),
TC.
-to_link(Tuple = {_Target, _Str}) ->
- Tuple;
+to_link(RegName={Name, Node}) when is_atom(Name), is_atom(Node) ->
+ Str = io_lib:format("{~p,~p}", [Name, Node]),
+ {RegName, Str};
+to_link(TI = {_Target, _Identifier}) ->
+ TI;
to_link(Target0) ->
Target=to_str(Target0),
{Target, Target}.
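Note: with the new fill_info/2 clause, a field specification may use an {Atom, Integer} tuple key, which is then looked up as an ordinary proplist key. A small worked sketch (field name and key are illustrative):

    %% The spec {"Run queue 2", {run_queue, 2}} is matched by the new clause
    %% and resolved with proplists:get_value({run_queue,2}, Data):
    observer_lib:fill_info([{"Run queue 2", {run_queue, 2}}],
                           [{{run_queue, 2}, 17}]).
    %% expected result: [{"Run queue 2", 17}]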
diff --git a/lib/observer/src/observer_perf_wx.erl b/lib/observer/src/observer_perf_wx.erl
index 8173349ed7..4df9218087 100644
--- a/lib/observer/src/observer_perf_wx.erl
+++ b/lib/observer/src/observer_perf_wx.erl
@@ -24,7 +24,8 @@
handle_event/2, handle_sync_event/3, handle_cast/2]).
%% Drawing wrappers for DC and GC areas
--export([haveGC/0,
+-export([setup_graph_drawing/1, refresh_panel/6,
+ haveGC/0,
setPen/2, setFont/3, setBrush/2,
strokeLine/5, strokeLines/2, drawRoundedRectangle/6,
drawText/4, getTextExtent/2]).
@@ -42,13 +43,12 @@
data = {0, queue:new()},
panel,
paint,
- appmon,
- usegc = false
+ appmon
}).
-define(wxGC, wxGraphicsContext).
--record(paint, {font, small, pen, pen2, pens}).
+-record(paint, {font, small, pen, pen2, pens, usegc = false}).
-define(RQ_W, 1).
-define(MEM_W, 2).
@@ -63,14 +63,11 @@ init([Notebook, Parent]) ->
Main = wxBoxSizer:new(?wxVERTICAL),
Style = ?wxFULL_REPAINT_ON_RESIZE bor ?wxCLIP_CHILDREN,
CPU = wxPanel:new(Panel, [{winid, ?RQ_W}, {style,Style}]),
- wxWindow:setBackgroundColour(CPU, ?wxWHITE),
wxSizer:add(Main, CPU, [{flag, ?wxEXPAND bor ?wxALL},
{proportion, 1}, {border, 5}]),
MemIO = wxBoxSizer:new(?wxHORIZONTAL),
MEM = wxPanel:new(Panel, [{winid, ?MEM_W}, {style,Style}]),
- wxWindow:setBackgroundColour(MEM, ?wxWHITE),
IO = wxPanel:new(Panel, [{winid, ?IO_W}, {style,Style}]),
- wxWindow:setBackgroundColour(IO, ?wxWHITE),
wxSizer:add(MemIO, MEM, [{flag, ?wxEXPAND bor ?wxLEFT},
{proportion, 1}, {border, 5}]),
wxSizer:add(MemIO, IO, [{flag, ?wxEXPAND bor ?wxLEFT bor ?wxRIGHT},
@@ -79,53 +76,56 @@ init([Notebook, Parent]) ->
{proportion, 1}, {border, 5}]),
wxWindow:setSizer(Panel, Main),
- wxPanel:connect(CPU, paint, [callback]),
- wxPanel:connect(IO, paint, [callback]),
- wxPanel:connect(MEM, paint, [callback]),
- case os:type() of
- {win32, _} -> %% Ignore erase on windows
- wxPanel:connect(CPU, erase_background, [{callback, fun(_,_) -> ok end}]),
- wxPanel:connect(IO, erase_background, [{callback, fun(_,_) -> ok end}]),
- wxPanel:connect(MEM, erase_background, [{callback, fun(_,_) -> ok end}]);
- _ -> ok
- end,
+ PaintInfo = setup_graph_drawing([CPU, MEM, IO]),
+ process_flag(trap_exit, true),
+ {Panel, #state{parent=Parent,
+ panel =Panel,
+ windows = {CPU, MEM, IO},
+ paint=PaintInfo
+ }}
+ catch _:Err ->
+ io:format("~p crashed ~p: ~p~n",[?MODULE, Err, erlang:get_stacktrace()]),
+ {stop, Err}
+ end.
+
+setup_graph_drawing(Panels) ->
+ IsWindows = element(1, os:type()) =:= win32,
+ IgnoreCB = {callback, fun(_,_) -> ok end},
+ Do = fun(Panel) ->
+ wxWindow:setBackgroundColour(Panel, ?wxWHITE),
+ wxPanel:connect(Panel, paint, [callback]),
+ IsWindows andalso
+ wxPanel:connect(Panel, erase_background, [IgnoreCB])
+ end,
+ _ = [Do(Panel) || Panel <- Panels],
UseGC = haveGC(),
Version28 = ?wxMAJOR_VERSION =:= 2 andalso ?wxMINOR_VERSION =:= 8,
{Font, SmallFont}
- = case os:type() of
- {unix, _} when UseGC, Version28 ->
+ = if UseGC, Version28 ->
%% Def font is really small when using Graphics contexts in 2.8
%% Hardcode it
F = wxFont:new(12,?wxFONTFAMILY_DECORATIVE,?wxFONTSTYLE_NORMAL,?wxFONTWEIGHT_BOLD),
SF = wxFont:new(10, ?wxFONTFAMILY_DECORATIVE, ?wxFONTSTYLE_NORMAL, ?wxFONTWEIGHT_NORMAL),
{F, SF};
- _ ->
+ true ->
DefFont = wxSystemSettings:getFont(?wxSYS_DEFAULT_GUI_FONT),
DefSize = wxFont:getPointSize(DefFont),
DefFamily = wxFont:getFamily(DefFont),
- F = wxFont:new(DefSize, DefFamily, ?wxFONTSTYLE_NORMAL, ?wxFONTWEIGHT_BOLD),
- SF = wxFont:new(DefSize-1, DefFamily, ?wxFONTSTYLE_NORMAL, ?wxFONTWEIGHT_NORMAL),
+ F = wxFont:new(DefSize-1, DefFamily, ?wxFONTSTYLE_NORMAL, ?wxFONTWEIGHT_BOLD),
+ SF = wxFont:new(DefSize-2, DefFamily, ?wxFONTSTYLE_NORMAL, ?wxFONTWEIGHT_NORMAL),
{F, SF}
end,
BlackPen = wxPen:new({0,0,0}, [{width, 2}]),
- Pens = [wxPen:new(Col, [{width, 2}]) || Col <- tuple_to_list(colors())],
- process_flag(trap_exit, true),
- {Panel, #state{parent=Parent,
- panel =Panel,
- windows = {CPU, MEM, IO},
- usegc=UseGC,
- paint=#paint{font = Font,
- small = SmallFont,
- pen = ?wxGREY_PEN,
- pen2 = BlackPen,
- pens = list_to_tuple(Pens)
- }
- }}
- catch _:Err ->
- io:format("~p crashed ~p: ~p~n",[?MODULE, Err, erlang:get_stacktrace()]),
- {stop, Err}
- end.
+ Pens = [wxPen:new(Col, [{width, 3}]) || Col <- tuple_to_list(colors())],
+ #paint{usegc = UseGC,
+ font = Font,
+ small = SmallFont,
+ pen = ?wxGREY_PEN,
+ pen2 = BlackPen,
+ pens = list_to_tuple(Pens)
+ }.
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -139,21 +139,25 @@ handle_event(Event, _State) ->
%%%%%%%%%%
handle_sync_event(#wx{obj=Panel, event = #wxPaint{}},_,
#state{active=Active, offset=Offset, paint=Paint,
- windows=Windows, data=Data, usegc=UseGC}) ->
- %% PaintDC must be created in a callback to work on windows.
+ windows=Windows, data=Data}) ->
%% Sigh workaround bug on MacOSX (Id in paint event is always 0)
%% Panel = element(Id, Windows),
- Id = if Panel =:= element(?RQ_W, Windows) -> ?RQ_W;
- Panel =:= element(?MEM_W, Windows) -> ?MEM_W;
- Panel =:= element(?IO_W, Windows) -> ?IO_W
+ Id = if Panel =:= element(?RQ_W, Windows) -> runq;
+ Panel =:= element(?MEM_W, Windows) -> memory;
+ Panel =:= element(?IO_W, Windows) -> io
end,
- IsWindows = element(1, os:type()) =:= win32,
- DC = if IsWindows ->
+ refresh_panel(Panel, Id, Offset, Data, Active, Paint),
+ ok.
+
+refresh_panel(Panel, Id, Offset, Data, Active, #paint{usegc=UseGC} = Paint) ->
+ %% PaintDC must be created in a callback to work on windows.
+ IsWindows = element(1, os:type()) =:= win32,
+ DC = if IsWindows ->
%% Ugly hack to aviod flickering on windows, works on windows only
%% But the other platforms are doublebuffered by default
wx:typeCast(wxBufferedPaintDC:new(Panel), wxPaintDC);
- true ->
+ true ->
wxPaintDC:new(Panel)
end,
IsWindows andalso wxDC:clear(DC),
@@ -167,8 +171,9 @@ handle_sync_event(#wx{obj=Panel, event = #wxPaint{}},_,
io:format("Internal error ~p ~p~n",[Err, erlang:get_stacktrace()])
end,
UseGC andalso ?wxGC:destroy(GC),
- wxPaintDC:destroy(DC),
- ok.
+ wxPaintDC:destroy(DC).
+
+
%%%%%%%%%%
handle_call(Event, From, _State) ->
error({unhandled_call, Event, From}).
@@ -247,10 +252,10 @@ create_menus(Parent, _) ->
observer_wx:create_menus(Parent, MenuEntries).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-collect_data(?RQ_W, {N, Q}) ->
+collect_data(runq, {N, Q}) ->
case queue:to_list(Q) of
- [] -> {0, 0, []};
- [_] -> {0, 0, []};
+ [] -> {0, 0, [], []};
+ [_] -> {0, 0, [], []};
[{stats, _Ver, Init0, _IO, _Mem}|Data0] ->
Init = lists:sort(Init0),
[_|Data=[First|_]] = lists:foldl(fun({stats, _, T0, _, _}, [Prev|Acc]) ->
@@ -258,25 +263,46 @@ collect_data(?RQ_W, {N, Q}) ->
Delta = calc_delta(TN, Prev),
[TN, list_to_tuple(Delta)|Acc]
end, [Init], Data0),
- {N, lmax(Data), lists:reverse([First|Data])}
+ NoGraphs = tuple_size(First),
+ {N, lmax(Data), lists:reverse([First|Data]), lists:seq(1, NoGraphs)}
end;
-collect_data(?MEM_W, {N, Q}) ->
+collect_data(memory, {N, Q}) ->
MemT = mem_types(),
Data = [list_to_tuple([Value || {Type,Value} <- MemInfo,
lists:member(Type, MemT)])
|| {stats, _Ver, _RQ, _IO, MemInfo} <- queue:to_list(Q)],
- {N, lmax(Data), Data};
-collect_data(?IO_W, {N, Q}) ->
+ {N, lmax(Data), Data, MemT};
+collect_data(io, {N, Q}) ->
case queue:to_list(Q) of
- [] -> {0, 0, []};
- [_] -> {0, 0, []};
+ [] -> {0, 0, [], []};
+ [_] -> {0, 0, [], []};
[{stats, _Ver, _RQ, {{_,In0}, {_,Out0}}, _Mem}|Data0] ->
[_,_|Data=[First|_]] =
lists:foldl(fun({stats, _, _, {{_,In}, {_,Out}}, _}, [PIn,Pout|Acc]) ->
[In,Out,{In-PIn,Out-Pout}|Acc]
end, [In0,Out0], Data0),
- {N, lmax(Data), lists:reverse([First|Data])}
- end.
+ {N, lmax(Data), lists:reverse([First|Data]), [input, output]}
+ end;
+collect_data(alloc, {N, Q}) ->
+ List = queue:to_list(Q),
+ Data = [list_to_tuple([Carrier || {_Type,_Block,Carrier} <- MemInfo])
+ || MemInfo <- List],
+ Info = case List of %% Varies depending on erlang build config/platform
+ [MInfo|_] -> [Type || {Type, _, _} <- MInfo];
+ _ -> []
+ end,
+ {N, lmax(Data), Data, Info};
+
+collect_data(utilz, {N, Q}) ->
+ List = queue:to_list(Q),
+ Data = [list_to_tuple([round(100*Block/Carrier) || {_Type,Block,Carrier} <- MemInfo])
+ || MemInfo <- List],
+ Info = case List of %% Varies depending on erlang build config/platform
+ [MInfo|_] -> [Type || {Type, _, _} <- MInfo];
+ _ -> []
+ end,
+ {N, lmax(Data), Data, Info}.
+
mem_types() ->
[total, processes, atom, binary, code, ets].
@@ -299,14 +325,14 @@ draw(Offset, Id, DC, Panel, Paint=#paint{pens=Pens, small=Small}, Data, Active)
%% This can be optimized a lot by collecting data once
%% and draw to memory and then blit memory and only draw new entries in new memory
%% area. Hmm now rewritten to use ?wxGC I don't now if it is feasable.
- {Len, Max0, Hs} = collect_data(Id, Data),
- Max = calc_max(Max0),
- NoGraphs = try tuple_size(hd(Hs)) catch _:_ -> 0 end,
+ {Len, Max0, Hs, Info} = collect_data(Id, Data),
+ {Max,_,_} = MaxDisp = calc_max(Id, Max0),
Size = wxWindow:getClientSize(Panel),
- {X0,Y0,WS,HS} = draw_borders(Id, NoGraphs, DC, Size, Max, Paint),
+ {X0,Y0,WS,HS, DrawBs} = draw_borders(Id, Info, DC, Size, MaxDisp, Paint),
Last = 60*WS+X0-1,
Start = max(61-Len, 0)*WS+X0 - Offset*WS,
Samples = length(Hs),
+ NoGraphs = try tuple_size(hd(Hs)) catch _:_ -> 0 end,
case Active andalso Samples > 1 andalso NoGraphs > 0 of
true ->
Draw = fun(N) ->
@@ -315,14 +341,16 @@ draw(Offset, Id, DC, Panel, Paint=#paint{pens=Pens, small=Small}, Data, Active)
strokeLines(DC, Lines),
N+1
end,
- [Draw(I) || I <- lists:seq(NoGraphs, 1, -1)];
+ [Draw(I) || I <- lists:seq(NoGraphs, 1, -1)],
+ DrawBs();
false ->
- Info = case Active andalso Samples =< 1 of
- true -> "Waiting on data";
+ DrawBs(),
+ Text = case Active andalso Samples =< 1 of
+ true -> "Waiting for data";
false -> "Information not available"
end,
setFont(DC, Small, {0,0,0}),
- drawText(DC, Info, X0 + 100, element(2,Size) div 2)
+ drawText(DC, Text, X0 + 100, element(2,Size) div 2)
end,
ok.
@@ -397,9 +425,8 @@ spline_tan(Y0, Y1, Y2, Y3) ->
-define(BW, 5).
-define(BH, 5).
-draw_borders(Type, NoGraphs, DC, {W,H}, Max,
+draw_borders(Type, Info, DC, {W,H}, {Max, Unit, MaxUnit},
#paint{pen=Pen, pen2=Pen2, font=Font, small=Small}) ->
- {Unit, MaxUnit} = bytes(Type, Max),
Str1 = observer_lib:to_str(MaxUnit),
Str2 = observer_lib:to_str(MaxUnit div 2),
Str3 = observer_lib:to_str(0),
@@ -410,10 +437,10 @@ draw_borders(Type, NoGraphs, DC, {W,H}, Max,
GraphX0 = ?BW+TW+?BW,
GraphX1 = W-?BW*4,
- TopTextX = ?BW+TW+?BW,
- MaxTextY = ?BH+TH+?BH,
+ TopTextX = ?BW*3+TW,
+ MaxTextY = TH+?BH,
BottomTextY = H-?BH-TH,
- SecondsY = BottomTextY - ?BH - TH,
+ SecondsY = BottomTextY - TH,
GraphY0 = MaxTextY + (TH / 2),
GraphY1 = SecondsY - ?BH,
GraphW = GraphX1-GraphX0-1,
@@ -447,17 +474,7 @@ draw_borders(Type, NoGraphs, DC, {W,H}, Max,
strokeLine(DC, GraphX0-3, GraphY50, GraphX1, GraphY50),
strokeLine(DC, GraphX0-3, GraphY75, GraphX1, GraphY75),
- setPen(DC, Pen2),
- strokeLines(DC, [{GraphX0, GraphY0-1}, {GraphX0, GraphY1+1},
- {GraphX1, GraphY1+1}, {GraphX1, GraphY0-1},
- {GraphX0, GraphY0-1}]),
-
setFont(DC, Font, {0,0,0}),
- case Type of
- ?RQ_W -> drawText(DC, "Scheduler Utilization (%) ", TopTextX,?BH);
- ?MEM_W -> drawText(DC, "Memory Usage " ++ Unit, TopTextX,?BH);
- ?IO_W -> drawText(DC, "IO Usage " ++ Unit, TopTextX,?BH)
- end,
Text = fun(X,Y, Str, PenId) ->
if PenId == 0 ->
@@ -468,32 +485,65 @@ draw_borders(Type, NoGraphs, DC, {W,H}, Max,
end,
drawText(DC, Str, X, Y),
{StrW, _} = getTextExtent(DC, Str),
- StrW + X + SpaceW
+ StrW + X + ?BW*2
end,
+
case Type of
- ?RQ_W ->
- TN0 = Text(?BW, BottomTextY, "Scheduler: ", 0),
+ runq ->
+ drawText(DC, "Scheduler Utilization (%) ", TopTextX, ?BH),
+ TN0 = Text(TopTextX, BottomTextY, "Scheduler: ", 0),
lists:foldl(fun(Id, Pos0) ->
Text(Pos0, BottomTextY, integer_to_list(Id), Id)
- end, TN0, lists:seq(1, NoGraphs));
- ?MEM_W ->
+ end, TN0, Info);
+ memory ->
+ drawText(DC, "Memory Usage " ++ Unit, TopTextX,?BH),
+ lists:foldl(fun(MType, {PenId, Pos0}) ->
+ Str = to_string(MType),
+ Pos = Text(Pos0, BottomTextY, Str, PenId),
+ {PenId+1, Pos}
+ end, {1, TopTextX}, Info);
+ io ->
+ drawText(DC, "IO Usage " ++ Unit, TopTextX,?BH),
+ lists:foldl(fun(MType, {PenId, Pos0}) ->
+ Str = to_string(MType),
+ Pos = Text(Pos0, BottomTextY, Str, PenId),
+ {PenId+1, Pos}
+ end, {1, TopTextX}, Info);
+ alloc ->
+ drawText(DC, "Carrier Size " ++ Unit, TopTextX,?BH);
+ utilz ->
+ drawText(DC, "Carrier Utilization (%)" ++ Unit, TopTextX,?BH),
lists:foldl(fun(MType, {PenId, Pos0}) ->
- Str = uppercase(atom_to_list(MType)),
+ Str = to_string(MType),
Pos = Text(Pos0, BottomTextY, Str, PenId),
{PenId+1, Pos}
- end, {1, ?BW}, mem_types());
- ?IO_W ->
- TN0 = Text(?BW, BottomTextY, "Input", 1),
- Text(TN0, BottomTextY, "Output", 2)
+ end, {1, TopTextX}, Info)
end,
- {GraphX0+1, GraphY1, ScaleW, ScaleH}.
+ DrawBorder = fun() ->
+ setPen(DC, Pen2),
+ strokeLines(DC, [{GraphX0, GraphY0-1}, {GraphX0, GraphY1+1},
+ {GraphX1, GraphY1+1}, {GraphX1, GraphY0-1},
+ {GraphX0, GraphY0-1}])
+ end,
+ {GraphX0+1, GraphY1, ScaleW, ScaleH, DrawBorder}.
+
+to_string(Atom) ->
+ Name = atom_to_list(Atom),
+ case lists:reverse(Name) of
+ "colla_" ++ Rev ->
+ uppercase(lists:reverse(Rev));
+ _ ->
+ uppercase(Name)
+ end.
uppercase([C|Rest]) ->
[C-$a+$A|Rest].
-calc_max(Max) when Max < 10 -> 10;
-calc_max(Max) -> calc_max1(Max).
+calc_max(Type, Max) ->
+ bytes(Type, Max).
+calc_max1(Max) when Max < 10 ->
+ 10;
calc_max1(Max) ->
case Max div 10 of
X when X < 10 ->
@@ -506,23 +556,36 @@ calc_max1(Max) ->
10*calc_max1(X)
end.
-bytes(?RQ_W, Val) -> {"", Val};
+bytes(runq, Val) ->
+ Upper = calc_max1(Val),
+ {Upper, "", Upper};
+bytes(utilz, Val) ->
+ Upper = calc_max1(Val),
+ {Upper, "", Upper};
bytes(_, B) ->
KB = B div 1024,
MB = KB div 1024,
GB = MB div 1024,
if
- GB > 10 -> {"(GB)", GB};
- MB > 10 -> {"(MB)", MB};
- KB > 0 -> {"(KB)", KB};
- true -> {"(B)", B}
+ GB > 10 ->
+ Upper = calc_max1(GB),
+ {Upper*1024*1024*1024, "(GB)", Upper};
+ MB > 10 ->
+ Upper = calc_max1(MB),
+ {Upper*1024*1024, "(MB)", Upper};
+ KB > 0 ->
+ Upper = calc_max1(KB),
+ {Upper*1024, "(KB)", Upper};
+ true ->
+ Upper = calc_max1(B),
+ {Upper, "(B)", Upper}
end.
colors() ->
- {{200, 50, 50}, {50, 200, 50}, {50, 50, 200},
- {255, 110, 0}, {50, 200, 200}, {200, 50, 200},
- {240, 200, 80}, {140, 2, 140},
- {100, 200, 240}, {100, 240, 100}
+ {{240, 100, 100}, {100, 240, 100}, {100, 100, 240},
+ {220, 220, 80}, {100, 240, 240}, {240, 100, 240},
+ {100, 25, 25}, {25, 100, 25}, {25, 25, 100},
+ {120, 120, 0}, {25, 100, 100}, {100, 50, 100}
}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
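Note: the two new graph types handled above are the carrier-size and carrier-utilization views; collect_data(alloc, ...) plots raw carrier sizes while collect_data(utilz, ...) plots the blocks-to-carrier ratio. A one-line sketch of the utilization figure:

    %% Per allocator type, utilization is the rounded percentage of the
    %% carrier memory actually occupied by blocks:
    Util = fun(BlocksSize, CarriersSize) -> round(100 * BlocksSize / CarriersSize) end,
    Util(2048, 32768).   %% -> 6 (percent)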
diff --git a/lib/observer/src/observer_pro_wx.erl b/lib/observer/src/observer_pro_wx.erl
index 0be8c18893..026693ff56 100644
--- a/lib/observer/src/observer_pro_wx.erl
+++ b/lib/observer/src/observer_pro_wx.erl
@@ -578,7 +578,7 @@ get_row(From, Row, pid, Info) ->
end,
From ! {self(), Pid};
get_row(From, Row, Col, Info) ->
- Data = case Row > array:size(Info) of
+ Data = case Row >= array:size(Info) of
true ->
"";
false ->
diff --git a/lib/observer/src/observer_procinfo.erl b/lib/observer/src/observer_procinfo.erl
index 8e8a37fc93..d724cd9e96 100644
--- a/lib/observer/src/observer_procinfo.erl
+++ b/lib/observer/src/observer_procinfo.erl
@@ -43,6 +43,8 @@
-record(worker, {panel, callback}).
+-record(io, {rdata=""}).
+
start(Process, ParentFrame, Parent) ->
wx_object:start_link(?MODULE, [Process, ParentFrame, Parent], []).
@@ -69,6 +71,10 @@ init([Pid, ParentFrame, Parent]) ->
DictPage = init_panel(Notebook, "Dictionary", [Pid,Table], fun init_dict_page/3),
StackPage = init_panel(Notebook, "Stack Trace", [Pid], fun init_stack_page/2),
StatePage = init_panel(Notebook, "State", [Pid,Table], fun init_state_page/3),
+ Ps = case gen_server:call(observer, log_status) of
+ true -> [init_panel(Notebook, "Log", [Pid,Table], fun init_log_page/3)];
+ false -> []
+ end,
wxFrame:connect(Frame, close_window),
wxMenu:connect(Frame, command_menu_selected),
@@ -78,7 +84,7 @@ init([Pid, ParentFrame, Parent]) ->
pid=Pid,
frame=Frame,
notebook=Notebook,
- pages=[ProcessPage,MessagePage,DictPage,StackPage,StatePage],
+ pages=[ProcessPage,MessagePage,DictPage,StackPage,StatePage|Ps],
expand_table=Table
}}
catch error:{badrpc, _} ->
@@ -144,7 +150,7 @@ handle_event(#wx{event=#wxHtmlLink{linkInfo=#wxHtmlLinkInfo{href=Href}}},
Opened =
case lists:keyfind(Id,1,Opened0) of
false ->
- Win = cdv_detail_wx:start_link(Id,Frame,Callback),
+ Win = cdv_detail_wx:start_link(Id,[],Frame,Callback),
[{Id,Win}|Opened0];
{_,Win} ->
wxFrame:raise(Win),
@@ -327,6 +333,26 @@ fetch_state_info2(Pid, M) ->
{badrpc,{'EXIT',{timeout, _}}} -> []
end.
+init_log_page(Parent, Pid, Table) ->
+ Win = observer_lib:html_window(Parent),
+ Update = fun() ->
+ Fd = spawn_link(fun() -> io_server() end),
+ rpc:call(node(Pid), rb, rescan, [[{start_log, Fd}]]),
+ rpc:call(node(Pid), rb, grep, [local_pid_str(Pid)]),
+ Logs = io_get_data(Fd),
+ %% Replace remote local pid notation to global notation
+ Pref = global_pid_node_pref(Pid),
+ ExpPid = re:replace(Logs,"<0\.","<" ++ Pref ++ ".",[global, {return, list}]),
+ %% Try to keep same look by removing blanks at right of rewritten PID
+ NbBlanks = length(Pref) - 1,
+ Re = "(<" ++ Pref ++ "\.[^>]{1,}>)[ ]{"++ integer_to_list(NbBlanks) ++ "}",
+ Look = re:replace(ExpPid, Re, "\\1", [global, {return, list}]),
+ Html = observer_html_lib:expandable_term("SaslLog", Look, Table),
+ wxHtmlWindow:setPage(Win, Html)
+ end,
+ Update(),
+ {Win, Update}.
+
create_menus(MenuBar) ->
Menus = [{"File", [#create_menu{id=?wxID_CLOSE, text="Close"}]},
{"View", [#create_menu{id=?REFRESH, text="Refresh\tCtrl-R"}]}],
@@ -409,3 +435,51 @@ filter_monitor_info() ->
Ms = proplists:get_value(monitors, Data),
[Pid || {process, Pid} <- Ms]
end.
+
+local_pid_str(Pid) ->
+ %% Observer can observe remote nodes, but there is no function
+ %% to get the local pid from the remote pid ...
+ %% So grep would fail to find a remote pid in the remote node's
+ %% local log: <4589.42.1> will not be found, but <0.42.1> will.
+ %% Replace the first integer with zero.
+ "<0" ++ re:replace(pid_to_list(Pid),"\<([0-9]{1,})","",[{return, list}]).
+
+global_pid_node_pref(Pid) ->
+ %% Global PID node prefix : X of <X.Y.Z>
+ string:strip(string:sub_word(pid_to_list(Pid),1,$.),left,$<).
+
+
+io_get_data(Pid) ->
+ Pid ! {self(), get_data_and_close},
+ receive
+ {Pid, data, Data} -> lists:flatten(Data)
+ end.
+
+io_server() ->
+ io_server(#io{}).
+
+io_server(State) ->
+ receive
+ {io_request, From, ReplyAs, Request} ->
+ {_, Reply, NewState} = io_request(Request,State),
+ From ! {io_reply, ReplyAs, Reply},
+ io_server(NewState);
+ {Pid, get_data_and_close} ->
+ Pid ! {self(), data, lists:reverse(State#io.rdata)},
+ normal;
+ _Unknown ->
+ io_server(State)
+ end.
+
+io_request({put_chars, _Encoding, Chars}, State = #io{rdata=Data}) ->
+ {ok, ok, State#io{rdata=[Chars|Data]}};
+io_request({put_chars, Encoding, Module, Function, Args}, State) ->
+ try
+ io_request({put_chars, Encoding, apply(Module, Function, Args)}, State)
+ catch _:_ ->
+ {error, {error, Function}, State}
+ end;
+io_request(_Req, State) ->
+ %% io:format("~p: Unknown req: ~p ~n",[?LINE, _Req]),
+ {ok, {error, request}, State}.
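Note: the io_server/0 loop above is a minimal Erlang I/O-protocol device; rb is pointed at it via {start_log, Fd}, every put_chars request is buffered, and the whole text is handed back on a single message. A usage sketch, assuming it lives next to the code above (the demo function name is illustrative):

    capture_demo() ->
        Fd = spawn_link(fun() -> io_server() end),
        %% Anything written with the io module arrives as {io_request, ...}
        %% and is appended to the rdata buffer:
        io:format(Fd, "hello from ~p~n", [self()]),
        Fd ! {self(), get_data_and_close},
        receive {Fd, data, Data} -> lists:flatten(Data) end.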
diff --git a/lib/observer/src/observer_sys_wx.erl b/lib/observer/src/observer_sys_wx.erl
index f989f9cf97..ea89590e84 100644
--- a/lib/observer/src/observer_sys_wx.erl
+++ b/lib/observer/src/observer_sys_wx.erl
@@ -37,7 +37,6 @@
parent_notebook,
panel, sizer,
menubar,
- alloc,
fields,
timer}).
@@ -48,7 +47,6 @@ start_link(Notebook, Parent) ->
init([Notebook, Parent]) ->
SysInfo = observer_backend:sys_info(),
- AllocInfo = proplists:get_value(alloc_info, SysInfo, []),
{Info, Stat} = info_fields(),
Panel = wxPanel:new(Notebook),
Sizer = wxBoxSizer:new(?wxVERTICAL),
@@ -60,16 +58,13 @@ init([Notebook, Parent]) ->
wxSizer:add(TopSizer, FPanel0, [{flag, ?wxEXPAND}, {proportion, 1}]),
wxSizer:add(TopSizer, FPanel1, [{flag, ?wxEXPAND}, {proportion, 1}]),
BorderFlags = ?wxLEFT bor ?wxRIGHT,
- {MemPanel, MemoryInfo} = create_mem_info(Panel, AllocInfo),
wxSizer:add(Sizer, TopSizer, [{flag, ?wxEXPAND bor BorderFlags bor ?wxTOP},
{proportion, 0}, {border, 5}]),
- wxSizer:add(Sizer, MemPanel, [{flag, ?wxEXPAND bor BorderFlags bor ?wxBOTTOM},
- {proportion, 1}, {border, 5}]),
wxPanel:setSizer(Panel, Sizer),
Timer = observer_lib:start_timer(10),
{Panel, #sys_wx_state{parent=Parent,
parent_notebook=Notebook,
- panel=Panel, sizer=Sizer, alloc=MemoryInfo,
+ panel=Panel, sizer=Sizer,
timer=Timer, fields=Fields0 ++ Fields1}}.
create_sys_menu(Parent) ->
@@ -77,91 +72,13 @@ create_sys_menu(Parent) ->
#create_menu{id = ?ID_REFRESH_INTERVAL, text = "Refresh interval"}]},
observer_wx:create_menus(Parent, [View]).
-update_syspage(#sys_wx_state{node = Node, fields=Fields, sizer=Sizer, alloc=AllocCtrl}) ->
+update_syspage(#sys_wx_state{node = Node, fields=Fields, sizer=Sizer}) ->
SysInfo = observer_wx:try_rpc(Node, observer_backend, sys_info, []),
- AllocInfo = proplists:get_value(alloc_info, SysInfo, []),
{Info, Stat} = info_fields(),
observer_lib:update_info(Fields, observer_lib:fill_info(Info, SysInfo) ++
observer_lib:fill_info(Stat, SysInfo)),
- update_alloc(AllocCtrl, AllocInfo),
wxSizer:layout(Sizer).
-create_mem_info(Parent, Fields) ->
- Panel = wxPanel:new(Parent),
- wxWindow:setBackgroundColour(Panel, {255,255,255}),
- Style = ?wxLC_REPORT bor ?wxLC_SINGLE_SEL bor ?wxLC_HRULES bor ?wxLC_VRULES,
- Grid = wxListCtrl:new(Panel, [{style, Style}]),
- Li = wxListItem:new(),
- AddListEntry = fun({Name, Align, DefSize}, Col) ->
- wxListItem:setText(Li, Name),
- wxListItem:setAlign(Li, Align),
- wxListCtrl:insertColumn(Grid, Col, Li),
- wxListCtrl:setColumnWidth(Grid, Col, DefSize),
- Col + 1
- end,
- ListItems = [{"Allocator Type", ?wxLIST_FORMAT_LEFT, 200},
- {"Block size (kB)", ?wxLIST_FORMAT_RIGHT, 150},
- {"Carrier size (kB)",?wxLIST_FORMAT_RIGHT, 150}],
- lists:foldl(AddListEntry, 0, ListItems),
- wxListItem:destroy(Li),
- update_alloc(Grid, Fields),
-
- Sizer = wxBoxSizer:new(?wxVERTICAL),
- wxSizer:add(Sizer, Grid, [{flag, ?wxEXPAND bor ?wxLEFT bor ?wxRIGHT},
- {border, 5}, {proportion, 1}]),
- wxWindow:setSizerAndFit(Panel, Sizer),
- {Panel, Grid}.
-
-update_alloc(Grid, AllocInfo) ->
- Fields = alloc_info(AllocInfo, [], 0, 0, true),
- wxListCtrl:deleteAllItems(Grid),
- Update = fun({Name, BS, CS}, Row) ->
- wxListCtrl:insertItem(Grid, Row, ""),
- wxListCtrl:setItem(Grid, Row, 0, observer_lib:to_str(Name)),
- wxListCtrl:setItem(Grid, Row, 1, observer_lib:to_str(BS div 1024)),
- wxListCtrl:setItem(Grid, Row, 2, observer_lib:to_str(CS div 1024)),
- Row + 1
- end,
- lists:foldl(Update, 0, Fields),
- Fields.
-
-alloc_info([{Type,Instances}|Allocators],TypeAcc,TotalBS,TotalCS,IncludeTotal) ->
- {BS,CS,NewTotalBS,NewTotalCS,NewIncludeTotal} =
- sum_alloc_instances(Instances,0,0,TotalBS,TotalCS),
- alloc_info(Allocators,[{Type,BS,CS}|TypeAcc],NewTotalBS,NewTotalCS,
- IncludeTotal andalso NewIncludeTotal);
-alloc_info([],TypeAcc,TotalBS,TotalCS,IncludeTotal) ->
- Types = [X || X={_,BS,CS} <- TypeAcc, (BS>0 orelse CS>0)],
- case IncludeTotal of
- true ->
- [{total,TotalBS,TotalCS} | lists:reverse(Types)];
- false ->
- lists:reverse(Types)
- end.
-
-sum_alloc_instances(false,BS,CS,TotalBS,TotalCS) ->
- {BS,CS,TotalBS,TotalCS,false};
-sum_alloc_instances([{_,_,Data}|Instances],BS,CS,TotalBS,TotalCS) ->
- {NewBS,NewCS,NewTotalBS,NewTotalCS} =
- sum_alloc_one_instance(Data,BS,CS,TotalBS,TotalCS),
- sum_alloc_instances(Instances,NewBS,NewCS,NewTotalBS,NewTotalCS);
-sum_alloc_instances([],BS,CS,TotalBS,TotalCS) ->
- {BS,CS,TotalBS,TotalCS,true}.
-
-sum_alloc_one_instance([{sbmbcs,[{blocks_size,BS,_,_},{carriers_size,CS,_,_}]}|
- Rest],OldBS,OldCS,TotalBS,TotalCS) ->
- sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS,TotalCS);
-sum_alloc_one_instance([{_,[{blocks_size,BS,_,_},{carriers_size,CS,_,_}]}|
- Rest],OldBS,OldCS,TotalBS,TotalCS) ->
- sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS+BS,TotalCS+CS);
-sum_alloc_one_instance([{_,[{blocks_size,BS},{carriers_size,CS}]}|
- Rest],OldBS,OldCS,TotalBS,TotalCS) ->
- sum_alloc_one_instance(Rest,OldBS+BS,OldCS+CS,TotalBS+BS,TotalCS+CS);
-sum_alloc_one_instance([_|Rest],BS,CS,TotalBS,TotalCS) ->
- sum_alloc_one_instance(Rest,BS,CS,TotalBS,TotalCS);
-sum_alloc_one_instance([],BS,CS,TotalBS,TotalCS) ->
- {BS,CS,TotalBS,TotalCS}.
-
info_fields() ->
Info = [{"System and Architecture",
[{"System Version", otp_release},
diff --git a/lib/observer/src/observer_wx.erl b/lib/observer/src/observer_wx.erl
index c86f5ea916..cf602569aa 100644
--- a/lib/observer/src/observer_wx.erl
+++ b/lib/observer/src/observer_wx.erl
@@ -37,11 +37,13 @@
-define(ID_CONNECT, 2).
-define(ID_NOTEBOOK, 3).
-define(ID_CDV, 4).
+-define(ID_LOGVIEW, 5).
-define(FIRST_NODES_MENU_ID, 1000).
-define(LAST_NODES_MENU_ID, 2000).
-define(TRACE_STR, "Trace Overview").
+-define(ALLOC_STR, "Memory Allocators").
%% Records
-record(state,
@@ -57,10 +59,12 @@
trace_panel,
app_panel,
perf_panel,
+ allc_panel,
active_tab,
node,
nodes,
- prev_node=""
+ prev_node="",
+ log = false
}).
start() ->
@@ -147,6 +151,10 @@ setup(#state{frame = Frame} = State) ->
PerfPanel = observer_perf_wx:start_link(Notebook, self()),
wxNotebook:addPage(Notebook, PerfPanel, "Load Charts", []),
+ %% Memory Allocator Viewer Panel
+ AllcPanel = observer_alloc_wx:start_link(Notebook, self()),
+ wxNotebook:addPage(Notebook, AllcPanel, ?ALLOC_STR, []),
+
%% App Viewer Panel
AppPanel = observer_app_wx:start_link(Notebook, self()),
wxNotebook:addPage(Notebook, AppPanel, "Applications", []),
@@ -182,6 +190,7 @@ setup(#state{frame = Frame} = State) ->
trace_panel = TracePanel,
app_panel = AppPanel,
perf_panel = PerfPanel,
+ allc_panel = AllcPanel,
active_tab = SysPid,
node = node(),
nodes = Nodes
@@ -215,14 +224,17 @@ handle_event(#wx{event=#wxNotebook{type=command_notebook_page_changing}},
{noreply, State#state{active_tab=Pid}}
end;
-handle_event(#wx{event = #wxClose{}}, State) ->
- {stop, normal, State};
-
handle_event(#wx{id = ?ID_CDV, event = #wxCommand{type = command_menu_selected}}, State) ->
spawn(crashdump_viewer, start, []),
{noreply, State};
-handle_event(#wx{id = ?wxID_EXIT, event = #wxCommand{type = command_menu_selected}}, State) ->
+handle_event(#wx{event = #wxClose{}}, #state{log=LogOn} = State) ->
+ LogOn andalso rpc:block_call(State#state.node, rb, stop, []),
+ {stop, normal, State};
+
+handle_event(#wx{id = ?wxID_EXIT, event = #wxCommand{type = command_menu_selected}},
+ #state{log=LogOn} = State) ->
+ LogOn andalso rpc:block_call(State#state.node, rb, stop, []),
{stop, normal, State};
handle_event(#wx{id = ?wxID_HELP, event = #wxCommand{type = command_menu_selected}}, State) ->
@@ -300,12 +312,42 @@ handle_event(#wx{id = ?ID_PING, event = #wxCommand{type = command_menu_selected}
end,
{noreply, UpdState};
-handle_event(#wx{id = Id, event = #wxCommand{type = command_menu_selected}}, State)
- when Id > ?FIRST_NODES_MENU_ID, Id < ?LAST_NODES_MENU_ID ->
+handle_event(#wx{id = ?ID_LOGVIEW, event = #wxCommand{type = command_menu_selected}},
+ #state{frame = Frame, log = PrevLog, node = Node} = State) ->
+ try
+ ok = ensure_sasl_started(Node),
+ ok = ensure_mf_h_handler_used(Node),
+ ok = ensure_rb_mode(Node, PrevLog),
+ case PrevLog of
+ false ->
+ rpc:block_call(Node, rb, start, []),
+ set_status("Observer - " ++ atom_to_list(Node) ++ " (rb_server started)"),
+ {noreply, State#state{log=true}};
+ true ->
+ rpc:block_call(Node, rb, stop, []),
+ set_status("Observer - " ++ atom_to_list(Node) ++ " (rb_server stopped)"),
+ {noreply, State#state{log=false}}
+ end
+ catch
+ throw:Reason ->
+ create_txt_dialog(Frame, Reason, "Log view status", ?wxICON_ERROR),
+ {noreply, State}
+ end;
- Node = lists:nth(Id - ?FIRST_NODES_MENU_ID, State#state.nodes),
- UpdState = change_node_view(Node, State),
- {noreply, UpdState};
+handle_event(#wx{id = Id, event = #wxCommand{type = command_menu_selected}},
+ #state{nodes= Ns , node = PrevNode, log = PrevLog} = State)
+ when Id > ?FIRST_NODES_MENU_ID, Id < ?LAST_NODES_MENU_ID ->
+ Node = lists:nth(Id - ?FIRST_NODES_MENU_ID, Ns),
+ %% Close rb_server only if a node other than the current one is selected
+ LState = case PrevLog of
+ true -> case Node == PrevNode of
+ false -> rpc:block_call(PrevNode, rb, stop, []),
+ State#state{log=false} ;
+ true -> State
+ end;
+ false -> State
+ end,
+ {noreply, change_node_view(Node, LState)};
handle_event(Event, State) ->
Pid = get_active_pid(State),
@@ -340,6 +382,9 @@ handle_call(stop, _, State = #state{frame = Frame}) ->
wxFrame:destroy(Frame),
{stop, normal, ok, State};
+handle_call(log_status, _From, State) ->
+ {reply, State#state.log, State};
+
handle_call(_Msg, _From, State) ->
{reply, ok, State}.
@@ -422,8 +467,7 @@ return_to_localnode(Frame, Node) ->
end.
create_txt_dialog(Frame, Msg, Title, Style) ->
- MD = wxMessageDialog:new(Frame, Msg, [{style, Style}]),
- wxMessageDialog:setTitle(MD, Title),
+ MD = wxMessageDialog:new(Frame, Msg, [{style, Style}, {caption,Title}]),
wxDialog:showModal(MD),
wxDialog:destroy(MD).
@@ -468,7 +512,7 @@ check_page_title(Notebook) ->
get_active_pid(#state{notebook=Notebook, pro_panel=Pro, sys_panel=Sys,
tv_panel=Tv, trace_panel=Trace, app_panel=App,
- perf_panel=Perf
+ perf_panel=Perf, allc_panel=Alloc
}) ->
Panel = case check_page_title(Notebook) of
"Processes" -> Pro;
@@ -476,13 +520,14 @@ get_active_pid(#state{notebook=Notebook, pro_panel=Pro, sys_panel=Sys,
"Table Viewer" -> Tv;
?TRACE_STR -> Trace;
"Load Charts" -> Perf;
- "Applications" -> App
+ "Applications" -> App;
+ ?ALLOC_STR -> Alloc
end,
wx_object:get_pid(Panel).
pid2panel(Pid, #state{pro_panel=Pro, sys_panel=Sys,
tv_panel=Tv, trace_panel=Trace, app_panel=App,
- perf_panel=Perf}) ->
+ perf_panel=Perf, allc_panel=Alloc}) ->
case Pid of
Pro -> "Processes";
Sys -> "System";
@@ -490,6 +535,7 @@ pid2panel(Pid, #state{pro_panel=Pro, sys_panel=Sys,
Trace -> ?TRACE_STR;
Perf -> "Load Charts";
App -> "Applications";
+ Alloc -> ?ALLOC_STR;
_ -> "unknown"
end.
@@ -569,17 +615,19 @@ default_menus(NodesMenuItems) ->
false -> {"Nodes", NodesMenuItems ++
[#create_menu{id = ?ID_CONNECT, text = "Enable distribution"}]}
end,
+ LogMenu = {"Log", [#create_menu{id = ?ID_LOGVIEW, text = "Toggle log view"}]},
case os:type() =:= {unix, darwin} of
false ->
FileMenu = {"File", [CDV, Quit]},
HelpMenu = {"Help", [About,Help]},
- [FileMenu, NodeMenu, HelpMenu];
+ [FileMenu, NodeMenu, LogMenu, HelpMenu];
true ->
%% On Mac quit and about will be moved to the "default' place
%% automagicly, so just add them to a menu that always exist.
%% But not to the help menu for some reason
+
{Tag, Menus} = FileMenu,
- [{Tag, Menus ++ [About]}, NodeMenu, {"&Help", [Help]}]
+ [{Tag, Menus ++ [Quit,About]}, NodeMenu, LogMenu, {"&Help", [Help]}]
end.
clean_menus(Menus, MenuBar) ->
@@ -658,3 +706,59 @@ update_node_list(State = #state{menubar=MenuBar}) ->
end,
observer_lib:create_menu_item(Dist, NodeMenu, Index),
State#state{nodes = Nodes}.
+
+ensure_sasl_started(Node) ->
+ %% is sasl started ?
+ Apps = rpc:block_call(Node, application, which_applications, []),
+ case lists:keyfind(sasl, 1, Apps) of
+ false -> throw("Error: sasl application not started."),
+ error;
+ {sasl, _, _} -> ok
+ end.
+
+ensure_mf_h_handler_used(Node) ->
+ %% is log_mf_h used ?
+ Handlers = rpc:block_call(Node, gen_event, which_handlers, [error_logger]),
+ case lists:any(fun(L)-> L == log_mf_h end, Handlers) of
+ false -> throw("Error: log_mf_h handler not used in sasl."),
+ error;
+ true -> ok
+ end.
+
+ensure_rb_mode(Node, PrevLog) ->
+ ok = ensure_rb_module_loaded(Node),
+ ok = is_rb_compatible(Node),
+ ok = is_rb_server_running(Node, PrevLog),
+ ok.
+
+
+ensure_rb_module_loaded(Node) ->
+ %% Need to ensure that module is loaded in order to detect exported
+ %% functions on interactive nodes
+ case rpc:block_call(Node, code, ensure_loaded, [rb]) of
+ {badrpc, Reason} ->
+ throw("Error: badrpc - " ++ io_lib:format("~tp",[Reason]));
+ {error, Reason} ->
+ throw("Error: rb module load error - " ++ io_lib:format("~tp",[Reason]));
+ {module,rb} ->
+ ok
+ end.
+
+is_rb_compatible(Node) ->
+ %% Simply test that rb:log_list/0 is exported
+ case rpc:block_call(Node, erlang, function_exported, [rb, log_list, 0]) of
+ false -> throw("Error: Node's Erlang release must be at least R16B02.");
+ true -> ok
+ end.
+
+is_rb_server_running(Node, LogState) ->
+ %% If already started, somebody else may be using it.
+ %% We cannot use it as well, since the log file would be overwritten. Not fair.
+ case rpc:block_call(Node, erlang, whereis, [rb_server]) of
+ Pid when is_pid(Pid), (LogState == false) ->
+ throw("Error: rb_server is already started and maybe used by someone.");
+ Pid when is_pid(Pid) ->
+ ok;
+ undefined ->
+ ok
+ end.
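Note: the new log toggle requires the observed node to already run sasl with the log_mf_h handler; the checks above throw otherwise. Roughly the setup the test suite below uses, shown as a hedged sketch (function name and log directory are illustrative):

    setup_sasl_for_log_view(LogDir) ->      %% LogDir e.g. "/tmp/observer_mf_log"
        application:stop(sasl),
        application:set_env(sasl, sasl_error_logger, tty),
        application:set_env(sasl, error_logger_mf_dir, LogDir),
        application:set_env(sasl, error_logger_mf_maxbytes, 1000),
        application:set_env(sasl, error_logger_mf_maxfiles, 5),
        application:start(sasl).
    %% With log_mf_h installed, "Log > Toggle log view" starts rb on that node
    %% and the per-process "Log" tab can show matching report entries.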
diff --git a/lib/observer/src/ttb.erl b/lib/observer/src/ttb.erl
index 61fd6d1787..a2db40aa2f 100644
--- a/lib/observer/src/ttb.erl
+++ b/lib/observer/src/ttb.erl
@@ -849,7 +849,7 @@ get_nodes() ->
receive {?MODULE,Nodes} -> Nodes end.
ts() ->
- {{Y,M,D},{H,Min,S}} = calendar:now_to_local_time(now()),
+ {{Y,M,D},{H,Min,S}} = calendar:now_to_local_time(erlang:timestamp()),
io_lib:format("-~4.4.0w~2.2.0w~2.2.0w-~2.2.0w~2.2.0w~2.2.0w",
[Y,M,D,H,Min,S]).
diff --git a/lib/observer/test/observer_SUITE.erl b/lib/observer/test/observer_SUITE.erl
index 5cf719acb1..c69fdf4bdf 100644
--- a/lib/observer/test/observer_SUITE.erl
+++ b/lib/observer/test/observer_SUITE.erl
@@ -22,6 +22,8 @@
-include_lib("wx/include/wx.hrl").
-include_lib("observer/src/observer_tv.hrl").
+-define(ID_LOGVIEW, 5).
+
%% Test server specific exports
-export([all/0, suite/0,groups/0]).
-export([init_per_testcase/2, end_per_testcase/2,
@@ -44,8 +46,9 @@ all() ->
groups() ->
[{gui, [],
- [basic
- , process_win, table_win
+ [basic,
+ process_win,
+ table_win
]
}].
@@ -107,7 +110,7 @@ appup_file(Config) when is_list(Config) ->
basic(suite) -> [];
basic(doc) -> [""];
basic(Config) when is_list(Config) ->
- timer:send_after(100, "foobar"), %% Otherwise the timer sever gets added to procs
+ timer:send_after(100, "foobar"), %% Otherwise the timer server gets added to procs
ProcsBefore = processes(),
NumProcsBefore = length(ProcsBefore),
@@ -126,7 +129,7 @@ basic(Config) when is_list(Config) ->
timer:sleep(200),
ok = wxNotebook:advanceSelection(Notebook)
end,
- %% Just verify that we can toogle trough all pages
+ %% Just verify that we can toggle through all pages
[_|_] = [Check(N, false) || N <- lists:seq(1, Count)],
%% Cause it to resize
Frame = get_top_level_parent(Notebook),
@@ -214,10 +217,27 @@ test_page(Title, Window) ->
process_win(suite) -> [];
process_win(doc) -> [""];
process_win(Config) when is_list(Config) ->
+ % Stop SASL if already started
+ SaslStart = case whereis(sasl_sup) of
+ undefined -> false;
+ _ -> application:stop(sasl),
+ true
+ end,
+ % Define custom sasl and log_mf_h app vars
+ Privdir=?config(priv_dir,Config),
+ application:set_env(sasl, sasl_error_logger, tty),
+ application:set_env(sasl, error_logger_mf_dir, Privdir),
+ application:set_env(sasl, error_logger_mf_maxbytes, 1000),
+ application:set_env(sasl, error_logger_mf_maxfiles, 5),
+ application:start(sasl),
ok = observer:start(),
ObserverNB = setup_whitebox_testing(),
Parent = get_top_level_parent(ObserverNB),
- Frame = observer_procinfo:start(self(), Parent, self()),
+ % Activate log view
+ whereis(observer) ! #wx{id = ?ID_LOGVIEW, event = #wxCommand{type = command_menu_selected}},
+ timer:sleep(1000),
+ % Process window tests (use sasl_sup for a non empty Log tab)
+ Frame = observer_procinfo:start(whereis(sasl_sup), Parent, self()),
PIPid = wx_object:get_pid(Frame),
PIPid ! {get_debug_info, self()},
Notebook = receive {procinfo_debug, NB} -> NB end,
@@ -229,6 +249,11 @@ process_win(Config) when is_list(Config) ->
[_|_] = [Check(N) || N <- lists:seq(1, Count)],
PIPid ! #wx{event=#wxClose{type=close_window}},
observer:stop(),
+ application:stop(sasl),
+ case SaslStart of
+ true -> application:start(sasl);
+ false -> ok
+ end,
ok.
table_win(suite) -> [];
diff --git a/lib/observer/vsn.mk b/lib/observer/vsn.mk
index c8a6023b4f..7e7e32099b 100644
--- a/lib/observer/vsn.mk
+++ b/lib/observer/vsn.mk
@@ -1 +1 @@
-OBSERVER_VSN = 2.0.3
+OBSERVER_VSN = 2.1
diff --git a/lib/orber/COSS/CosNaming/CosNaming_NamingContextExt_impl.erl b/lib/orber/COSS/CosNaming/CosNaming_NamingContextExt_impl.erl
index 84db0b89f8..654a8f4385 100644
--- a/lib/orber/COSS/CosNaming/CosNaming_NamingContextExt_impl.erl
+++ b/lib/orber/COSS/CosNaming/CosNaming_NamingContextExt_impl.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -533,7 +533,9 @@ unbind(_OE_THIS, _OE_State, []) ->
%% Returns :
%%----------------------------------------------------------------------
new_context(_OE_THIS, OE_State) ->
- DBKey = term_to_binary({now(), node()}),
+ DBKey = term_to_binary({{erlang:system_time(),
+ erlang:unique_integer()},
+ node()}),
%% Create a record in the table and set the key to a newly
{reply,
'CosNaming_NamingContextExt':oe_create(DBKey,
@@ -547,7 +549,9 @@ new_context(_OE_THIS, OE_State) ->
%% Returns :
%%----------------------------------------------------------------------
bind_new_context(OE_THIS, OE_State, N) ->
- DBKey = term_to_binary({now(), node()}),
+ DBKey = term_to_binary({{erlang:system_time(),
+ erlang:unique_integer()},
+ node()}),
%% Create a record in the table and set the key to a newly
%% generated objectkey.
%%?PRINTDEBUG("bind_new_context"),
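Note: the same replacement for now()-based object keys recurs in several orber modules below; erlang:system_time/0 keeps the keys coarsely time-ordered while erlang:unique_integer/0 guarantees uniqueness. A hypothetical shared helper (not part of the patch) would look like:

    unique_objkey() ->
        %% system_time/0 gives a coarse ordering; unique_integer/0 keeps keys
        %% distinct even when two are created within the same time unit.
        term_to_binary({{erlang:system_time(), erlang:unique_integer()}, node()}).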
diff --git a/lib/orber/src/cdr_decode.erl b/lib/orber/src/cdr_decode.erl
index 36ef6ce02f..00dcf01c56 100644
--- a/lib/orber/src/cdr_decode.erl
+++ b/lib/orber/src/cdr_decode.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2010. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -193,7 +193,7 @@ dec_message_header(TypeCodes, Message, Bytes) ->
%% Args:
%% The message as a byte sequence.
%% Returns:
-%% A tuple {Endianess, Rest} where Endianess is big or little.
+%% A tuple {Endianness, Rest} where Endianness is big or little.
%% Rest is the remaining message byte sequence.
%%-----------------------------------------------------------------
dec_byte_order(<<0:8,T/binary>>) ->
@@ -206,7 +206,7 @@ dec_byte_order(<<1:8,T/binary>>) ->
%% Args:
%% The message as a byte sequence.
%% Returns:
-%% A tuple {Endianess, Rest} where Endianess is big or little.
+%% A tuple {Endianness, Rest} where Endianness is big or little.
%% Rest is the remaining message byte sequence.
%%-----------------------------------------------------------------
dec_byte_order_list([0|T]) ->
@@ -1110,10 +1110,8 @@ ifrid_to_name(Id, Type) ->
[?LINE, Id, Type], ?DEBUG_LEVEL),
corba:raise(#'MARSHAL'{completion_status=?COMPLETED_MAYBE});
Nodes ->
- {A,B,C} = now(),
- random:seed(A,B,C),
L = length(Nodes),
- IFR = get_ifr_node(Nodes, random:uniform(L), L),
+ IFR = get_ifr_node(Nodes, rand:uniform(L), L),
list_to_atom('OrberApp_IFR':get_absolute_name(IFR, Id))
end;
{'EXIT', Other} ->
@@ -1176,7 +1174,7 @@ get_ifr_node(Nodes, N, L) ->
_ ->
%% Not able to commincate with the node. Try next one.
NewL = L-1,
- get_ifr_node(lists:delete(Node, Nodes), random:uniform(NewL), NewL)
+ get_ifr_node(lists:delete(Node, Nodes), rand:uniform(NewL), NewL)
end.
@@ -1260,10 +1258,8 @@ get_user_exception_type(TypeId) ->
completion_status=?COMPLETED_MAYBE})
end;
Nodes ->
- {A,B,C} = now(),
- random:seed(A,B,C),
L = length(Nodes),
- IFR = get_ifr_node(Nodes, random:uniform(L), L),
+ IFR = get_ifr_node(Nodes, rand:uniform(L), L),
'OrberApp_IFR':get_user_exception_type(IFR, TypeId)
end
end.
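Note: besides dropping now/0, the hunks above migrate from the random module to rand, which seeds itself per process on first use, so the explicit random:seed(A,B,C) calls simply disappear. A minimal sketch of the node-picking pattern:

    pick_node(Nodes) when Nodes =/= [] ->
        %% rand:uniform/1 auto-seeds the calling process; no random:seed/3 needed.
        lists:nth(rand:uniform(length(Nodes)), Nodes).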
diff --git a/lib/orber/src/corba.erl b/lib/orber/src/corba.erl
index 586a02d540..f0eeb18c24 100644
--- a/lib/orber/src/corba.erl
+++ b/lib/orber/src/corba.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1922,7 +1922,9 @@ mk_passive_objkey(Mod, Module, Flags) ->
{Mod, 'passive', Module, term_to_binary(undefined), 0, Flags}.
make_objkey() ->
- term_to_binary({now(), node()}).
+ term_to_binary({{erlang:system_time(),
+ erlang:unique_integer()},
+ node()}).
objkey_to_string({_Mod, 'registered', 'orber_init', _UserDef, _OrberDef, _Flags}) ->
"INIT";
diff --git a/lib/orber/src/orber.app.src b/lib/orber/src/orber.app.src
index 30bd90347d..217c1b247f 100644
--- a/lib/orber/src/orber.app.src
+++ b/lib/orber/src/orber.app.src
@@ -104,8 +104,8 @@
{applications, [stdlib, kernel, mnesia]},
{env, []},
{mod, {orber, []}},
- {runtime_dependencies, ["stdlib-2.0","ssl-5.3.4","mnesia-4.12","kernel-3.0",
- "inets-5.10","erts-6.0"]}
+ {runtime_dependencies, ["stdlib-2.5","ssl-5.3.4","mnesia-4.12","kernel-3.0",
+ "inets-5.10","erts-7.0"]}
]}.
diff --git a/lib/orber/src/orber_ifr_utils.erl b/lib/orber/src/orber_ifr_utils.erl
index 11e3d1cd3b..35c891ef6e 100644
--- a/lib/orber/src/orber_ifr_utils.erl
+++ b/lib/orber/src/orber_ifr_utils.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -289,10 +289,9 @@ makeref(Obj) ->
%%% unique tag. I do this because the tuple generated takes a lot of space
%%% when I dump the database. A binary is simply printed as #Bin, which
%%% is much less obtrusive.
-%%% The code has been moved to a macro defined in orber_ifr.hrl, so we
-%%% can use a simpler uniqification code when debugging.
-unique() -> term_to_binary({node(), now()}).
+unique() -> term_to_binary({node(), {erlang:system_time(),
+ erlang:unique_integer()}}).
%%%----------------------------------------------------------------------
%%% Check for an existing object with the Id of the object which is
diff --git a/lib/orber/src/orber_objectkeys.erl b/lib/orber/src/orber_objectkeys.erl
index b0e759187b..f57b1d811f 100644
--- a/lib/orber/src/orber_objectkeys.erl
+++ b/lib/orber/src/orber_objectkeys.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -374,11 +374,11 @@ handle_call({register, Objkey, Pid, Type}, _From, State) ->
%% No key exists. Ok to register.
mnesia:write(#orber_objkeys{object_key=Objkey, pid=Pid,
persistent=Type,
- timestamp=now()});
+ timestamp=erlang:monotonic_time(seconds)});
[X] when X#orber_objkeys.persistent==true,
X#orber_objkeys.pid == dead ->
%% A persistent object is being restarted. Update Pid & time.
- mnesia:write(X#orber_objkeys{pid=Pid, timestamp=now()});
+ mnesia:write(X#orber_objkeys{pid=Pid, timestamp=erlang:monotonic_time(seconds)});
[X] when is_pid(X#orber_objkeys.pid) ->
%% Object exists, i.e., trying to create an object with
%% the same name.
@@ -477,7 +477,7 @@ handle_info({'EXIT', Pid, Reason}, State) when is_pid(Pid) ->
Reason /= normal andalso
Reason /= shutdown ->
mnesia:write(X#orber_objkeys{pid=dead,
- timestamp=now()});
+ timestamp=erlang:monotonic_time(seconds)});
[X] when X#orber_objkeys.persistent==true ->
mnesia:delete({orber_objkeys, X#orber_objkeys.object_key});
_->
@@ -503,8 +503,8 @@ code_change(_OldVsn, State, _Extra) ->
%% Internal Functions
%%-----------------------------------------------------------------
-timetest(S, {MeSec, Sec, USec}) ->
- {MeSec, Sec+S, USec} < now().
+timetest(S, TimeStamp) ->
+ TimeStamp+S < erlang:monotonic_time(seconds).
get_key_from_pid(Pid) ->
case mnesia:dirty_match_object({orber_objkeys, '_', Pid,'_','_'}) of
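Note: storing erlang:monotonic_time(seconds) instead of a now/0 3-tuple turns the expiry check in timetest/2 into plain integer arithmetic. A small sketch of the comparison (function name is illustrative):

    %% True once TimeoutSecs seconds have passed since StoredTs was taken
    %% with erlang:monotonic_time(seconds).
    expired(StoredTs, TimeoutSecs) ->
        StoredTs + TimeoutSecs < erlang:monotonic_time(seconds).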
diff --git a/lib/orber/src/orber_socket.erl b/lib/orber/src/orber_socket.erl
index 4507d90cce..4567728693 100644
--- a/lib/orber/src/orber_socket.erl
+++ b/lib/orber/src/orber_socket.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2011. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -167,8 +167,6 @@ multi_connect([CurrentPort|Rest], Retries, ssl, Host, Port, Options, Timeout) ->
get_port_sequence(Min, Max) ->
case orber_env:iiop_out_ports_random() of
true ->
- {A1,A2,A3} = now(),
- random:seed(A1, A2, A3),
Seq = lists:seq(Min, Max),
random_sequence((Max - Min) + 1, Seq, []);
_ ->
@@ -178,7 +176,7 @@ get_port_sequence(Min, Max) ->
random_sequence(0, _, Acc) ->
Acc;
random_sequence(Length, Seq, Acc) ->
- Nth = random:uniform(Length),
+ Nth = rand:uniform(Length),
Value = lists:nth(Nth, Seq),
NewSeq = lists:delete(Value, Seq),
random_sequence(Length-1, NewSeq, [Value|Acc]).
diff --git a/lib/orber/src/orber_web_server.erl b/lib/orber/src/orber_web_server.erl
index 9d2a063a69..1cda862f1b 100644
--- a/lib/orber/src/orber_web_server.erl
+++ b/lib/orber/src/orber_web_server.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -46,7 +46,7 @@
-define(DEBUG_LEVEL, 5).
--record(state, {ts}).
+-record(state, {}).
-include("ifr_objects.hrl").
%%----------------------------------------------------------------------
@@ -133,9 +133,7 @@ delete_obj(Env, Input) ->
%% Description:
%%----------------------------------------------------------------------
init(_Arg)->
- {M, S, U} = now(),
- TS = M*1000000000000 + S*1000000 + U,
- {ok, #state{ts = TS}}.
+ {ok, #state{}}.
terminate(_,_State)->
ok.
diff --git a/lib/orber/test/cdrcoding_10_SUITE.erl b/lib/orber/test/cdrcoding_10_SUITE.erl
index 54ad92cf7e..d8e57f74e5 100644
--- a/lib/orber/test/cdrcoding_10_SUITE.erl
+++ b/lib/orber/test/cdrcoding_10_SUITE.erl
@@ -622,4 +622,6 @@ corba_fake_mk_objkey(Id, 'registered', RegName) when is_atom(RegName) ->
term_to_binary(undefined), term_to_binary(undefined)}.
make_objkey() ->
- term_to_binary({now(), node()}).
+ term_to_binary({{erlang:system_time(),
+ erlang:unique_integer()},
+ node()}).
diff --git a/lib/orber/test/cdrcoding_11_SUITE.erl b/lib/orber/test/cdrcoding_11_SUITE.erl
index 29b3e33069..bcd2b70446 100644
--- a/lib/orber/test/cdrcoding_11_SUITE.erl
+++ b/lib/orber/test/cdrcoding_11_SUITE.erl
@@ -621,4 +621,6 @@ corba_fake_mk_objkey(Id, 'registered', RegName) when is_atom(RegName) ->
term_to_binary(undefined), term_to_binary(undefined)}.
make_objkey() ->
- term_to_binary({now(), node()}).
+ term_to_binary({{erlang:system_time(),
+ erlang:unique_integer()},
+ node()}).
diff --git a/lib/orber/test/cdrcoding_12_SUITE.erl b/lib/orber/test/cdrcoding_12_SUITE.erl
index dd9b98434d..a58688b654 100644
--- a/lib/orber/test/cdrcoding_12_SUITE.erl
+++ b/lib/orber/test/cdrcoding_12_SUITE.erl
@@ -609,4 +609,6 @@ corba_fake_mk_objkey(Id, 'registered', RegName) when is_atom(RegName) ->
term_to_binary(undefined), term_to_binary(undefined)}.
make_objkey() ->
- term_to_binary({now(), node()}).
+ term_to_binary({{erlang:system_time(),
+ erlang:unique_integer()},
+ node()}).
diff --git a/lib/orber/test/iop_ior_10_SUITE.erl b/lib/orber/test/iop_ior_10_SUITE.erl
index 58dd1b5dba..be3daf6198 100644
--- a/lib/orber/test/iop_ior_10_SUITE.erl
+++ b/lib/orber/test/iop_ior_10_SUITE.erl
@@ -182,4 +182,6 @@ corba_fake_mk_objkey(Id, 'registered', RegName) when is_atom(RegName) ->
make_objkey() ->
- term_to_binary({now(), node()}).
+ term_to_binary({{erlang:system_time(),
+ erlang:unique_integer()},
+ node()}).
diff --git a/lib/orber/test/iop_ior_11_SUITE.erl b/lib/orber/test/iop_ior_11_SUITE.erl
index 24b2f66357..4c4dd4effa 100644
--- a/lib/orber/test/iop_ior_11_SUITE.erl
+++ b/lib/orber/test/iop_ior_11_SUITE.erl
@@ -201,4 +201,6 @@ corba_fake_mk_objkey(Id, 'registered', RegName) when is_atom(RegName) ->
{Id, 'registered', RegName, term_to_binary(undefined), 0, 0}.
make_objkey() ->
- term_to_binary({now(), node()}).
+ term_to_binary({{erlang:system_time(),
+ erlang:unique_integer()},
+ node()}).
diff --git a/lib/orber/test/iop_ior_12_SUITE.erl b/lib/orber/test/iop_ior_12_SUITE.erl
index 4c6e9ddb91..9f50784666 100644
--- a/lib/orber/test/iop_ior_12_SUITE.erl
+++ b/lib/orber/test/iop_ior_12_SUITE.erl
@@ -202,4 +202,6 @@ corba_fake_mk_objkey(Id, 'registered', RegName) when is_atom(RegName) ->
{Id, 'registered', RegName, term_to_binary(undefined), 0, 0}.
make_objkey() ->
- term_to_binary({now(), node()}).
+ term_to_binary({{erlang:system_time(),
+ erlang:unique_integer()},
+ node()}).
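The repeated make_objkey/0 change above replaces now/0, whose uniqueness guarantee the old code relied on, with values that are unique by construction. A minimal sketch of the same idea, usable outside these suites:

    %% erlang:unique_integer/0,1 is unique within this runtime instance;
    %% erlang:system_time/0 adds a wall-clock component so keys also differ
    %% across node restarts.
    make_objkey() ->
        Unique = erlang:unique_integer([positive]),
        term_to_binary({{erlang:system_time(), Unique}, node()}).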
diff --git a/lib/orber/test/multi_ORB_SUITE.erl b/lib/orber/test/multi_ORB_SUITE.erl
index 40d8846e0f..3d0132c3e6 100644
--- a/lib/orber/test/multi_ORB_SUITE.erl
+++ b/lib/orber/test/multi_ORB_SUITE.erl
@@ -922,9 +922,9 @@ max_requests(Node, Host, Port) ->
spawn(orber_test_server, pseudo_call_delay, [Obj, 15000]),
%% Wait for a second to be sure that the previous request has been sent
timer:sleep(1000),
- {MegaSecsB, Before, _} = now(),
+ {MegaSecsB, Before, _} = erlang:timestamp(),
pseudo_calls(5, Obj),
- {MegaSecsA, After, _} = now(),
+ {MegaSecsA, After, _} = erlang:timestamp(),
%% Normally we can perform hundreds of pseudo-calls per second. Hence,
%% if we add 8 seconds to 'Before' it should still be less since we only
%% allow one request at a time to the target ORB.
diff --git a/lib/orber/test/orber_acl_SUITE.erl b/lib/orber/test/orber_acl_SUITE.erl
index ab2c2c872c..05146afded 100644
--- a/lib/orber/test/orber_acl_SUITE.erl
+++ b/lib/orber/test/orber_acl_SUITE.erl
@@ -272,21 +272,21 @@ ipv6_bm(_) ->
bm2(Filters, Family, Ip) ->
{ok, IPTuple} = inet:getaddr(Ip, Family),
orber_acl:init_acl(Filters, Family),
- TimeBefore1 = erlang:now(),
+ TimeBefore1 = erlang:timestamp(),
bm_loop(IPTuple, ?NO_OF_TIMES),
- TimeAfter1 = erlang:now(),
+ TimeAfter1 = erlang:timestamp(),
orber_acl:clear_acl(),
Time1 = computeTime(TimeBefore1, TimeAfter1),
orber_acl:init_acl(Filters, Family),
- TimeBefore2 = erlang:now(),
+ TimeBefore2 = erlang:timestamp(),
bm_loop2(Ip, ?NO_OF_TIMES, Family),
- TimeAfter2 = erlang:now(),
+ TimeAfter2 = erlang:timestamp(),
orber_acl:clear_acl(),
Time2 = computeTime(TimeBefore2, TimeAfter2),
orber_acl:init_acl(Filters, Family),
- TimeBefore3 = erlang:now(),
+ TimeBefore3 = erlang:timestamp(),
bm_loop2(IPTuple, ?NO_OF_TIMES, Family),
- TimeAfter3 = erlang:now(),
+ TimeAfter3 = erlang:timestamp(),
orber_acl:clear_acl(),
Time3 = computeTime(TimeBefore3, TimeAfter3),
{ok, round(?NO_OF_TIMES/Time1), round(?NO_OF_TIMES/Time2), round(?NO_OF_TIMES/Time3)}.
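These benchmark hunks only swap erlang:now/0 for erlang:timestamp/0, which keeps the {MegaSecs, Secs, MicroSecs} shape without the global ordering cost of now/0. A minimal timing helper in that style (a sketch, not part of the suite):

    time_it(Fun) when is_function(Fun, 0) ->
        Before = erlang:timestamp(),
        Result = Fun(),
        After = erlang:timestamp(),
        {timer:now_diff(After, Before), Result}.   %% elapsed time in microseconds

    %% e.g. {Micros, ok} = time_it(fun() -> timer:sleep(10) end).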
diff --git a/lib/orber/test/orber_test_lib.erl b/lib/orber/test/orber_test_lib.erl
index 46ed26f210..c970600fce 100644
--- a/lib/orber/test/orber_test_lib.erl
+++ b/lib/orber/test/orber_test_lib.erl
@@ -220,7 +220,7 @@ js_node(InitOptions) when is_list(InitOptions) ->
js_node(InitOptions, []).
js_node(InitOptions, StartOptions) when is_list(InitOptions) ->
- {A,B,C} = erlang:now(),
+ {A,B,C} = erlang:timestamp(),
[_, Host] = string:tokens(atom_to_list(node()), [$@]),
_NewInitOptions = check_options(InitOptions),
js_node_helper(Host, 0, lists:concat([A,'_',B,'_',C]),
diff --git a/lib/orber/test/orber_test_server_impl.erl b/lib/orber/test/orber_test_server_impl.erl
index 10a9caf242..9aa12e98fb 100644
--- a/lib/orber/test/orber_test_server_impl.erl
+++ b/lib/orber/test/orber_test_server_impl.erl
@@ -243,22 +243,22 @@ relay_cast(_Self, State, Target) ->
%% Testing pseudo calls.
pseudo_call(_Self, State) ->
- io:format("orber_test_server:pseudo_call( ~p )~n", [now()]),
+ io:format("orber_test_server:pseudo_call( ~p )~n", [erlang:timestamp()]),
{reply, ok, State}.
pseudo_cast(_Self, State) ->
- io:format("orber_test_server:pseudo_cast( ~p )~n", [now()]),
+ io:format("orber_test_server:pseudo_cast( ~p )~n", [erlang:timestamp()]),
{noreply, State}.
pseudo_call_delay(_Self, State, Time) ->
- io:format("orber_test_server:pseudo_call_delay( ~p )~n", [now()]),
+ io:format("orber_test_server:pseudo_call_delay( ~p )~n", [erlang:timestamp()]),
timer:sleep(Time),
- io:format("orber_test_server:pseudo_call_delay( ~p )~n", [now()]),
+ io:format("orber_test_server:pseudo_call_delay( ~p )~n", [erlang:timestamp()]),
{reply, {ok, Time}, State}.
pseudo_cast_delay(_Self, State, Time) ->
- io:format("orber_test_server:pseudo_cast_delay( ~p )~n", [now()]),
+ io:format("orber_test_server:pseudo_cast_delay( ~p )~n", [erlang:timestamp()]),
timer:sleep(Time),
- io:format("orber_test_server:pseudo_cast_delay( ~p )~n", [now()]),
+ io:format("orber_test_server:pseudo_cast_delay( ~p )~n", [erlang:timestamp()]),
{noreply, State}.
pseudo_call_raise_exc(_Self, State, 1) ->
diff --git a/lib/orber/vsn.mk b/lib/orber/vsn.mk
index 28fe9323fb..505c77de18 100644
--- a/lib/orber/vsn.mk
+++ b/lib/orber/vsn.mk
@@ -1 +1 @@
-ORBER_VSN = 3.7.1
+ORBER_VSN = 3.8
diff --git a/lib/os_mon/c_src/cpu_sup.c b/lib/os_mon/c_src/cpu_sup.c
index e9fd75a32c..9e217db105 100644
--- a/lib/os_mon/c_src/cpu_sup.c
+++ b/lib/os_mon/c_src/cpu_sup.c
@@ -31,15 +31,28 @@
#include <unistd.h>
#include <string.h>
+#if (defined(__APPLE__) && defined(__MACH__)) || defined(__OpenBSD__) || defined(__FreeBSD__) || defined(__DragonFly__)
+#include <sys/param.h>
+#include <sys/sysctl.h>
+#include <limits.h>
+#include <fcntl.h>
+#endif
+#if defined(__FreeBSD__) || defined(__DragonFly__)
+#include <kvm.h>
+#include <sys/user.h>
+#endif
+
#if defined(__sun__)
#include <kstat.h>
#endif
-#include <sys/sysinfo.h>
#include <errno.h>
+#if defined(__sun__) || defined(__linux__)
+#include <sys/sysinfo.h>
+#endif
+
#if defined(__linux__)
-#include <string.h> /* strlen */
#define PROCSTAT "/proc/stat"
#define BUFFERSIZE (256)
@@ -59,6 +72,13 @@ typedef struct {
#endif
+#if defined(__FreeBSD__)
+#include <sys/resource.h>
+#include <sys/sysctl.h>
+#define CU_BSD_VALUES (6)
+#endif
+
+
#define FD_IN (0)
#define FD_OUT (1)
#define FD_ERR (2)
@@ -124,10 +144,15 @@ static void util_measure(unsigned int **result_vec, int *result_sz);
#if defined(__sun__)
static unsigned int misc_measure(char* name);
#endif
-static void send(unsigned int data);
+static void sendi(unsigned int data);
static void sendv(unsigned int data[], int ints);
static void error(char* err_msg);
+#if (defined(__APPLE__) && defined(__MACH__)) || defined(__OpenBSD__) || defined(__FreeBSD__) || defined(__DragonFly__)
+static void bsd_count_procs(void);
+static void bsd_loadavg(int);
+#endif
+
#if defined(__sun__)
static kstat_ctl_t *kstat_ctl;
#endif
@@ -138,12 +163,16 @@ static int processors_online() {
}
#endif
+#if defined(__FreeBSD__)
+void getsysctl(const char *, void *, size_t);
+#endif
+
int main(int argc, char** argv) {
char cmd;
int rc;
int sz;
unsigned int *rv;
-#if defined(__linux__)
+#if defined(__linux__) || defined(__FreeBSD__)
unsigned int no_of_cpus = 0;
#endif
@@ -156,7 +185,14 @@ int main(int argc, char** argv) {
#if defined(__linux__)
no_of_cpus = processors_online();
if ( (rv = (unsigned int*)malloc(sizeof(unsigned int)*(2 + 2*no_of_cpus*CU_VALUES))) == NULL) {
- error("cpu_cup: malloc error");
+ error("cpu_sup: malloc error");
+ }
+#endif
+
+#if defined(__FreeBSD__)
+ getsysctl("hw.ncpu", &no_of_cpus, sizeof(int));
+ if ( (rv = (unsigned int*)malloc(sizeof(unsigned int)*(2 + 2*no_of_cpus*CU_BSD_VALUES))) == NULL) {
+ error("cpu_sup: malloc error");
}
#endif
@@ -173,20 +209,104 @@ int main(int argc, char** argv) {
error("Erlang has closed");
switch(cmd) {
- case PING: send(4711); break;
+ case PING: sendi(4711); break;
#if defined(__sun__)
- case NPROCS: send(misc_measure("nproc")); break;
- case AVG1: send(misc_measure("avenrun_1min")); break;
- case AVG5: send(misc_measure("avenrun_5min")); break;
- case AVG15: send(misc_measure("avenrun_15min")); break;
+ case NPROCS: sendi(misc_measure("nproc")); break;
+ case AVG1: sendi(misc_measure("avenrun_1min")); break;
+ case AVG5: sendi(misc_measure("avenrun_5min")); break;
+ case AVG15: sendi(misc_measure("avenrun_15min")); break;
+#elif defined(__OpenBSD__) || (defined(__APPLE__) && defined(__MACH__)) || defined(__FreeBSD__) || defined(__DragonFly__)
+ case NPROCS: bsd_count_procs(); break;
+ case AVG1: bsd_loadavg(0); break;
+ case AVG5: bsd_loadavg(1); break;
+ case AVG15: bsd_loadavg(2); break;
#endif
+#if defined(__sun__) || defined(__linux__) || defined(__FreeBSD__)
case UTIL: util_measure(&rv,&sz); sendv(rv, sz); break;
+#endif
case QUIT: free((void*)rv); return 0;
default: error("Bad command"); break;
}
}
- return 0; /* supress warnings */
+ return 0; /* suppress warnings */
+}
+
+/* ---------------------------- *
+ * BSD stat functions *
+ * ---------------------------- */
+#if defined(__OpenBSD__) || (defined(__APPLE__) && defined(__MACH__)) || defined(__FreeBSD__) || defined(__DragonFly__)
+
+static void bsd_loadavg(int idx) {
+ double avgs[3];
+ if (getloadavg(avgs, 3) < 0) {
+ error(strerror(errno));
+ return;
+ }
+ sendi((unsigned int)(avgs[idx] * 256));
+}
+
+#endif
+
+#if defined(__OpenBSD__)
+
+static void bsd_count_procs(void) {
+ int err, nproc;
+ size_t len = sizeof(nproc);
+ int mib[] = { CTL_KERN, KERN_NPROCS };
+
+ err = sysctl(mib, sizeof(mib) / sizeof(mib[0]), &nproc, &len, NULL, 0);
+ if (err) {
+ error(strerror(errno));
+ return;
+ }
+
+ sendi((unsigned int)nproc);
+}
+
+#elif defined(__FreeBSD__) || defined(__DragonFly__)
+
+static void bsd_count_procs(void) {
+ kvm_t *kd;
+ struct kinfo_proc *kp;
+ char err[_POSIX2_LINE_MAX];
+ int cnt = 0;
+
+ if ((kd = kvm_open(NULL, "/dev/null", NULL, O_RDONLY, err)) == NULL) {
+ error(err);
+ return;
+ }
+
+#if defined(KERN_PROC_PROC)
+ if ((kp = kvm_getprocs(kd, KERN_PROC_PROC, 0, &cnt)) == NULL) {
+#else
+ if ((kp = kvm_getprocs(kd, KERN_PROC_ALL, 0, &cnt)) == NULL) {
+#endif
+ error(strerror(errno));
+ return;
+ }
+
+ (void)kvm_close(kd);
+ sendi((unsigned int)cnt);
}
+
+#elif (defined(__APPLE__) && defined(__MACH__))
+
+static void bsd_count_procs(void) {
+ int err;
+ size_t len = 0;
+ int mib[] = { CTL_KERN, KERN_PROC, KERN_PROC_ALL };
+
+ err = sysctl(mib, sizeof(mib) / sizeof(mib[0]), NULL, &len, NULL, 0);
+ if (err) {
+ error(strerror(errno));
+ return;
+ }
+
+ sendi((unsigned int)(len / sizeof(struct kinfo_proc)));
+}
+
+#endif
+
/* ---------------------------- *
* Linux stat functions *
* ---------------------------- */
@@ -417,10 +537,75 @@ static void util_measure(unsigned int **result_vec, int *result_sz) {
#endif
/* ---------------------------- *
+ * FreeBSD stat functions *
+ * ---------------------------- */
+
+#if defined(__FreeBSD__)
+
+#define EXIT_WITH(msg) (rich_error(msg, __FILE__, __LINE__))
+#define RICH_BUFLEN (213) /* left in error(char*) */
+
+void rich_error(const char *reason, const char *file, const int line) {
+ char buf[RICH_BUFLEN];
+ snprintf(buf, RICH_BUFLEN, "%s (%s:%i)", reason, file, line);
+ error(buf);
+}
+#undef RICH_BUFLEN
+
+static void util_measure(unsigned int **result_vec, int *result_sz) {
+ int no_of_cpus;
+ size_t size_cpu_times;
+ unsigned long *cpu_times;
+ unsigned int *rv = NULL;
+ int i;
+
+ getsysctl("hw.ncpu", &no_of_cpus, sizeof(int));
+ /* Header constant CPUSTATES = #long values per cpu. */
+ size_cpu_times = sizeof(long) * CPUSTATES * no_of_cpus;
+ cpu_times = malloc(size_cpu_times);
+ if (!cpu_times) {
+ EXIT_WITH("badalloc");
+ }
+ getsysctl("kern.cp_times", cpu_times, size_cpu_times);
+
+ rv = *result_vec;
+ rv[0] = no_of_cpus;
+ rv[1] = CU_BSD_VALUES;
+ ++rv; /* first value is number of cpus */
+ ++rv; /* second value is number of entries */
+
+ for (i = 0; i < no_of_cpus; ++i) {
+ int offset = i * CPUSTATES;
+ rv[ 0] = CU_CPU_ID; rv[ 1] = i;
+ rv[ 2] = CU_USER; rv[ 3] = cpu_times[CP_USER + offset];
+ rv[ 4] = CU_NICE_USER; rv[ 5] = cpu_times[CP_NICE + offset];
+ rv[ 6] = CU_KERNEL; rv[ 7] = cpu_times[CP_SYS + offset];
+ rv[ 8] = CU_IDLE; rv[ 9] = cpu_times[CP_IDLE + offset];
+ rv[10] = CU_HARD_IRQ; rv[11] = cpu_times[CP_INTR + offset];
+ rv += CU_BSD_VALUES*2;
+ }
+
+ *result_sz = 2 + 2*CU_BSD_VALUES * no_of_cpus;
+}
+
+void getsysctl(const char *name, void *ptr, size_t len)
+{
+ size_t gotlen = len;
+ if (sysctlbyname(name, ptr, &gotlen, NULL, 0) != 0) {
+ EXIT_WITH("sysctlbyname failed");
+ }
+ if (gotlen != len) {
+ EXIT_WITH("sysctlbyname: unexpected length");
+ }
+}
+#endif
+
+
+/* ---------------------------- *
* Generic functions *
* ---------------------------- */
-static void send(unsigned int data) { sendv(&data, 1); }
+static void sendi(unsigned int data) { sendv(&data, 1); }
static void sendv(unsigned int data[], int ints) {
static unsigned char *buf = NULL;
@@ -474,8 +659,7 @@ static void error(char* err_msg) {
buffer[i++] = '\n';
/* try to use one write only */
- if(write(FD_ERR, buffer, i));
+ if(write(FD_ERR, buffer, i))
+ ;
exit(-1);
}
-
-
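With the additions above, the cpu_sup port program answers the load-average, process-count and utilization requests on the BSD family as well. From the Erlang side the API is unchanged; a minimal usage sketch (requires the os_mon application, values are examples):

    demo() ->
        ok = application:ensure_started(sasl),
        ok = application:ensure_started(os_mon),
        Avg1   = cpu_sup:avg1(),    %% load average scaled by 256, SunOS-style
        NProcs = cpu_sup:nprocs(),  %% number of OS processes
        Util   = cpu_sup:util(),    %% utilisation since the previous call, in percent
        {Avg1, NProcs, Util}.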
diff --git a/lib/os_mon/c_src/memsup.c b/lib/os_mon/c_src/memsup.c
index 409db84aa7..5dcab07dd8 100644
--- a/lib/os_mon/c_src/memsup.c
+++ b/lib/os_mon/c_src/memsup.c
@@ -104,7 +104,7 @@
#if !defined (__OpenBSD__) && !defined (__NetBSD__)
#include <vm/vm_param.h>
#endif
-#if defined (__FreeBSD__) || defined(__DragonFly__) || defined (__NetBSD__)
+#if defined (__FreeBSD__) || defined(__DragonFly__) || defined (__NetBSD__) || defined(__OpenBSD__)
#include <sys/vmmeter.h>
#endif
#endif
diff --git a/lib/os_mon/doc/src/cpu_sup.xml b/lib/os_mon/doc/src/cpu_sup.xml
index 59da876208..4a8f5bffa0 100644
--- a/lib/os_mon/doc/src/cpu_sup.xml
+++ b/lib/os_mon/doc/src/cpu_sup.xml
@@ -34,7 +34,7 @@
and CPU utilization. It is part of the OS_Mon application, see
<seealso marker="os_mon_app">os_mon(6)</seealso>. Available for Unix,
although CPU utilization values (<c>util/0,1</c>) are only
- available for Solaris and Linux.</p>
+ available for Solaris, Linux and FreeBSD.</p>
<p>The load values are proportional to how long time a runnable
Unix process has to spend in the run queue before it is scheduled.
Accordingly, higher values mean more system load. The returned
diff --git a/lib/os_mon/doc/src/notes.xml b/lib/os_mon/doc/src/notes.xml
index 6bc0cf7d43..d3acc1effc 100644
--- a/lib/os_mon/doc/src/notes.xml
+++ b/lib/os_mon/doc/src/notes.xml
@@ -30,6 +30,34 @@
</header>
<p>This document describes the changes made to the OS_Mon application.</p>
+<section><title>Os_Mon 2.3.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Do not crash with badmatch when integer part of loadavg
+ has more than 2 digits.</p>
+ <p>
+ Own Id: OTP-12581</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Fix compilation of memsup on OpenBSD.</p>
+ <p>
+ Own Id: OTP-12404</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Os_Mon 2.3</title>
<section><title>Improvements and New Features</title>
diff --git a/lib/os_mon/src/cpu_sup.erl b/lib/os_mon/src/cpu_sup.erl
index 1f088ecbde..d8cfd845bc 100644
--- a/lib/os_mon/src/cpu_sup.erl
+++ b/lib/os_mon/src/cpu_sup.erl
@@ -121,7 +121,7 @@ util(Args) when is_list (Args) ->
util(_) ->
erlang:error(badarg).
--spec util() -> float().
+-spec util() -> float() | {'error', any()}.
util() ->
case util([]) of
@@ -160,7 +160,8 @@ handle_call(?quit, _From, State) ->
handle_call({?util, D, PC}, {Client, _Tag},
#state{os_type = {unix, Flavor}} = State)
when Flavor == sunos;
- Flavor == linux ->
+ Flavor == linux;
+ Flavor == freebsd ->
case measurement_server_call(State#state.server, {?util, D, PC, Client}) of
{error, Reason} ->
{ reply,
@@ -217,11 +218,9 @@ code_change(_OldVsn, State, _Extra) ->
%% internal functions
%%----------------------------------------------------------------------
-get_uint32_measurement(Request, #internal{port = P, os_type = {unix, sunos}}) ->
- port_server_call(P, Request);
get_uint32_measurement(Request, #internal{os_type = {unix, linux}}) ->
{ok,F} = file:open("/proc/loadavg",[read,raw]),
- {ok,D} = file:read(F,24),
+ {ok,D} = file:read_line(F),
ok = file:close(F),
{ok,[Load1,Load5,Load15,_PRun,PTotal],_} = io_lib:fread("~f ~f ~f ~d/~d", D),
case Request of
@@ -231,67 +230,13 @@ get_uint32_measurement(Request, #internal{os_type = {unix, linux}}) ->
?ping -> 4711;
?nprocs -> PTotal
end;
-get_uint32_measurement(Request, #internal{os_type = {unix, freebsd}}) ->
- D = os:cmd("/sbin/sysctl -n vm.loadavg") -- "\n",
- {ok,[Load1,Load5,Load15],_} = io_lib:fread("{ ~f ~f ~f }", D),
- %% We could count the lines from the ps command as well
- case Request of
- ?avg1 -> sunify(Load1);
- ?avg5 -> sunify(Load5);
- ?avg15 -> sunify(Load15);
- ?ping -> 4711;
- ?nprocs ->
- Ps = os:cmd("/bin/ps -ax | /usr/bin/wc -l"),
- {ok, [N], _} = io_lib:fread("~d", Ps),
- N-1
- end;
-get_uint32_measurement(Request, #internal{os_type = {unix, dragonfly}}) ->
- D = os:cmd("/sbin/sysctl -n vm.loadavg") -- "\n",
- {ok,[Load1,Load5,Load15],_} = io_lib:fread("{ ~f ~f ~f }", D),
- %% We could count the lines from the ps command as well
- case Request of
- ?avg1 -> sunify(Load1);
- ?avg5 -> sunify(Load5);
- ?avg15 -> sunify(Load15);
- ?ping -> 4711;
- ?nprocs ->
- Ps = os:cmd("/bin/ps -ax | /usr/bin/wc -l"),
- {ok, [N], _} = io_lib:fread("~d", Ps),
- N-1
- end;
-get_uint32_measurement(Request, #internal{os_type = {unix, openbsd}}) ->
- D = os:cmd("/sbin/sysctl -n vm.loadavg") -- "\n",
- {ok, [L1, L5, L15], _} = io_lib:fread("~f ~f ~f", D),
- case Request of
- ?avg1 -> sunify(L1);
- ?avg5 -> sunify(L5);
- ?avg15 -> sunify(L15);
- ?ping -> 4711;
- ?nprocs ->
- Ps = os:cmd("/bin/ps -ax | /usr/bin/wc -l"),
- {ok, [N], _} = io_lib:fread("~d", Ps),
- N-1
- end;
-get_uint32_measurement(Request, #internal{os_type = {unix, darwin}}) ->
- %% Get the load average using uptime, overriding Locale setting.
- D = os:cmd("LANG=C LC_ALL=C uptime") -- "\n",
- %% Here is a sample uptime string from Mac OS 10.3.8 (C Locale):
- %% "11:17 up 12 days, 20:39, 2 users, load averages: 1.07 0.95 0.66"
- %% The safest way to extract the load averages seems to be grab everything
- %% after the last colon and then do an fread on that.
- Avg = lists:reverse(hd(string:tokens(lists:reverse(D), ":"))),
- {ok,[L1,L5,L15],_} = io_lib:fread("~f ~f ~f", Avg),
-
- case Request of
- ?avg1 -> sunify(L1);
- ?avg5 -> sunify(L5);
- ?avg15 -> sunify(L15);
- ?ping -> 4711;
- ?nprocs ->
- Ps = os:cmd("/bin/ps -ax | /usr/bin/wc -l"),
- {ok, [N], _} = io_lib:fread("~d", Ps),
- N-1
- end;
+get_uint32_measurement(Request, #internal{port = P, os_type = {unix, Sys}}) when
+ Sys == sunos;
+ Sys == dragonfly;
+ Sys == openbsd;
+ Sys == freebsd;
+ Sys == darwin ->
+ port_server_call(P, Request);
get_uint32_measurement(Request, #internal{os_type = {unix, Sys}}) when Sys == irix64;
Sys == irix ->
%% Get the load average using uptime.
@@ -541,14 +486,16 @@ measurement_server_init() ->
process_flag(trap_exit, true),
OS = os:type(),
Server = case OS of
- {unix, Flavor} when Flavor==sunos;
- Flavor==linux ->
- {ok, Pid} = port_server_start_link(),
- Pid;
- {unix, Flavor} when Flavor==darwin;
+ {unix, Flavor} when
+ Flavor==sunos;
+ Flavor==linux;
+ Flavor==darwin;
Flavor==freebsd;
Flavor==dragonfly;
- Flavor==openbsd;
+ Flavor==openbsd ->
+ {ok, Pid} = port_server_start_link(),
+ Pid;
+ {unix, Flavor} when
Flavor==irix64;
Flavor==irix ->
not_used;
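The Linux branch above now reads the whole first line of /proc/loadavg instead of a fixed 24 bytes, which is the OTP-12581 fix for load averages whose integer part has more than two digits. A self-contained sketch of that read path:

    read_loadavg() ->
        {ok, F} = file:open("/proc/loadavg", [read, raw]),
        {ok, Line} = file:read_line(F),
        ok = file:close(F),
        {ok, [L1, L5, L15, _Running, _Total], _} =
            io_lib:fread("~f ~f ~f ~d/~d", Line),
        {L1, L5, L15}.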
diff --git a/lib/os_mon/test/cpu_sup_SUITE.erl b/lib/os_mon/test/cpu_sup_SUITE.erl
index 9f58e043db..7da819379c 100644
--- a/lib/os_mon/test/cpu_sup_SUITE.erl
+++ b/lib/os_mon/test/cpu_sup_SUITE.erl
@@ -64,6 +64,8 @@ all() ->
[load_api, util_api, util_values, port, unavailable];
{unix, linux} ->
[load_api, util_api, util_values, port, unavailable];
+ {unix, freebsd} ->
+ [load_api, util_api, util_values, port, unavailable];
{unix, _OSname} -> [load_api];
_OS -> [unavailable]
end.
diff --git a/lib/os_mon/vsn.mk b/lib/os_mon/vsn.mk
index f90cc306f0..7f2667e40a 100644
--- a/lib/os_mon/vsn.mk
+++ b/lib/os_mon/vsn.mk
@@ -1 +1 @@
-OS_MON_VSN = 2.3
+OS_MON_VSN = 2.4
diff --git a/lib/parsetools/include/leexinc.hrl b/lib/parsetools/include/leexinc.hrl
index 938aef58f9..2657fdcfaa 100644
--- a/lib/parsetools/include/leexinc.hrl
+++ b/lib/parsetools/include/leexinc.hrl
@@ -44,6 +44,8 @@ string(Ics0, L0, Tcs, Ts) ->
%% Test for and remove the end token wrapper. Push back characters
%% are prepended to RestChars.
+-dialyzer({nowarn_function, string_cont/4}).
+
string_cont(Rest, Line, {token,T}, Ts) ->
string(Rest, Line, Rest, [T|Ts]);
string_cont(Rest, Line, {token,T,Push}, Ts) ->
@@ -113,6 +115,8 @@ token(S0, Ics0, L0, Tcs, Tlen0, Tline, A0, Alen0) ->
%% If we have a token or error then return done, else if we have a
%% skip_token then continue.
+-dialyzer({nowarn_function, token_cont/3}).
+
token_cont(Rest, Line, {token,T}) ->
{done,{ok,T,Line},Rest};
token_cont(Rest, Line, {token,T,Push}) ->
@@ -187,6 +191,8 @@ tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Ts, A0, Alen0) ->
%% a token then save it and continue, else if we have a skip_token
%% just continue.
+-dialyzer({nowarn_function, tokens_cont/4}).
+
tokens_cont(Rest, Line, {token,T}, Ts) ->
tokens(yystate(), Rest, Line, Rest, 0, Line, [T|Ts], reject, 0);
tokens_cont(Rest, Line, {token,T,Push}, Ts) ->
@@ -238,6 +244,8 @@ skip_tokens(S0, Ics0, L0, Tcs, Tlen0, Tline, Error, A0, Alen0) ->
%% Skip tokens until we have an end_token or error then return done
%% with the original error.
+-dialyzer({nowarn_function, skip_cont/4}).
+
skip_cont(Rest, Line, {token,_T}, Error) ->
skip_tokens(yystate(), Rest, Line, Rest, 0, Line, Error, reject, 0);
skip_cont(Rest, Line, {token,_T,Push}, Error) ->
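The new -dialyzer attributes above suppress warnings for individual functions in the generated scanner code. The attribute is generic; a minimal sketch of its use in an ordinary module (the table name is hypothetical):

    -module(nowarn_demo).
    -export([lookup/1]).

    %% Silence Dialyzer for this one function only; the rest of the module is
    %% still analysed as usual.
    -dialyzer({nowarn_function, lookup/1}).

    lookup(Key) ->
        case ets:lookup(some_table, Key) of
            [{_, Value}] -> {ok, Value};
            [] -> error
        end.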
diff --git a/lib/parsetools/include/yeccpre.hrl b/lib/parsetools/include/yeccpre.hrl
index 855bff5fdc..b9bba9a7c2 100644
--- a/lib/parsetools/include/yeccpre.hrl
+++ b/lib/parsetools/include/yeccpre.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -124,21 +124,10 @@ yecc_end(Line) ->
{'$end', Line}.
yecctoken_end_location(Token) ->
- try
- {text, Str} = erl_scan:token_info(Token, text),
- {line, Line} = erl_scan:token_info(Token, line),
- Parts = re:split(Str, "\n"),
- Dline = length(Parts) - 1,
- Yline = Line + Dline,
- case erl_scan:token_info(Token, column) of
- {column, Column} ->
- Col = byte_size(lists:last(Parts)),
- {Yline, Col + if Dline =:= 0 -> Column; true -> 1 end};
- undefined ->
- Yline
- end
- catch _:_ ->
- yecctoken_location(Token)
+ try erl_anno:end_location(element(2, Token)) of
+ undefined -> yecctoken_location(Token);
+ Loc -> Loc
+ catch _:_ -> yecctoken_location(Token)
end.
-compile({nowarn_unused_function, yeccerror/1}).
@@ -149,15 +138,15 @@ yeccerror(Token) ->
-compile({nowarn_unused_function, yecctoken_to_string/1}).
yecctoken_to_string(Token) ->
- case catch erl_scan:token_info(Token, text) of
- {text, Txt} -> Txt;
- _ -> yecctoken2string(Token)
+ try erl_scan:text(Token) of
+ undefined -> yecctoken2string(Token);
+ Txt -> Txt
+ catch _:_ -> yecctoken2string(Token)
end.
yecctoken_location(Token) ->
- case catch erl_scan:token_info(Token, location) of
- {location, Loc} -> Loc;
- _ -> element(2, Token)
+ try erl_scan:location(Token)
+ catch _:_ -> element(2, Token)
end.
-compile({nowarn_unused_function, yecctoken2string/1}).
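erl_scan:token_info/2 is gone in OTP 18; the header now uses the per-item accessors and erl_anno:end_location/1. A minimal sketch of the new accessors (assuming the text option is given when scanning):

    token_demo() ->
        {ok, [Tok | _], _} = erl_scan:string("foo", 1, [text]),
        Text = erl_scan:text(Tok),      %% "foo"
        Line = erl_scan:line(Tok),      %% 1
        Loc  = erl_scan:location(Tok),  %% 1, or {Line, Column} when columns are tracked
        End  = erl_anno:end_location(element(2, Tok)),  %% undefined if scanned without text
        {Text, Line, Loc, End}.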
diff --git a/lib/parsetools/src/leex.erl b/lib/parsetools/src/leex.erl
index 03f864ff03..15d42a4d9c 100644
--- a/lib/parsetools/src/leex.erl
+++ b/lib/parsetools/src/leex.erl
@@ -1545,7 +1545,7 @@ out_action_code(File, XrlFile, {_A,Code,_Vars,Name,Args,ArgsChars}) ->
%% Should set the file to the .erl file, but instead assumes that
%% ?LEEXINC is syntactically correct.
io:fwrite(File, "\n-compile({inline,~w/~w}).\n", [Name, length(Args)]),
- {line, L} = erl_scan:token_info(hd(Code), line),
+ L = erl_scan:line(hd(Code)),
output_file_directive(File, XrlFile, L-2),
io:fwrite(File, "~s(~s) ->~n", [Name, ArgsChars]),
io:fwrite(File, " ~s\n", [pp_tokens(Code, L)]).
@@ -1557,7 +1557,7 @@ pp_tokens(Tokens, Line0) -> pp_tokens(Tokens, Line0, none).
pp_tokens([], _Line0, _) -> [];
pp_tokens([T | Ts], Line0, Prev) ->
- {line, Line} = erl_scan:token_info(T, line),
+ Line = erl_scan:line(T),
[pp_sep(Line, Line0, Prev, T), pp_symbol(T) | pp_tokens(Ts, Line, T)].
pp_symbol({var,_,Var}) -> atom_to_list(Var);
diff --git a/lib/parsetools/src/parsetools.app.src b/lib/parsetools/src/parsetools.app.src
index 9eeb8fcc05..a7b258820a 100644
--- a/lib/parsetools/src/parsetools.app.src
+++ b/lib/parsetools/src/parsetools.app.src
@@ -12,7 +12,7 @@
{env, [{file_util_search_methods,[{"", ""}, {"ebin", "esrc"}, {"ebin", "src"}]}
]
},
- {runtime_dependencies, ["stdlib-2.0","kernel-3.0","erts-6.0"]}
+ {runtime_dependencies, ["stdlib-2.5","kernel-3.0","erts-6.0"]}
]
}.
diff --git a/lib/parsetools/src/yecc.erl b/lib/parsetools/src/yecc.erl
index f4657663e6..3fcec73ce2 100644
--- a/lib/parsetools/src/yecc.erl
+++ b/lib/parsetools/src/yecc.erl
@@ -2064,11 +2064,13 @@ output_actions(St0, StateJumps, StateInfo) ->
SelS = [{State,Called} ||
{{State,_JActions}, {State,Called}} <-
lists:zip(StateJumps, lists:keysort(1, Sel))],
+ St05 =
+ fwrite(St0, <<"-dialyzer({nowarn_function, yeccpars2/7}).\n">>, []),
St10 = foldl(fun({State, Called}, St_0) ->
{State, #state_info{state_repr = IState}} =
lookup_state(StateInfo, State),
output_state_selection(St_0, State, IState, Called)
- end, St0, SelS),
+ end, St05, SelS),
St20 = fwrite(St10, <<"yeccpars2(Other, _, _, _, _, _, _) ->\n">>, []),
St = fwrite(St20,
?YECC_BUG(<<"{missing_state_in_action_table, Other}">>, []),
@@ -2089,7 +2091,8 @@ output_state_selection(St0, State, IState, Called) ->
[Comment, IState]).
output_state_actions(St, State, State, {Actions,jump_none}, SI) ->
- output_state_actions1(St, State, Actions, true, normal, SI);
+ St1 = output_state_actions_begin(St, State, Actions),
+ output_state_actions1(St1, State, Actions, true, normal, SI);
output_state_actions(St0, State, State, {Actions, Jump}, SI) ->
{Tag, To, Common} = Jump,
CS = case Tag of
@@ -2099,13 +2102,22 @@ output_state_actions(St0, State, State, {Actions, Jump}, SI) ->
St = output_state_actions1(St0, State, Actions, true, {to, CS}, SI),
if
To =:= State ->
- output_state_actions1(St, CS, Common, true, normal, SI);
+ St1 = output_state_actions_begin(St, State, Actions),
+ output_state_actions1(St1, CS, Common, true, normal, SI);
true ->
St
end;
output_state_actions(St, State, JState, _XActions, _SI) ->
fwrite(St, <<"%% yeccpars2_~w: see yeccpars2_~w\n\n">>, [State, JState]).
+output_state_actions_begin(St, State, Actions) ->
+ case [yes || {_, #reduce{}} <- Actions] of
+ [] ->
+ fwrite(St, <<"-dialyzer({nowarn_function, yeccpars2_~w/7}).\n">>,
+ [State]); % Only when yeccerror(T) is output.
+ _ -> St
+ end.
+
output_state_actions1(St, State, [], IsFirst, normal, _SI) ->
output_state_actions_fini(State, IsFirst, St);
output_state_actions1(St0, State, [], IsFirst, {to, ToS}, _SI) ->
diff --git a/lib/parsetools/src/yeccgramm.yrl b/lib/parsetools/src/yeccgramm.yrl
index 562a9a7458..d76ebfc569 100644
--- a/lib/parsetools/src/yeccgramm.yrl
+++ b/lib/parsetools/src/yeccgramm.yrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,8 +38,8 @@ rule -> head '->' symbols attached_code dot: {rule, ['$1' | '$3'], '$4'}.
head -> symbol : '$1'.
symbols -> symbol : ['$1'].
symbols -> symbol symbols : ['$1' | '$2'].
-strings -> string : ['$1'].
-strings -> string strings : ['$1' | '$2'].
+strings -> string : [string('$1')].
+strings -> string strings : [string('$1') | '$2'].
attached_code -> ':' tokens : {erlang_code, '$2'}.
attached_code -> '$empty' : {erlang_code, [{atom, 0, '$undefined'}]}.
tokens -> token : ['$1'].
@@ -48,12 +48,12 @@ symbol -> var : symbol('$1').
symbol -> atom : symbol('$1').
symbol -> integer : symbol('$1').
symbol -> reserved_word : symbol('$1').
-token -> var : '$1'.
-token -> atom : '$1'.
-token -> float : '$1'.
-token -> integer : '$1'.
-token -> string : '$1'.
-token -> char : '$1'.
+token -> var : token('$1').
+token -> atom : token('$1').
+token -> float : token('$1').
+token -> integer : token('$1').
+token -> string : token('$1').
+token -> char : token('$1').
token -> reserved_symbol : {value_of('$1'), line_of('$1')}.
token -> reserved_word : {value_of('$1'), line_of('$1')}.
token -> '->' : {'->', line_of('$1')}. % Have to be treated in this
@@ -67,8 +67,14 @@ Erlang code.
symbol(Symbol) ->
#symbol{line = line_of(Symbol), name = value_of(Symbol)}.
+token(Token) ->
+ setelement(2, Token, line_of(Token)).
+
+string(Token) ->
+ setelement(2, Token, line_of(Token)).
+
value_of(Token) ->
element(3, Token).
line_of(Token) ->
- element(2, Token).
+ erl_anno:line(element(2, Token)).
diff --git a/lib/parsetools/src/yeccparser.erl b/lib/parsetools/src/yeccparser.erl
index 54f9ba5a58..fa0a1c4e2a 100644
--- a/lib/parsetools/src/yeccparser.erl
+++ b/lib/parsetools/src/yeccparser.erl
@@ -7,17 +7,23 @@
symbol(Symbol) ->
#symbol{line = line_of(Symbol), name = value_of(Symbol)}.
+token(Token) ->
+ setelement(2, Token, line_of(Token)).
+
+string(Token) ->
+ setelement(2, Token, line_of(Token)).
+
value_of(Token) ->
element(3, Token).
line_of(Token) ->
- element(2, Token).
+ erl_anno:line(element(2, Token)).
--file("/clearcase/otp/erts/lib/parsetools/include/yeccpre.hrl", 0).
+-file("lib/parsetools/include/yeccpre.hrl", 0).
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -44,10 +50,11 @@ parse(Tokens) ->
-spec parse_and_scan({function() | {atom(), atom()}, [_]}
| {atom(), atom(), [_]}) -> yecc_ret().
-parse_and_scan({F, A}) -> % Fun or {M, F}
+parse_and_scan({F, A}) ->
yeccpars0([], {{F, A}, no_line}, 0, [], []);
parse_and_scan({M, F, A}) ->
- yeccpars0([], {{{M, F}, A}, no_line}, 0, [], []).
+ Arity = length(A),
+ yeccpars0([], {{fun M:F/Arity, A}, no_line}, 0, [], []).
-spec format_error(any()) -> [char() | list()].
format_error(Message) ->
@@ -140,13 +147,13 @@ yecc_end(Line) ->
yecctoken_end_location(Token) ->
try
- {text, Str} = erl_scan:token_info(Token, text),
- {line, Line} = erl_scan:token_info(Token, line),
+ Str = erl_scan:text(Token),
+ Line = erl_scan:line(Token),
Parts = re:split(Str, "\n"),
Dline = length(Parts) - 1,
Yline = Line + Dline,
- case erl_scan:token_info(Token, column) of
- {column, Column} ->
+ case erl_scan:column(Token) of
+ Column when is_integer(Column) ->
Col = byte_size(lists:last(Parts)),
{Yline, Col + if Dline =:= 0 -> Column; true -> 1 end};
undefined ->
@@ -156,23 +163,26 @@ yecctoken_end_location(Token) ->
yecctoken_location(Token)
end.
+-compile({nowarn_unused_function, yeccerror/1}).
yeccerror(Token) ->
Text = yecctoken_to_string(Token),
Location = yecctoken_location(Token),
{error, {Location, ?MODULE, ["syntax error before: ", Text]}}.
+-compile({nowarn_unused_function, yecctoken_to_string/1}).
yecctoken_to_string(Token) ->
- case catch erl_scan:token_info(Token, text) of
- {text, Txt} -> Txt;
+ case catch erl_scan:text(Token) of
+ Txt when is_list(Txt) -> Txt;
_ -> yecctoken2string(Token)
end.
yecctoken_location(Token) ->
- case catch erl_scan:token_info(Token, location) of
- {location, Loc} -> Loc;
+ case catch erl_scan:location(Token) of
+ Loc when Loc =/= undefined -> Loc;
_ -> element(2, Token)
end.
+-compile({nowarn_unused_function, yecctoken2string/1}).
yecctoken2string({atom, _, A}) -> io_lib:write(A);
yecctoken2string({integer,_,N}) -> io_lib:write(N);
yecctoken2string({float,_,F}) -> io_lib:write(F);
@@ -180,7 +190,7 @@ yecctoken2string({char,_,C}) -> io_lib:write_char(C);
yecctoken2string({var,_,V}) -> io_lib:format("~s", [V]);
yecctoken2string({string,_,S}) -> io_lib:write_string(S);
yecctoken2string({reserved_symbol, _, A}) -> io_lib:write(A);
-yecctoken2string({_Cat, _, Val}) -> io_lib:write(Val);
+yecctoken2string({_Cat, _, Val}) -> io_lib:format("~p",[Val]);
yecctoken2string({dot, _}) -> "'.'";
yecctoken2string({'$end', _}) ->
[];
@@ -193,7 +203,7 @@ yecctoken2string(Other) ->
--file("yeccparser.erl", 196).
+-file("yeccgramm.erl", 207).
yeccpars2(0=S, Cat, Ss, Stack, T, Ts, Tzr) ->
yeccpars2_0(S, Cat, Ss, Stack, T, Ts, Tzr);
@@ -268,7 +278,7 @@ yeccpars2(34=S, Cat, Ss, Stack, T, Ts, Tzr) ->
yeccpars2(35=S, Cat, Ss, Stack, T, Ts, Tzr) ->
yeccpars2_35(S, Cat, Ss, Stack, T, Ts, Tzr);
yeccpars2(Other, _, _, _, _, _, _) ->
- erlang:error({yecc_bug,"1.3",{missing_state_in_action_table, Other}}).
+ erlang:error({yecc_bug,"1.4",{missing_state_in_action_table, Other}}).
yeccpars2_0(S, atom, Ss, Stack, T, Ts, Tzr) ->
yeccpars1(S, 6, Ss, Stack, T, Ts, Tzr);
@@ -417,16 +427,20 @@ yeccpars2_19(_S, Cat, Ss, Stack, T, Ts, Tzr) ->
yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr).
yeccpars2_20(_S, Cat, Ss, Stack, T, Ts, Tzr) ->
- yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr).
+ NewStack = yeccpars2_20_(Stack),
+ yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr).
yeccpars2_21(_S, Cat, Ss, Stack, T, Ts, Tzr) ->
- yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr).
+ NewStack = yeccpars2_21_(Stack),
+ yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr).
yeccpars2_22(_S, Cat, Ss, Stack, T, Ts, Tzr) ->
- yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr).
+ NewStack = yeccpars2_22_(Stack),
+ yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr).
yeccpars2_23(_S, Cat, Ss, Stack, T, Ts, Tzr) ->
- yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr).
+ NewStack = yeccpars2_23_(Stack),
+ yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr).
yeccpars2_24(_S, Cat, Ss, Stack, T, Ts, Tzr) ->
NewStack = yeccpars2_24_(Stack),
@@ -437,10 +451,12 @@ yeccpars2_25(_S, Cat, Ss, Stack, T, Ts, Tzr) ->
yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr).
yeccpars2_26(_S, Cat, Ss, Stack, T, Ts, Tzr) ->
- yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr).
+ NewStack = yeccpars2_26_(Stack),
+ yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr).
yeccpars2_27(_S, Cat, Ss, Stack, T, Ts, Tzr) ->
- yeccgoto_token(hd(Ss), Cat, Ss, Stack, T, Ts, Tzr).
+ NewStack = yeccpars2_27_(Stack),
+ yeccgoto_token(hd(Ss), Cat, Ss, NewStack, T, Ts, Tzr).
yeccpars2_28(_S, Cat, Ss, Stack, T, Ts, Tzr) ->
[_|Nss] = Ss,
@@ -616,6 +632,38 @@ yeccpars2_19_(__Stack0) ->
{ ':' , line_of ( __1 ) }
end | __Stack].
+-compile({inline,yeccpars2_20_/1}).
+-file("yeccgramm.yrl", 48).
+yeccpars2_20_(__Stack0) ->
+ [__1 | __Stack] = __Stack0,
+ [begin
+ token ( __1 )
+ end | __Stack].
+
+-compile({inline,yeccpars2_21_/1}).
+-file("yeccgramm.yrl", 52).
+yeccpars2_21_(__Stack0) ->
+ [__1 | __Stack] = __Stack0,
+ [begin
+ token ( __1 )
+ end | __Stack].
+
+-compile({inline,yeccpars2_22_/1}).
+-file("yeccgramm.yrl", 49).
+yeccpars2_22_(__Stack0) ->
+ [__1 | __Stack] = __Stack0,
+ [begin
+ token ( __1 )
+ end | __Stack].
+
+-compile({inline,yeccpars2_23_/1}).
+-file("yeccgramm.yrl", 50).
+yeccpars2_23_(__Stack0) ->
+ [__1 | __Stack] = __Stack0,
+ [begin
+ token ( __1 )
+ end | __Stack].
+
-compile({inline,yeccpars2_24_/1}).
-file("yeccgramm.yrl", 53).
yeccpars2_24_(__Stack0) ->
@@ -632,6 +680,22 @@ yeccpars2_25_(__Stack0) ->
{ value_of ( __1 ) , line_of ( __1 ) }
end | __Stack].
+-compile({inline,yeccpars2_26_/1}).
+-file("yeccgramm.yrl", 51).
+yeccpars2_26_(__Stack0) ->
+ [__1 | __Stack] = __Stack0,
+ [begin
+ token ( __1 )
+ end | __Stack].
+
+-compile({inline,yeccpars2_27_/1}).
+-file("yeccgramm.yrl", 47).
+yeccpars2_27_(__Stack0) ->
+ [__1 | __Stack] = __Stack0,
+ [begin
+ token ( __1 )
+ end | __Stack].
+
-compile({inline,yeccpars2_28_/1}).
-file("yeccgramm.yrl", 42).
yeccpars2_28_(__Stack0) ->
@@ -653,7 +717,7 @@ yeccpars2_29_(__Stack0) ->
yeccpars2_32_(__Stack0) ->
[__1 | __Stack] = __Stack0,
[begin
- [ __1 ]
+ [ string ( __1 ) ]
end | __Stack].
-compile({inline,yeccpars2_33_/1}).
@@ -661,7 +725,7 @@ yeccpars2_32_(__Stack0) ->
yeccpars2_33_(__Stack0) ->
[__2,__1 | __Stack] = __Stack0,
[begin
- [ __1 | __2 ]
+ [ string ( __1 ) | __2 ]
end | __Stack].
-compile({inline,yeccpars2_34_/1}).
@@ -681,4 +745,4 @@ yeccpars2_35_(__Stack0) ->
end | __Stack].
--file("yeccgramm.yrl", 75).
+-file("yeccgramm.yrl", 82).
diff --git a/lib/parsetools/test/yecc_SUITE.erl b/lib/parsetools/test/yecc_SUITE.erl
index d308d21f82..b8d658e5c2 100644
--- a/lib/parsetools/test/yecc_SUITE.erl
+++ b/lib/parsetools/test/yecc_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -340,8 +340,8 @@ syntax(Config) when is_list(Config) ->
{_,[{L1,_,{undefined_function,{yeccpars2_2_,1}}},
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[]} = compile:file(Parserfile1, [basic_validation,return]),
- ?line L1 = 28 + SzYeccPre,
- ?line L2 = 35 + SzYeccPre
+ ?line L1 = 31 + SzYeccPre,
+ ?line L2 = 38 + SzYeccPre
end(),
%% Bad macro in action. OTP-7224.
@@ -358,8 +358,8 @@ syntax(Config) when is_list(Config) ->
{_,[{L1,_,{undefined_function,{yeccpars2_2_,1}}},
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[]} = compile:file(Parserfile1, [basic_validation,return]),
- ?line L1 = 28 + SzYeccPre,
- ?line L2 = 35 + SzYeccPre
+ ?line L1 = 31 + SzYeccPre,
+ ?line L2 = 38 + SzYeccPre
end(),
%% Check line numbers. OTP-7224.
@@ -1521,7 +1521,9 @@ otp_7945(doc) ->
"OTP-7945. A bug introduced in R13A.";
otp_7945(suite) -> [];
otp_7945(Config) when is_list(Config) ->
- ?line {error,_} = erl_parse:parse([{atom,3,foo},{'.',2,9,9}]),
+ A2 = erl_anno:new(2),
+ A3 = erl_anno:new(3),
+ {error,_} = erl_parse:parse([{atom,3,foo},{'.',A2,9,9}]),
ok.
otp_8483(doc) ->
@@ -1619,8 +1621,8 @@ otp_7292(Config) when is_list(Config) ->
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[{_,[{16,_,{unused_function,{foo,0}}}]}]} =
compile:file(Parserfile1, [basic_validation, return]),
- ?line L1 = 38 + SzYeccPre,
- ?line L2 = 45 + SzYeccPre
+ L1 = 41 + SzYeccPre,
+ L2 = 48 + SzYeccPre
end(),
YeccPre = filename:join(Dir, "yeccpre.hrl"),
@@ -1637,8 +1639,8 @@ otp_7292(Config) when is_list(Config) ->
{L2,_,{bad_inline,{yeccpars2_2_,1}}}]}],
[{_,[{16,_,{unused_function,{foo,0}}}]}]} =
compile:file(Parserfile1, [basic_validation, return]),
- ?line L1 = 37 + SzYeccPre,
- ?line L2 = 44 + SzYeccPre
+ ?line L1 = 40 + SzYeccPre,
+ ?line L2 = 47 + SzYeccPre
end(),
file:delete(YeccPre),
@@ -1786,7 +1788,8 @@ otp_7969(Config) when is_list(Config) ->
?line {ok, Ts11, _}=R1 = erl_scan:string("f() -> a."),
?line F1 = fun() -> {ok,Ts11 ++ [{'$end',2}],2} end,
- ?line{ok,{function,1,f,0,[{clause,1,[],[],[{atom,1,a}]}]}} =
+ A1 = erl_anno:new(1),
+ {ok,{function,A1,f,0,[{clause,A1,[],[],[{atom,A1,a}]}]}} =
erl_parse:parse_and_scan({F1, []}),
?line F2 = fun() -> erl_scan:string("f() -> ,") end,
?line {error,{1,erl_parse,_}} = erl_parse:parse_and_scan({F2, []}),
@@ -1797,7 +1800,7 @@ otp_7969(Config) when is_list(Config) ->
put(foo,bar), R1
end
end,
- ?line {ok,{function,1,f,0,[{clause,1,[],[],[{atom,1,a}]}]}} =
+ {ok,{function,A1,f,0,[{clause,A1,[],[],[{atom,A1,a}]}]}} =
erl_parse:parse_and_scan({F3,[]}),
F4 = fun() -> {error, {1, ?MODULE, bad}, 2} end,
?line {error, {1,?MODULE,bad}} = erl_parse:parse_and_scan({F4, []}),
@@ -1813,7 +1816,8 @@ otp_8919(doc) ->
"OTP-8919. Improve formating of Yecc error messages.";
otp_8919(suite) -> [];
otp_8919(Config) when is_list(Config) ->
- {error,{1,Mod,Mess}} = erl_parse:parse([{cat,1,"hello"}]),
+ A1 = erl_anno:new(1),
+ {error,{1,Mod,Mess}} = erl_parse:parse([{cat,A1,"hello"}]),
"syntax error before: \"hello\"" = lists:flatten(Mod:format_error(Mess)),
ok.
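The test updates above construct token annotations with erl_anno:new/1 rather than bare integers. A minimal sketch that exercises the same path as otp_8919:

    anno_demo() ->
        A1 = erl_anno:new(1),
        {error, {1, Mod, Msg}} = erl_parse:parse([{cat, A1, "hello"}]),
        lists:flatten(Mod:format_error(Msg)).  %% "syntax error before: \"hello\""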
diff --git a/lib/parsetools/vsn.mk b/lib/parsetools/vsn.mk
index dd9cc2991c..b99b3bb713 100644
--- a/lib/parsetools/vsn.mk
+++ b/lib/parsetools/vsn.mk
@@ -1 +1 @@
-PARSETOOLS_VSN = 2.0.12
+PARSETOOLS_VSN = 2.1
diff --git a/lib/percept/src/percept.erl b/lib/percept/src/percept.erl
index 3a2d9f7601..135e20774e 100644
--- a/lib/percept/src/percept.erl
+++ b/lib/percept/src/percept.erl
@@ -319,10 +319,6 @@ get_webserver_config(Servername, Port) when is_list(Servername), is_integer(Port
{alias,{"/images/", filename:join([Root, "images"]) ++ "/"}},
{alias,{"/css/", filename:join([Root, "css"]) ++ "/"}},
- % Logs
- %{transfer_log, filename:join([Path, "logs", "transfer.log"])},
- %{error_log, filename:join([Path, "logs", "error.log"])},
-
% Configs
{default_type,"text/plain"},
{directory_index,["index.html"]},
@@ -331,12 +327,9 @@ get_webserver_config(Servername, Port) when is_list(Servername), is_integer(Port
mod_esi,
mod_actions,
mod_cgi,
- mod_include,
mod_dir,
mod_get,
mod_head
- % mod_log,
- % mod_disk_log
]},
{com_type,ip_comm},
{server_name, Servername},
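The percept change above trims the httpd property list: the commented-out log properties and mod_include go away. For context, a minimal inets httpd configuration of the same shape (hypothetical port and paths, not percept's actual values):

    start_httpd(Root) ->
        ok = application:ensure_started(inets),
        inets:start(httpd,
                    [{port, 8888},
                     {server_name, "demo"},
                     {server_root, Root},
                     {document_root, filename:join(Root, "htdocs")},
                     {modules, [mod_alias, mod_esi, mod_actions, mod_cgi,
                                mod_dir, mod_get, mod_head]},
                     {mime_types, [{"html", "text/html"}]},
                     {default_type, "text/plain"},
                     {directory_index, ["index.html"]}]).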
diff --git a/lib/public_key/asn1/Makefile b/lib/public_key/asn1/Makefile
index c1b3bc866d..11b03dc2f7 100644
--- a/lib/public_key/asn1/Makefile
+++ b/lib/public_key/asn1/Makefile
@@ -66,7 +66,7 @@ EBIN = ../ebin
EXTRA_ERLC_FLAGS =
ERL_COMPILE_FLAGS += $(EXTRA_ERLC_FLAGS)
-ASN_FLAGS = -bber +der +compact_bit_string +noobj +asn1config
+ASN_FLAGS = -bber +der +noobj +asn1config
# ----------------------------------------------------
# Targets
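Dropping +compact_bit_string here means the generated decoders return BIT STRING values as ordinary Erlang bitstrings rather than the legacy {Unused, Binary} pairs (an assumption about the default asn1 representation, not public_key code). A minimal sketch of testing a bit in such a value:

    %% true if bit N (0-based, most significant first) is present and set
    bit_set(Bits, N) when is_bitstring(Bits), is_integer(N), N >= 0 ->
        case Bits of
            <<_:N, 1:1, _/bitstring>> -> true;
            _ -> false
        end.

    %% e.g. bit_set(<<2#101:3>>, 0) =:= true, bit_set(<<2#101:3>>, 1) =:= false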
diff --git a/lib/public_key/doc/src/Makefile b/lib/public_key/doc/src/Makefile
index 17fb67e95c..d04819b5aa 100644
--- a/lib/public_key/doc/src/Makefile
+++ b/lib/public_key/doc/src/Makefile
@@ -42,8 +42,7 @@ XML_REF6_FILES =
XML_PART_FILES = part.xml part_notes.xml
XML_CHAPTER_FILES = \
introduction.xml \
- public_key_records.xml \
- cert_records.xml \
+ public_key_records.xml \
using_public_key.xml \
notes.xml
diff --git a/lib/public_key/doc/src/cert_records.xml b/lib/public_key/doc/src/cert_records.xml
deleted file mode 100644
index b66c66bead..0000000000
--- a/lib/public_key/doc/src/cert_records.xml
+++ /dev/null
@@ -1,690 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE chapter SYSTEM "chapter.dtd">
-
-<chapter>
- <header>
- <copyright>
- <year>2008</year>
- <year>2014</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- The contents of this file are subject to the Erlang Public License,
- Version 1.1, (the "License"); you may not use this file except in
- compliance with the License. You should have received a copy of the
- Erlang Public License along with this software. If not, it can be
- retrieved online at http://www.erlang.org/.
-
- Software distributed under the License is distributed on an "AS IS"
- basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
- the License for the specific language governing rights and limitations
- under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Certificate records</title>
- <prepared>Ingela Anderton Andin</prepared>
- <responsible></responsible>
- <docno></docno>
- <approved></approved>
- <checked></checked>
- <date>2008-02-06</date>
- <rev>A</rev>
- <file>cert_records.xml</file>
- </header>
-
- <p>This chapter briefly describes erlang records derived from ASN1
- specifications used to handle <c> X509 certificates</c> and <c>CertificationRequest</c>.
- The intent is to describe the data types
-and not to specify the semantics of each component. For information on the
-semantics, please see <url
- href="http://www.ietf.org/rfc/rfc5280.txt">RFC 5280</url> and
- <url href="http://www.ietf.org/rfc/rfc5967.txt">PKCS-10</url>.
- </p>
-
- <p>Use the following include directive to get access to the
- records and constant macros (OIDs) described in the following sections.</p>
-
- <code> -include_lib("public_key/include/public_key.hrl"). </code>
-
- <p>The used ASN1 specifications are available <c>asn1</c> subdirectory
- of the application <c>public_key</c>.
- </p>
-
- <section>
- <title>Common Data Types</title>
-
- <p>Common non standard erlang
- data types used to described the record fields in the
- below sections are defined in <seealso
- marker="public_key">public key reference manual </seealso> or
- follows here.</p>
-
- <p><c>time() = uct_time() | general_time()</c></p>
-
- <p><c>uct_time() = {utcTime, "YYMMDDHHMMSSZ"} </c></p>
-
- <p><c>general_time() = {generalTime, "YYYYMMDDHHMMSSZ"} </c></p>
-
- <p><c>
- general_name() = {rfc822Name, string()} | {dNSName, string()}
- | {x400Address, string()} | {directoryName,
- {rdnSequence, [#AttributeTypeAndValue'{}]}} |
- | {eidPartyName, special_string()}
- | {eidPartyName, special_string(), special_string()}
- | {uniformResourceIdentifier, string()} | {ipAddress, string()} |
- {registeredId, oid()} | {otherName, term()}
- </c></p>
-
- <p><c>
- special_string() =
- {teletexString, string()} | {printableString, string()} |
- {universalString, string()} | {utf8String, binary()} |
- {bmpString, string()}
- </c></p>
-
- <p><c>
- dist_reason() = unused | keyCompromise | cACompromise |
- affiliationChanged | superseded | cessationOfOperation |
- certificateHold | privilegeWithdrawn |
- aACompromise
- </c></p>
- </section>
-
- <section>
- <title> PKIX Certificates</title>
-<code>
-#'Certificate'{
- tbsCertificate, % #'TBSCertificate'{}
- signatureAlgorithm, % #'AlgorithmIdentifier'{}
- signature % {0, binary()} - ASN1 compact bitstring
- }.
-
-#'TBSCertificate'{
- version, % v1 | v2 | v3
- serialNumber, % integer()
- signature, % #'AlgorithmIdentifier'{}
- issuer, % {rdnSequence, [#AttributeTypeAndValue'{}]}
- validity, % #'Validity'{}
- subject, % {rdnSequence, [#AttributeTypeAndValue'{}]}
- subjectPublicKeyInfo, % #'SubjectPublicKeyInfo'{}
- issuerUniqueID, % binary() | asn1_novalue
- subjectUniqueID, % binary() | asn1_novalue
- extensions % [#'Extension'{}]
- }.
-
-#'AlgorithmIdentifier'{
- algorithm, % oid()
- parameters % der_encoded()
- }.
-</code>
-
-<code>
-#'OTPCertificate'{
- tbsCertificate, % #'OTPTBSCertificate'{}
- signatureAlgorithm, % #'SignatureAlgorithm'
- signature % {0, binary()} - ASN1 compact bitstring
- }.
-
-#'OTPTBSCertificate'{
- version, % v1 | v2 | v3
- serialNumber, % integer()
- signature, % #'SignatureAlgorithm'
- issuer, % {rdnSequence, [#AttributeTypeAndValue'{}]}
- validity, % #'Validity'{}
- subject, % {rdnSequence, [#AttributeTypeAndValue'{}]}
- subjectPublicKeyInfo, % #'OTPSubjectPublicKeyInfo'{}
- issuerUniqueID, % binary() | asn1_novalue
- subjectUniqueID, % binary() | asn1_novalue
- extensions % [#'Extension'{}]
- }.
-
-#'SignatureAlgorithm'{
- algorithm, % id_signature_algorithm()
- parameters % asn1_novalue | #'Dss-Parms'{}
- }.
-</code>
-
-<p><c> id_signature_algorithm() = ?oid_name_as_erlang_atom</c> for available
-oid names see table below. Ex: ?'id-dsa-with-sha1'</p>
-<table>
- <row>
- <cell align="left" valign="middle">OID name</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-dsa-with-sha1</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-dsaWithSHA1 (ISO alt oid to above)</cell>
- </row>
- <row>
- <cell align="left" valign="middle">md2WithRSAEncryption</cell>
- </row>
- <row>
- <cell align="left" valign="middle">md5WithRSAEncryption</cell>
- </row>
- <row>
- <cell align="left" valign="middle">sha1WithRSAEncryption</cell>
- </row>
- <row>
- <cell align="left" valign="middle">sha-1WithRSAEncryption (ISO alt oid to above)</cell>
- </row>
- <row>
- <cell align="left" valign="middle">sha224WithRSAEncryption</cell>
- </row>
- <row>
- <cell align="left" valign="middle">sha256WithRSAEncryption</cell>
- </row>
- <row>
- <cell align="left" valign="middle">sha512WithRSAEncryption</cell>
- </row>
- <row>
- <cell align="left" valign="middle">ecdsa-with-SHA1</cell>
- </row>
- <tcaption>Signature algorithm oids </tcaption>
-</table>
-
-<code>
-#'AttributeTypeAndValue'{
- type, % id_attributes()
- value % term()
- }.
-</code>
-
-<p><c>id_attributes() </c></p>
-<table>
- <row>
- <cell align="left" valign="middle">OID name</cell>
- <cell align="left" valign="middle">Value type</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-name</cell>
- <cell align="left" valign="middle">special_string()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-surname</cell>
- <cell align="left" valign="middle">special_string()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-givenName</cell>
- <cell align="left" valign="middle">special_string()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-initials </cell>
- <cell align="left" valign="middle">special_string()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-generationQualifier</cell>
- <cell align="left" valign="middle">special_string()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-commonName</cell>
- <cell align="left" valign="middle">special_string()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-localityName</cell>
- <cell align="left" valign="middle">special_string()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-stateOrProvinceName</cell>
- <cell align="left" valign="middle">special_string()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-organizationName</cell>
- <cell align="left" valign="middle">special_string()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-title</cell>
- <cell align="left" valign="middle">special_string()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-dnQualifier</cell>
- <cell align="left" valign="middle">{printableString, string()}</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-countryName</cell>
- <cell align="left" valign="middle">{printableString, string()}</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-serialNumber</cell>
- <cell align="left" valign="middle">{printableString, string()}</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-at-pseudonym</cell>
- <cell align="left" valign="middle">special_string()</cell>
- </row>
- <tcaption>Attribute oids </tcaption>
-</table>
-
-<code>
-#'Validity'{
- notBefore, % time()
- notAfter % time()
- }.
-
-#'SubjectPublicKeyInfo'{
- algorithm, % #AlgorithmIdentifier{}
- subjectPublicKey % binary()
- }.
-
-#'SubjectPublicKeyInfoAlgorithm'{
- algorithm, % id_public_key_algorithm()
- parameters % public_key_params()
- }.
-</code>
-
-<p><c> id_public_key_algorithm() </c></p>
-<table>
- <row>
- <cell align="left" valign="middle">OID name</cell>
- </row>
- <row>
- <cell align="left" valign="middle">rsaEncryption</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-dsa</cell>
- </row>
- <row>
- <cell align="left" valign="middle">dhpublicnumber</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-keyExchangeAlgorithm</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ecPublicKey</cell>
- </row>
- <tcaption>Public key algorithm oids </tcaption>
-</table>
-
-<code>
-#'Extension'{
- extnID, % id_extensions() | oid()
- critical, % boolean()
- extnValue % der_encoded()
- }.
-</code>
-
-<p><c>id_extensions()</c>
- <seealso marker="#StdCertExt">Standard Certificate Extensions</seealso>,
- <seealso marker="#PrivIntExt">Private Internet Extensions</seealso>,
- <seealso marker="#CRLCertExt">CRL Extensions</seealso> and
- <seealso marker="#CRLEntryExt">CRL Entry Extensions</seealso>.
-</p>
-
-</section>
-
-<section>
- <marker id="StdCertExt"></marker>
- <title>Standard certificate extensions</title>
-
- <table>
- <row>
- <cell align="left" valign="middle">OID name</cell>
- <cell align="left" valign="middle">Value type</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-authorityKeyIdentifier</cell>
- <cell align="left" valign="middle">#'AuthorityKeyIdentifier'{}</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-subjectKeyIdentifier</cell>
- <cell align="left" valign="middle">oid()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-keyUsage</cell>
- <cell align="left" valign="middle"> [key_usage()]</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-privateKeyUsagePeriod</cell>
- <cell align="left" valign="middle">#'PrivateKeyUsagePeriod'{}</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-certificatePolicies</cell>
- <cell align="left" valign="middle">#'PolicyInformation'{}</cell>
- </row>
-
- <row>
- <cell align="left" valign="middle">id-ce-policyMappings</cell>
- <cell align="left" valign="middle">#'PolicyMappings_SEQOF'{}</cell>
- </row>
-
- <row>
- <cell align="left" valign="middle">id-ce-subjectAltName</cell>
- <cell align="left" valign="middle">general_name()</cell>
- </row>
-
- <row>
- <cell align="left" valign="middle">id-ce-issuerAltName</cell>
- <cell align="left" valign="middle">general_name()</cell>
- </row>
-
- <row>
- <cell align="left" valign="middle">id-ce-subjectDirectoryAttributes</cell>
- <cell align="left" valign="middle"> [#'Attribute'{}]</cell>
- </row>
-
- <row>
- <cell align="left" valign="middle">id-ce-basicConstraints</cell>
- <cell align="left" valign="middle">#'BasicConstraints'{}</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-nameConstraints</cell>
- <cell align="left" valign="middle">#'NameConstraints'{}</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-policyConstraints</cell>
- <cell align="left" valign="middle">#'PolicyConstraints'{}</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-extKeyUsage</cell>
- <cell align="left" valign="middle">[id_key_purpose()]</cell>
- </row>
-
- <row>
- <cell align="left" valign="middle">id-ce-cRLDistributionPoints</cell>
- <cell align="left" valign="middle">[#'DistributionPoint'{}]</cell>
- </row>
-
- <row>
- <cell align="left" valign="middle">id-ce-inhibitAnyPolicy</cell>
- <cell align="left" valign="middle">integer()</cell>
- </row>
-
- <row>
- <cell align="left" valign="middle">id-ce-freshestCRL</cell>
- <cell align="left" valign="middle">[#'DistributionPoint'{}]</cell>
- </row>
-
-
- <tcaption>Standard Certificate Extensions</tcaption>
- </table>
-
- <p><c>
- key_usage() = digitalSignature | nonRepudiation | keyEncipherment|
- dataEncipherment | keyAgreement | keyCertSign | cRLSign | encipherOnly |
- decipherOnly
- </c></p>
-
- <p><c> id_key_purpose()</c></p>
-
-<table>
- <row>
- <cell align="left" valign="middle">OID name</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-kp-serverAuth</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-kp-clientAuth</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-kp-codeSigning</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-kp-emailProtection</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-kp-timeStamping</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-kp-OCSPSigning</cell>
- </row>
- <tcaption>Key purpose oids </tcaption>
-</table>
-
- <code>
-#'AuthorityKeyIdentifier'{
- keyIdentifier, % oid()
- authorityCertIssuer, % general_name()
- authorityCertSerialNumber % integer()
- }.
-
-#'PrivateKeyUsagePeriod'{
- notBefore, % general_time()
- notAfter % general_time()
- }.
-
-#'PolicyInformation'{
- policyIdentifier, % oid()
- policyQualifiers % [#PolicyQualifierInfo{}]
- }.
-
-#'PolicyQualifierInfo'{
- policyQualifierId, % oid()
- qualifier % string() | #'UserNotice'{}
- }.
-
-#'UserNotice'{
- noticeRef, % #'NoticeReference'{}
- explicitText % string()
- }.
-
-#'NoticeReference'{
- organization, % string()
- noticeNumbers % [integer()]
- }.
-
-#'PolicyMappings_SEQOF'{
- issuerDomainPolicy, % oid()
- subjectDomainPolicy % oid()
- }.
-
-#'Attribute'{
- type, % oid()
- values % [der_encoded()]
- }).
-
-#'BasicConstraints'{
- cA, % boolean()
- pathLenConstraint % integer()
- }).
-
-#'NameConstraints'{
- permittedSubtrees, % [#'GeneralSubtree'{}]
- excludedSubtrees % [#'GeneralSubtree'{}]
- }).
-
-#'GeneralSubtree'{
- base, % general_name()
- minimum, % integer()
- maximum % integer()
- }).
-
-#'PolicyConstraints'{
- requireExplicitPolicy, % integer()
- inhibitPolicyMapping % integer()
- }).
-
-#'DistributionPoint'{
- distributionPoint, % {fullName, [general_name()]} | {nameRelativeToCRLIssuer,
- [#AttributeTypeAndValue{}]}
- reasons, % [dist_reason()]
- cRLIssuer % [general_name()]
- }).
-</code>
-
-</section>
-
- <section>
- <marker id="PrivIntExt"></marker>
- <title>Private Internet Extensions</title>
-
- <table>
- <row>
- <cell align="left" valign="middle">OID name</cell>
- <cell align="left" valign="middle">Value type</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-pe-authorityInfoAccess</cell>
- <cell align="left" valign="middle">[#'AccessDescription'{}]</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-pe-subjectInfoAccess</cell>
- <cell align="left" valign="middle">[#'AccessDescription'{}]</cell>
- </row>
- <tcaption>Private Internet Extensions</tcaption>
- </table>
-
-<code>
-#'AccessDescription'{
- accessMethod, % oid()
- accessLocation % general_name()
- }).
-</code>
-
- </section>
-
-<section>
- <title> CRL and CRL Extensions Profile</title>
-
- <code>
-#'CertificateList'{
- tbsCertList, % #'TBSCertList{}
- signatureAlgorithm, % #'AlgorithmIdentifier'{}
- signature % {0, binary()} - ASN1 compact bitstring
- }).
-
-#'TBSCertList'{
- version, % v2 (if defined)
- signature, % #AlgorithmIdentifier{}
- issuer, % {rdnSequence, [#AttributeTypeAndValue'{}]}
- thisUpdate, % time()
- nextUpdate, % time()
- revokedCertificates, % [#'TBSCertList_revokedCertificates_SEQOF'{}]
- crlExtensions % [#'Extension'{}]
- }).
-
-#'TBSCertList_revokedCertificates_SEQOF'{
- userCertificate, % integer()
- revocationDate, % timer()
- crlEntryExtensions % [#'Extension'{}]
- }).
- </code>
-
- <section>
- <marker id="CRLCertExt"></marker>
- <title>CRL Extensions </title>
-
- <table>
- <row>
- <cell align="left" valign="middle">OID name</cell>
- <cell align="left" valign="middle">Value type</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-authorityKeyIdentifier</cell>
- <cell align="left" valign="middle">#'AuthorityKeyIdentifier{}</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-issuerAltName</cell>
- <cell align="left" valign="middle">{rdnSequence, [#AttributeTypeAndValue'{}]}</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-cRLNumber</cell>
- <cell align="left" valign="middle">integer()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-deltaCRLIndicator</cell>
- <cell align="left" valign="middle">integer()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-issuingDistributionPoint</cell>
- <cell align="left" valign="middle">#'IssuingDistributionPoint'{}</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-freshestCRL</cell>
- <cell align="left" valign="middle">[#'Distributionpoint'{}]</cell>
- </row>
-
- <tcaption>CRL Extensions</tcaption>
- </table>
-
- <code>
-#'IssuingDistributionPoint'{
- distributionPoint, % {fullName, [general_name()]} | {nameRelativeToCRLIssuer,
- [#AttributeTypeAndValue'{}]}
- onlyContainsUserCerts, % boolean()
- onlyContainsCACerts, % boolean()
- onlySomeReasons, % [dist_reason()]
- indirectCRL, % boolean()
- onlyContainsAttributeCerts % boolean()
- }).
- </code>
- </section>
-
- <section>
- <marker id="CRLEntryExt"></marker>
- <title> CRL Entry Extensions </title>
-
- <table>
- <row>
- <cell align="left" valign="middle">OID name</cell>
- <cell align="left" valign="middle">Value type</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-cRLReason</cell>
- <cell align="left" valign="middle">crl_reason()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-holdInstructionCode</cell>
- <cell align="left" valign="middle">oid()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-invalidityDate</cell>
- <cell align="left" valign="middle">general_time()</cell>
- </row>
- <row>
- <cell align="left" valign="middle">id-ce-certificateIssuer</cell>
- <cell align="left" valign="middle">general_name()</cell>
- </row>
- <tcaption>CRL Entry Extensions</tcaption>
- </table>
- <p><c>
- crl_reason() = unspecified | keyCompromise | cACompromise |
- affiliationChanged | superseded | cessationOfOperation |
- certificateHold | removeFromCRL | privilegeWithdrawn |
- aACompromise
- </c></p>
- </section>
-
- <section>
- <marker id="PKCS10"></marker>
- <title>PKCS#10 Certification Request</title>
- <code>
-#'CertificationRequest'{
- certificationRequestInfo #'CertificationRequestInfo'{},
- signatureAlgorithm #'CertificationRequest_signatureAlgorithm'{}}.
- signature {0, binary()} - ASN1 compact bitstring
- }
-
-#'CertificationRequestInfo'{
- version atom(),
- subject {rdnSequence, [#AttributeTypeAndValue'{}]} ,
- subjectPKInfo #'CertificationRequestInfo_subjectPKInfo'{},
- attributes [#'AttributePKCS-10' {}]
- }
-
-#'CertificationRequestInfo_subjectPKInfo'{
- algorithm #'CertificationRequestInfo_subjectPKInfo_algorithm'{}
- subjectPublicKey {0, binary()} - ASN1 compact bitstring
- }
-
-#'CertificationRequestInfo_subjectPKInfo_algorithm'{
- algorithm = oid(),
- parameters = der_encoded()
-}
-
-#'CertificationRequest_signatureAlgorithm'{
- algorithm = oid(),
- parameters = der_encoded()
- }
-
-#'AttributePKCS-10'{
- type = oid(),
- values = [der_encoded()]
-}
- </code>
- </section>
-
-</section>
-</chapter>
diff --git a/lib/public_key/doc/src/introduction.xml b/lib/public_key/doc/src/introduction.xml
index bf11a092d8..6542c8c509 100644
--- a/lib/public_key/doc/src/introduction.xml
+++ b/lib/public_key/doc/src/introduction.xml
@@ -5,7 +5,7 @@
<header>
<copyright>
<year>2008</year>
- <year>2013</year>
+ <year>2015</year>
<holder>Ericsson AB, All Rights Reserved</holder>
</copyright>
<legalnotice>
@@ -36,27 +36,28 @@
<section>
<title>Purpose</title>
- <p> public_key deals with public key related file formats, digital
- signatures and <url href="http://www.ietf.org/rfc/rfc5280.txt">
+ <p>The Public Key application deals with public-key related file
+ formats, digital signatures, and <url href="http://www.ietf.org/rfc/rfc5280.txt">
X-509 certificates</url>. It is a library application that
- provides encode/decode, sign/verify, encrypt/decrypt and similar
- functionality, it does not read or write files it expects or returns
+ provides encode/decode, sign/verify, encrypt/decrypt, and similar
+      functionality. It does not read or write files; it expects or returns
file contents or partial file contents as binaries.
</p>
</section>
<section>
<title>Prerequisites</title>
- <p>It is assumed that the reader has a basic understanding
- of the concepts of using public keys and digital certificates.</p>
+ <p>It is assumed that the reader is familiar with the Erlang programming
+    language and has a basic understanding of the concepts of using public keys
+    and digital certificates.</p>
</section>
<section>
- <title>Performance tips</title>
- <p>The public_key decode and encode functions will try to use the NIFs
- which are in the ASN1 compilers runtime modules if they can be found.
- So for the best performance you want to have the ASN1 application in the
- path of your system. </p>
+ <title>Performance Tips</title>
+    <p>The Public Key decode and encode functions try to use the NIFs
+    in the ASN.1 compiler's runtime modules, if they can be found.
+    Thus, having the ASN1 application in the
+    path of your system gives the best performance.</p>
</section>
</chapter>
diff --git a/lib/public_key/doc/src/notes.xml b/lib/public_key/doc/src/notes.xml
index fe4bf5ce2d..f241a91eb0 100644
--- a/lib/public_key/doc/src/notes.xml
+++ b/lib/public_key/doc/src/notes.xml
@@ -34,6 +34,21 @@
<file>notes.xml</file>
</header>
+<section><title>Public_Key 0.23</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Improve/extend support for CRL handling.</p>
+ <p>
+ Own Id: OTP-12547 Aux Id: OTP-10362 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Public_Key 0.22.1</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/public_key/doc/src/part.xml b/lib/public_key/doc/src/part.xml
index 73146c8e2a..465f311946 100644
--- a/lib/public_key/doc/src/part.xml
+++ b/lib/public_key/doc/src/part.xml
@@ -31,15 +31,14 @@
<file>part.xml</file>
</header>
<description>
- <p> This application provides an API to public key infrastructure
+ <p>This application provides an API to public-key infrastructure
from <url href="http://www.ietf.org/rfc/rfc5280.txt">RFC
- 5280</url> (X.509 certificates) and public key formats defined by
+ 5280</url> (X.509 certificates) and public-key formats defined by
the <url href="http://en.wikipedia.org/wiki/PKCS">
- PKCS-standard</url></p>
+ PKCS</url> standard.</p>
</description>
<xi:include href="introduction.xml"/>
<xi:include href="public_key_records.xml"/>
- <xi:include href="cert_records.xml"/>
<xi:include href="using_public_key.xml"/>
</part>
diff --git a/lib/public_key/doc/src/public_key.xml b/lib/public_key/doc/src/public_key.xml
index e3473f80d7..883c52393f 100644
--- a/lib/public_key/doc/src/public_key.xml
+++ b/lib/public_key/doc/src/public_key.xml
@@ -5,7 +5,7 @@
<header>
<copyright>
<year>2008</year>
- <year>2014</year>
+ <year>2015</year>
<holder>Ericsson AB, All Rights Reserved</holder>
</copyright>
<legalnotice>
@@ -31,11 +31,11 @@
<rev></rev>
</header>
<module>public_key</module>
- <modulesummary> API module for public key infrastructure.</modulesummary>
+ <modulesummary>API module for public-key infrastructure.</modulesummary>
<description>
- <p>This module provides functions to handle public key infrastructure. It can
- encode/decode different file formats (PEM, openssh), sign and verify digital signatures and validate
- certificate paths and certificate revocation lists.
+ <p>This module provides functions to handle public-key infrastructure. It can
+ encode/decode different file formats (PEM, OpenSSH), sign and verify digital signatures,
+ and validate certificate paths and certificate revocation lists.
</p>
</description>
@@ -43,92 +43,156 @@
<title>public_key</title>
<list type="bulleted">
- <item>public_key requires the crypto and asn1 applications, the latter since R16 (hopefully the runtime dependency on asn1 will
+      <item>Public Key requires the Crypto and ASN1 applications,
+      the latter as of OTP R16 (hopefully the runtime dependency on ASN1 will
be removed again in the future).</item>
<item>Supports <url href="http://www.ietf.org/rfc/rfc5280.txt">RFC 5280 </url> -
- Internet X.509 Public Key Infrastructure Certificate and Certificate Revocation List (CRL) Profile </item>
- <item>Supports <url href="http://www.ietf.org/rfc/rfc3447.txt"> PKCS-1 </url> - RSA Cryptography Standard </item>
- <item>Supports <url href="http://csrc.nist.gov/publications/fips/fips186-3/fips_186-3.pdf"> DSS</url>- Digital Signature Standard (DSA - Digital Signature Algorithm)</item>
- <item>Supports <url href="http://www.emc.com/emc-plus/rsa-labs/standards-initiatives/pkcs-3-diffie-hellman-key-agreement-standar.htm"> PKCS-3 </url> - Diffie-Hellman Key Agreement Standard </item>
- <item>Supports <url href="http://www.ietf.org/rfc/rfc2898.txt"> PKCS-5</url> - Password-Based Cryptography Standard </item>
- <item>Supports <url href="http://www.ietf.org/rfc/rfc5208.txt"> PKCS-8</url> - Private-Key Information Syntax Standard</item>
- <item>Supports <url href="http://www.ietf.org/rfc/rfc5967.txt"> PKCS-10</url> - Certification Request Syntax Standard</item>
+ Internet X.509 Public-Key Infrastructure Certificate and Certificate Revocation List
+ (CRL) Profile </item>
+ <item>Supports <url href="http://www.ietf.org/rfc/rfc3447.txt"> PKCS-1 </url> -
+ RSA Cryptography Standard </item>
+ <item>Supports <url href="http://csrc.nist.gov/publications/fips/fips186-3/fips_186-3.pdf"> DSS</url> -
+ Digital Signature Standard (DSA - Digital Signature Algorithm)</item>
+ <item>Supports
+ <url href="http://www.emc.com/emc-plus/rsa-labs/standards-initiatives/pkcs-3-diffie-hellman-key-agreement-standar.htm"> PKCS-3 </url> -
+ Diffie-Hellman Key Agreement Standard </item>
+ <item>Supports <url href="http://www.ietf.org/rfc/rfc2898.txt"> PKCS-5</url> -
+ Password-Based Cryptography Standard </item>
+ <item>Supports <url href="http://www.ietf.org/rfc/rfc5208.txt"> PKCS-8</url> -
+ Private-Key Information Syntax Standard</item>
+ <item>Supports <url href="http://www.ietf.org/rfc/rfc5967.txt"> PKCS-10</url> -
+ Certification Request Syntax Standard</item>
</list>
</section>
<section>
- <title>COMMON DATA TYPES </title>
+ <title>DATA TYPES</title>
- <note><p>All records used in this manual
+ <note><p>All records used in this Reference Manual
<!-- except #policy_tree_node{} -->
are generated from ASN.1 specifications
and are documented in the User's Guide. See <seealso
- marker="public_key_records">Public key records</seealso> and <seealso
- marker="cert_records">X.509 Certificate records</seealso>.
+ marker="public_key_records">Public-key Records</seealso>.
</p></note>
<p>Use the following include directive to get access to the
- records and constant macros described here and in the User's Guide.</p>
+ records and constant macros described here and in the User's Guide:</p>
<code> -include_lib("public_key/include/public_key.hrl").</code>
- <p><em>Data Types </em></p>
-
- <p><code>oid() - Object Identifier, a tuple of integers as generated by the ASN1 compiler.</code></p>
-
- <p><code>boolean() = true | false</code></p>
+ <p>The following data types are used in the functions for <c>public_key</c>:</p>
- <p><code>string() = [bytes()]</code></p>
+ <taglist>
+ <tag><c>oid()</c></tag>
+ <item><p>Object identifier, a tuple of integers as generated by the <c>ASN.1</c> compiler.</p></item>
- <p><code>der_encoded() = binary()</code></p>
-
- <p><code>pki_asn1_type() = 'Certificate' | 'RSAPrivateKey'| 'RSAPublicKey' |
- 'DSAPrivateKey' | 'DSAPublicKey' | 'DHParameter' |
- 'SubjectPublicKeyInfo' | 'PrivateKeyInfo' |
- 'CertificationRequest' | 'ECPrivateKey' | 'EcpkParameters'</code></p>
-
- <p><code>pem_entry () = {pki_asn1_type(), binary(), %% DER or encrypted DER
- not_encrypted | cipher_info()}</code></p>
+ <tag><c>boolean() =</c></tag>
+ <item><p><c>true | false</c></p></item>
+
+ <tag><c>string() =</c></tag>
+ <item><p><c>[bytes()]</c></p></item>
+
+ <tag><c>der_encoded() =</c></tag>
+ <item><p><c>binary()</c></p></item>
+
+ <tag><c>pki_asn1_type() =</c></tag>
+ <item>
+ <p><c>'Certificate'</c></p>
+ <p><c>| 'RSAPrivateKey'</c></p>
+ <p><c>| 'RSAPublicKey'</c></p>
+ <p><c>| 'DSAPrivateKey'</c></p>
+ <p><c>| 'DSAPublicKey'</c></p>
+ <p><c>| 'DHParameter'</c></p>
+ <p><c>| 'SubjectPublicKeyInfo'</c></p>
+ <p><c>| 'PrivateKeyInfo'</c></p>
+ <p><c>| 'CertificationRequest'</c></p>
+ <p><c>| 'ECPrivateKey'</c></p>
+ <p><c>| 'EcpkParameters'</c></p>
+ </item>
+
+      <tag><c>pem_entry() =</c></tag>
+      <item><p><c>{pki_asn1_type(), binary(), %% DER or encrypted DER</c></p>
+      <p><c>not_encrypted | cipher_info()}</c></p></item>
+
+ <tag><c>cipher_info() = </c></tag>
+ <item><p><c>{"RC2-CBC" | "DES-CBC" | "DES-EDE3-CBC", crypto:rand_bytes(8)</c></p>
+      <p><c>| {#'PBEParameter'{}, digest_type()} | #'PBES2-params'{}}</c></p>
+ </item>
+
+ <tag><c>public_key() =</c></tag>
+ <item><p><c>rsa_public_key() | dsa_public_key() | ec_public_key()</c></p></item>
+
+ <tag><c>private_key() =</c></tag>
+ <item><p><c>rsa_private_key() | dsa_private_key() | ec_private_key()</c></p></item>
- <p><code>cipher_info() = {"RC2-CBC | "DES-CBC" | "DES-EDE3-CBC",
- crypto:rand_bytes(8) | {#'PBEParameter{}, digest_type()} |#'PBES2-params'{}}</code></p>
-
- <p><code>public_key() = rsa_public_key() | dsa_public_key() | ec_public_key()</code></p>
- <p><code>private_key() = rsa_private_key() | dsa_private_key() | ec_private_key()</code></p>
- <p><code>rsa_public_key() = #'RSAPublicKey'{}</code></p>
+ <tag><c>rsa_public_key() =</c></tag>
+ <item><p><c>#'RSAPublicKey'{}</c></p></item>
- <p><code>rsa_private_key() = #'RSAPrivateKey'{}</code></p>
+ <tag><c>rsa_private_key() =</c></tag>
+ <item><p><c>#'RSAPrivateKey'{}</c></p></item>
- <p><code>dsa_public_key() = {integer(), #'Dss-Parms'{}}</code></p>
+ <tag><c>dsa_public_key() =</c></tag>
+ <item><p><c>{integer(), #'Dss-Parms'{}}</c></p></item>
- <p><code>dsa_private_key() = #'DSAPrivateKey'{}</code></p>
+ <tag><c>dsa_private_key() =</c></tag>
+ <item><p><c>#'DSAPrivateKey'{}</c></p></item>
- <p><code>ec_public_key() = {#'ECPoint'{}, #'EcpkParameters'{} |
- {namedCurve, oid()}}</code></p>
-
- <p><code>ec_private_key() = #'ECPrivateKey'{}</code></p>
+      <tag><c>ec_public_key() =</c></tag>
+      <item><p><c>{#'ECPoint'{}, #'EcpkParameters'{} | {namedCurve, oid()}}</c></p></item>
- <p><code>public_crypt_options() = [{rsa_pad, rsa_padding()}].</code></p>
+ <tag><c>ec_private_key() =</c></tag>
+ <item><p><c>#'ECPrivateKey'{}</c></p></item>
- <p><code>rsa_padding() = 'rsa_pkcs1_padding' | 'rsa_pkcs1_oaep_padding' |
- 'rsa_no_padding'</code></p>
+ <tag><c>public_crypt_options() =</c></tag>
+ <item><p><c>[{rsa_pad, rsa_padding()}]</c></p></item>
- <p><code>digest_type() - Union of below digest types</code></p>
-
- <p><code>rsa_digest_type() = 'md5' | 'sha' | 'sha224' | 'sha256' | 'sha384' |
- 'sha512'</code></p>
+ <tag><c>rsa_padding() =</c></tag>
+ <item>
+ <p><c>'rsa_pkcs1_padding'</c></p>
+ <p><c>| 'rsa_pkcs1_oaep_padding'</c></p>
+ <p><c>| 'rsa_no_padding'</c></p>
+ </item>
- <p><code>dss_digest_type() = 'sha'</code></p>
+ <tag><c>digest_type() = </c></tag>
+ <item><p>Union of <c>rsa_digest_type()</c>, <c>dss_digest_type()</c>,
+ and <c>ecdsa_digest_type()</c>.</p></item>
- <p><code>ecdsa_digest_type() = 'sha'| 'sha224' | 'sha256' | 'sha384' | 'sha512'</code></p>
+ <tag><c>rsa_digest_type() = </c></tag>
+ <item><p><c>'md5' | 'sha' | 'sha224' | 'sha256' | 'sha384' | 'sha512'</c></p></item>
- <p><code>crl_reason() = unspecified | keyCompromise | cACompromise |
- affiliationChanged | superseded | cessationOfOperation |
- certificateHold | privilegeWithdrawn | aACompromise</code></p>
+ <tag><c>dss_digest_type() = </c></tag>
+ <item><p><c>'sha'</c></p></item>
- <p><code>ssh_file() = openssh_public_key | rfc4716_public_key | known_hosts |
- auth_keys</code></p>
+ <tag><c>ecdsa_digest_type() = </c></tag>
+ <item><p><c>'sha'| 'sha224' | 'sha256' | 'sha384' | 'sha512'</c></p></item>
+
+ <tag><c>crl_reason() = </c></tag>
+ <item>
+ <p><c>unspecified</c></p>
+ <p><c>| keyCompromise</c></p>
+ <p><c>| cACompromise</c></p>
+ <p><c>| affiliationChanged</c></p>
+ <p><c>| superseded</c></p>
+ <p><c>| cessationOfOperation</c></p>
+ <p><c>| certificateHold</c></p>
+ <p><c>| privilegeWithdrawn</c></p>
+ <p><c>| aACompromise</c></p>
+ </item>
+
+ <tag><c>issuer_name() =</c></tag>
+ <item><p><c>{rdnSequence,[#'AttributeTypeAndValue'{}]}</c></p>
+ </item>
+
+ <tag><c>ssh_file() =</c></tag>
+ <item>
+ <p><c>openssh_public_key</c></p>
+ <p><c>| rfc4716_public_key</c></p>
+ <p><c>| known_hosts</c></p>
+ <p><c>| auth_keys</c></p>
+ </item>
+ </taglist>
+
<!-- <p><code>policy_tree() = [Root, Children]</code></p> -->
@@ -136,12 +200,12 @@
<!-- <p><code>Children = [] | policy_tree()</code></p> -->
-<!-- <p> The policy_tree_node record has the following fields:</p> -->
+<!-- <p>The <c>policy_tree_node</c> record has the following fields:</p> -->
<!-- <taglist> -->
<!-- <tag>valid_policy</tag> -->
-<!-- <item> Is a single policy OID representing a -->
+<!-- <item>A single policy OID representing a -->
<!-- valid policy for the path of length x.</item> -->
<!-- <tag>qualifier_set</tag> -->
@@ -149,13 +213,13 @@
<!-- with the valid policy in certificate x.</item> -->
<!-- <tag>critically_indicator</tag> -->
-<!-- <item>The critically_indicator indicates whether the -->
+<!-- <item>Indicates whether the -->
<!-- certificate policy extension in certificate x was marked as -->
-<!-- critical. </item> -->
+<!-- critical.</item> -->
<!-- <tag>expected_policy_set</tag> -->
-<!-- <item>The expected_policy_set contains one or more policy OIDs -->
-<!-- that would satisfy this policy in the certificate x+1. </item> -->
+<!-- <item>Contains one or more policy OIDs -->
+<!-- that would satisfy this policy in the certificate x+1.</item> -->
<!-- </taglist> -->
</section>
@@ -164,27 +228,27 @@
<func>
<name>compute_key(OthersKey, MyKey)-></name>
<name>compute_key(OthersKey, MyKey, Params)-></name>
- <fsummary> Compute shared secret</fsummary>
+ <fsummary>Computes shared secret.</fsummary>
<type>
<v>OthersKey = #'ECPoint'{} | binary(), MyKey = #'ECPrivateKey'{} | binary()</v>
<v>Params = #'DHParameter'{}</v>
</type>
<desc>
- <p> Compute shared secret </p>
+ <p>Computes shared secret.</p>
</desc>
</func>
<func>
<name>decrypt_private(CipherText, Key) -> binary()</name>
<name>decrypt_private(CipherText, Key, Options) -> binary()</name>
- <fsummary>Public key decryption.</fsummary>
+ <fsummary>Public-key decryption.</fsummary>
<type>
<v>CipherText = binary()</v>
<v>Key = rsa_private_key()</v>
<v>Options = public_crypt_options()</v>
</type>
<desc>
- <p>Public key decryption using the private key. See also <seealso
+ <p>Public-key decryption using the private key. See also <seealso
marker="crypto:crypto#private_decrypt/4">crypto:private_decrypt/4</seealso></p>
</desc>
</func>
@@ -192,156 +256,156 @@
<func>
<name>decrypt_public(CipherText, Key) - > binary()</name>
<name>decrypt_public(CipherText, Key, Options) - > binary()</name>
- <fsummary></fsummary>
+ <fsummary>Public-key decryption.</fsummary>
<type>
<v>CipherText = binary()</v>
<v>Key = rsa_public_key()</v>
<v>Options = public_crypt_options()</v>
</type>
<desc>
- <p> Public key decryption using the public key. See also <seealso
+ <p>Public-key decryption using the public key. See also <seealso
marker="crypto:crypto#public_decrypt/4">crypto:public_decrypt/4</seealso></p>
</desc>
</func>
<func>
<name>der_decode(Asn1type, Der) -> term()</name>
- <fsummary> Decodes a public key ASN.1 DER encoded entity.</fsummary>
+ <fsummary>Decodes a public-key ASN.1 DER encoded entity.</fsummary>
<type>
<v>Asn1Type = atom()</v>
- <d> ASN.1 type present in the public_key applications
- asn1 specifications.</d>
+         <d>ASN.1 type present in the Public Key application's
+ ASN.1 specifications.</d>
<v>Der = der_encoded()</v>
</type>
<desc>
- <p> Decodes a public key ASN.1 DER encoded entity.</p>
+ <p>Decodes a public-key ASN.1 DER encoded entity.</p>
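+      <p>A minimal round-trip sketch (assumed file name), decoding and
+      re-encoding an unencrypted RSA key:</p>
+      <code>
+{ok, PemBin} = file:read_file("key.pem"),
+[{'RSAPrivateKey', Der, not_encrypted}] = public_key:pem_decode(PemBin),
+Key = public_key:der_decode('RSAPrivateKey', Der),
+%% DER encoding is canonical, so re-encoding gives back the same binary.
+Der = public_key:der_encode('RSAPrivateKey', Key).</code>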
</desc>
</func>
<func>
<name>der_encode(Asn1Type, Entity) -> der_encoded()</name>
- <fsummary> Encodes a public key entity with asn1 DER encoding.</fsummary>
+ <fsummary>Encodes a public-key entity with ASN.1 DER encoding.</fsummary>
<type>
<v>Asn1Type = atom()</v>
- <d> Asn1 type present in the public_key applications
+      <d>ASN.1 type present in the Public Key application's
ASN.1 specifications.</d>
<v>Entity = term()</v>
- <d>The erlang representation of <c>Asn1Type</c></d>
+ <d>Erlang representation of <c>Asn1Type</c></d>
</type>
<desc>
- <p> Encodes a public key entity with ASN.1 DER encoding.</p>
+ <p>Encodes a public-key entity with ASN.1 DER encoding.</p>
</desc>
</func>
+ <func>
+ <name>encrypt_private(PlainText, Key) -> binary()</name>
+ <fsummary>Public-key encryption using the private key.</fsummary>
+ <type>
+ <v>PlainText = binary()</v>
+ <v>Key = rsa_private_key()</v>
+ </type>
+ <desc>
+ <p>Public-key encryption using the private key.
+ See also <seealso
+ marker="crypto:crypto#private_encrypt/4">crypto:private_encrypt/4</seealso>.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name>encrypt_public(PlainText, Key) -> binary()</name>
+ <fsummary>Public-key encryption using the public key.</fsummary>
+ <type>
+ <v>PlainText = binary()</v>
+ <v>Key = rsa_public_key()</v>
+ </type>
+ <desc>
+ <p>Public-key encryption using the public key. See also <seealso
+ marker="crypto:crypto#public_encrypt/4">crypto:public_encrypt/4</seealso>.</p>
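+      <p>A minimal round-trip sketch (not part of the original text), assuming
+      <c>PubKey</c> and <c>PrivKey</c> are a matching <c>rsa_public_key()</c>
+      and <c>rsa_private_key()</c>:</p>
+      <code>
+%% Anything encrypted with the public key can only be recovered
+%% with the corresponding private key.
+Secret = list_to_binary("some secret"),
+CipherText = public_key:encrypt_public(Secret, PubKey),
+Secret = public_key:decrypt_private(CipherText, PrivKey).</code>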
+ </desc>
+ </func>
+
<func>
<name>generate_key(Params) -> {Public::binary(), Private::binary()} | #'ECPrivateKey'{} </name>
- <fsummary>Generates a new keypair</fsummary>
+ <fsummary>Generates a new keypair.</fsummary>
<type>
- <v> Params = #'DHParameter'{} | {namedCurve, oid()} | #'ECParameters'{} </v>
+ <v>Params = #'DHParameter'{} | {namedCurve, oid()} | #'ECParameters'{}</v>
</type>
<desc>
- <p>Generates a new keypair</p>
+ <p>Generates a new keypair.</p>
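+      <p>A hedged sketch of a Diffie-Hellman exchange, assuming <c>P</c> is a
+      suitable large prime obtained elsewhere:</p>
+      <code>
+%% Both parties generate a keypair from the same parameters and
+%% derive the same shared secret.
+DHParams = #'DHParameter'{prime = P, base = 2},
+{PubA, PrivA} = public_key:generate_key(DHParams),
+{PubB, PrivB} = public_key:generate_key(DHParams),
+Secret = public_key:compute_key(PubB, PrivA, DHParams),
+Secret = public_key:compute_key(PubA, PrivB, DHParams).</code>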
</desc>
</func>
<func>
<name>pem_decode(PemBin) -> [pem_entry()]</name>
- <fsummary>Decode PEM binary data and return
- entries as ASN.1 DER encoded entities. </fsummary>
+ <fsummary>Decodes PEM binary data and returns
+ entries as ASN.1 DER encoded entities.</fsummary>
<type>
<v>PemBin = binary()</v>
<d>Example {ok, PemBin} = file:read_file("cert.pem").</d>
</type>
<desc>
- <p>Decode PEM binary data and return
+ <p>Decodes PEM binary data and returns
entries as ASN.1 DER encoded entities.</p>
</desc>
</func>
<func>
<name>pem_encode(PemEntries) -> binary()</name>
- <fsummary>Creates a PEM binary</fsummary>
+ <fsummary>Creates a PEM binary.</fsummary>
<type>
<v> PemEntries = [pem_entry()] </v>
</type>
<desc>
- <p>Creates a PEM binary</p>
+ <p>Creates a PEM binary.</p>
</desc>
</func>
<func>
<name>pem_entry_decode(PemEntry) -> term()</name>
<name>pem_entry_decode(PemEntry, Password) -> term()</name>
- <fsummary>Decodes a pem entry.</fsummary>
+ <fsummary>Decodes a PEM entry.</fsummary>
<type>
- <v> PemEntry = pem_entry() </v>
- <v> Password = string() </v>
+ <v>PemEntry = pem_entry()</v>
+ <v>Password = string()</v>
</type>
<desc>
- <p>Decodes a PEM entry. pem_decode/1 returns a list of PEM
- entries. Note that if the PEM entry is of type
- 'SubjectPublickeyInfo' it will be further decoded to an
- rsa_public_key() or dsa_public_key().</p>
+ <p>Decodes a PEM entry. <c>pem_decode/1</c> returns a list of PEM
+ entries. Notice that if the PEM entry is of type
+	    'SubjectPublicKeyInfo', it is further decoded to an
+ <c>rsa_public_key()</c> or <c>dsa_public_key()</c>.</p>
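+	    <p>A small sketch (assumed file name), decoding all entries of a
+	    PEM file:</p>
+	    <code>
+{ok, PemBin} = file:read_file("cacerts.pem"),
+PemEntries = public_key:pem_decode(PemBin),
+%% Certificates decode to #'Certificate'{} records, keys to key records.
+Entities = lists:map(fun public_key:pem_entry_decode/1, PemEntries).</code>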
</desc>
</func>
<func>
<name>pem_entry_encode(Asn1Type, Entity) -> pem_entry()</name>
<name>pem_entry_encode(Asn1Type, Entity, {CipherInfo, Password}) -> pem_entry()</name>
- <fsummary> Creates a PEM entry that can be fed to pem_encode/1.</fsummary>
+ <fsummary>Creates a PEM entry that can be fed to <c>pem_encode/1</c>.</fsummary>
<type>
<v>Asn1Type = pki_asn1_type()</v>
<v>Entity = term()</v>
- <d>The Erlang representation of
- <c>Asn1Type</c>. If <c>Asn1Type</c> is 'SubjectPublicKeyInfo'
- then <c>Entity</c> must be either an rsa_public_key() or a
- dsa_public_key() and this function will create the appropriate
+ <d>Erlang representation of
+ <c>Asn1Type</c>. If <c>Asn1Type</c> is 'SubjectPublicKeyInfo',
+ <c>Entity</c> must be either an <c>rsa_public_key()</c> or a
+ <c>dsa_public_key()</c> and this function creates the appropriate
'SubjectPublicKeyInfo' entry.
</d>
<v>CipherInfo = cipher_info()</v>
<v>Password = string()</v>
</type>
<desc>
- <p> Creates a PEM entry that can be feed to pem_encode/1.</p>
+      <p>Creates a PEM entry that can be fed to <c>pem_encode/1</c>.</p>
</desc>
</func>
-
- <func>
- <name>encrypt_private(PlainText, Key) -> binary()</name>
- <fsummary> Public key encryption using the private key.</fsummary>
- <type>
- <v>PlainText = binary()</v>
- <v>Key = rsa_private_key()</v>
- </type>
- <desc>
- <p> Public key encryption using the private key.
- See also <seealso
- marker="crypto:crypto#private_encrypt/4">crypto:private_encrypt/4</seealso></p>
- </desc>
- </func>
-
- <func>
- <name>encrypt_public(PlainText, Key) -> binary()</name>
- <fsummary> Public key encryption using the public key.</fsummary>
- <type>
- <v>PlainText = binary()</v>
- <v>Key = rsa_public_key()</v>
- </type>
- <desc>
- <p> Public key encryption using the public key. See also <seealso
- marker="crypto:crypto#public_encrypt/4">crypto:public_encrypt/4</seealso></p>
- </desc>
- </func>
<func>
<name>pkix_decode_cert(Cert, otp|plain) -> #'Certificate'{} | #'OTPCertificate'{}</name>
- <fsummary> Decodes an ASN.1 DER encoded PKIX x509 certificate.</fsummary>
+ <fsummary>Decodes an ASN.1 DER-encoded PKIX x509 certificate.</fsummary>
<type>
<v>Cert = der_encoded()</v>
</type>
<desc>
- <p>Decodes an ASN.1 DER encoded PKIX certificate. The otp option
- will use the customized ASN.1 specification OTP-PKIX.asn1 for
+ <p>Decodes an ASN.1 DER-encoded PKIX certificate. Option <c>otp</c>
+ uses the customized ASN.1 specification OTP-PKIX.asn1 for
decoding and also recursively decode most of the standard
parts.</p>
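+      <p>A usage sketch (assumed file name), decoding the first certificate of
+      a PEM file in both formats:</p>
+      <code>
+{ok, PemBin} = file:read_file("cert.pem"),
+[{'Certificate', Der, not_encrypted} | _] = public_key:pem_decode(PemBin),
+#'Certificate'{} = public_key:pkix_decode_cert(Der, plain),
+#'OTPCertificate'{} = public_key:pkix_decode_cert(Der, otp).</code>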
</desc>
@@ -353,98 +417,99 @@
certificate.</fsummary>
<type>
<v>Asn1Type = atom()</v>
- <d>The ASN.1 type can be 'Certificate', 'OTPCertificate' or a subtype of either .</d>
+ <d>The ASN.1 type can be 'Certificate', 'OTPCertificate' or a subtype of either.</d>
<v>Entity = #'Certificate'{} | #'OTPCertificate'{} | a valid subtype</v>
</type>
<desc>
<p>DER encodes a PKIX x509 certificate or part of such a
certificate. This function must be used for encoding certificates or parts of certificates
- that are decoded/created in the otp format, whereas for the plain format this
- function will directly call der_encode/2. </p>
+ that are decoded/created in the <c>otp</c> format, whereas for the plain format this
+ function directly calls <c>der_encode/2</c>.</p>
</desc>
</func>
<func>
<name>pkix_is_issuer(Cert, IssuerCert) -> boolean()</name>
- <fsummary> Checks if <c>IssuerCert</c> issued <c>Cert</c> </fsummary>
+ <fsummary>Checks if <c>IssuerCert</c> issued <c>Cert</c>.</fsummary>
<type>
- <v>Cert = der_encode() | #'OTPCertificate'{}</v>
- <v>IssuerCert = der_encode() | #'OTPCertificate'{}</v>
+ <v>Cert = der_encoded() | #'OTPCertificate'{}</v>
+ <v>IssuerCert = der_encoded() | #'OTPCertificate'{}</v>
</type>
<desc>
- <p> Checks if <c>IssuerCert</c> issued <c>Cert</c> </p>
+ <p>Checks if <c>IssuerCert</c> issued <c>Cert</c>.</p>
</desc>
</func>
<func>
<name>pkix_is_fixed_dh_cert(Cert) -> boolean()</name>
- <fsummary> Checks if a Certificate is a fixed Diffie-Hellman Cert.</fsummary>
+ <fsummary>Checks if a certificate is a fixed Diffie-Hellman certificate.</fsummary>
<type>
- <v>Cert = der_encode() | #'OTPCertificate'{}</v>
+ <v>Cert = der_encoded() | #'OTPCertificate'{}</v>
</type>
<desc>
- <p> Checks if a Certificate is a fixed Diffie-Hellman Cert.</p>
+ <p>Checks if a certificate is a fixed Diffie-Hellman certificate.</p>
</desc>
</func>
<func>
<name>pkix_is_self_signed(Cert) -> boolean()</name>
- <fsummary> Checks if a Certificate is self signed.</fsummary>
+ <fsummary>Checks if a certificate is self-signed.</fsummary>
<type>
- <v>Cert = der_encode() | #'OTPCertificate'{}</v>
+ <v>Cert = der_encoded() | #'OTPCertificate'{}</v>
</type>
<desc>
- <p> Checks if a Certificate is self signed.</p>
+ <p>Checks if a certificate is self-signed.</p>
</desc>
</func>
<func>
<name>pkix_issuer_id(Cert, IssuedBy) -> {ok, IssuerID} | {error, Reason}</name>
- <fsummary> Returns the issuer id.</fsummary>
+ <fsummary>Returns the issuer id.</fsummary>
<type>
- <v>Cert = der_encode() | #'OTPCertificate'{}</v>
+ <v>Cert = der_encoded() | #'OTPCertificate'{}</v>
<v>IssuedBy = self | other</v>
- <v>IssuerID = {integer(), {rdnSequence, [#'AttributeTypeAndValue'{}]}}</v>
+ <v>IssuerID = {integer(), issuer_name()}</v>
<d>The issuer id consists of the serial number and the issuers name.</d>
<v>Reason = term()</v>
- </type>
- <desc>
- <p> Returns the issuer id.</p>
- </desc>
+ </type>
+ <desc>
+ <p>Returns the issuer id.</p>
+ </desc>
</func>
-
+
+
<func>
<name>pkix_normalize_name(Issuer) -> Normalized</name>
- <fsummary>Normalizes a issuer name so that it can be easily
- compared to another issuer name. </fsummary>
+ <fsummary>Normalizes an issuer name so that it can be easily
+ compared to another issuer name.</fsummary>
<type>
- <v>Issuer = {rdnSequence,[#'AttributeTypeAndValue'{}]}</v>
- <v>Normalized = {rdnSequence, [#'AttributeTypeAndValue'{}]}</v>
+ <v>Issuer = issuer_name()</v>
+ <v>Normalized = issuer_name()</v>
</type>
<desc>
- <p>Normalizes a issuer name so that it can be easily
+ <p>Normalizes an issuer name so that it can be easily
compared to another issuer name.</p>
</desc>
</func>
<func>
<name>pkix_path_validation(TrustedCert, CertChain, Options) -> {ok, {PublicKeyInfo, PolicyTree}} | {error, {bad_cert, Reason}} </name>
- <fsummary> Performs a basic path validation according to RFC 5280.</fsummary>
+ <fsummary>Performs a basic path validation according to RFC 5280.</fsummary>
<type>
- <v> TrustedCert = #'OTPCertificate'{} | der_encode() | atom() </v>
- <d>Normally a trusted certificate but it can also be a path validation
+      <v>TrustedCert = #'OTPCertificate'{} | der_encoded() | atom()</v>
+ <d>Normally a trusted certificate, but it can also be a path-validation
error that can be discovered while
- constructing the input to this function and that should be run through the <c>verify_fun</c>.
- For example <c>unknown_ca </c> or <c>selfsigned_peer </c>
+ constructing the input to this function and that is to be run through the <c>verify_fun</c>.
+	  Examples are <c>unknown_ca</c> and <c>selfsigned_peer</c>.
</d>
- <v> CertChain = [der_encode()]</v>
- <d>A list of DER encoded certificates in trust order ending with the peer certificate.</d>
- <v> Options = proplists:proplist()</v>
+      <v>CertChain = [der_encoded()]</v>
+ <d>A list of DER-encoded certificates in trust order ending with the peer certificate.</d>
+ <v>Options = proplists:proplist()</v>
<v>PublicKeyInfo = {?'rsaEncryption' | ?'id-dsa',
rsa_public_key() | integer(), 'NULL' | 'Dss-Parms'{}}</v>
- <v> PolicyTree = term() </v>
- <d>At the moment this will always be an empty list as Policies are not currently supported</d>
- <v> Reason = cert_expired | invalid_issuer | invalid_signature | name_not_permitted |
+ <v>PolicyTree = term()</v>
+ <d>At the moment this is always an empty list as policies are not currently supported.</d>
+ <v>Reason = cert_expired | invalid_issuer | invalid_signature | name_not_permitted |
missing_basic_constraint | invalid_key_usage | {revoked, crl_reason()} | atom()
</v>
</type>
@@ -452,17 +517,17 @@
<p>
Performs a basic path validation according to
<url href="http://www.ietf.org/rfc/rfc5280.txt">RFC 5280.</url>
- However CRL validation is done separately by <seealso
- marker="#pkix_crls_validate-3">pkix_crls_validate/3 </seealso> and should be called
- from the supplied <c>verify_fun</c>
+ However, CRL validation is done separately by <seealso
+ marker="#pkix_crls_validate-3">pkix_crls_validate/3 </seealso> and is to be called
+ from the supplied <c>verify_fun</c>.
</p>
- <taglist>
- <p> Available options are: </p>
+ <p>Available options:</p>
+ <taglist>
<tag>{verify_fun, fun()}</tag>
<item>
- <p>The fun should be defined as:</p>
+ <p>The fun must be defined as:</p>
<code>
fun(OtpCert :: #'OTPCertificate'{},
@@ -475,98 +540,112 @@ fun(OtpCert :: #'OTPCertificate'{},
{unknown, UserState :: term()}.
</code>
- <p>If the verify callback fun returns {fail, Reason}, the
+ <p>If the verify callback fun returns <c>{fail, Reason}</c>, the
verification process is immediately stopped. If the verify
- callback fun returns {valid, UserState}, the verification
- process is continued, this can be used to accept specific path
- validation errors such as <c>selfsigned_peer</c> as well as
- verifying application specific extensions. If called with an
- extension unknown to the user application the return value
- {unknown, UserState} should be used.</p>
+ callback fun returns <c>{valid, UserState}</c>, the verification
+ process is continued. This can be used to accept specific path
+ validation errors, such as <c>selfsigned_peer</c>, as well as
+ verifying application-specific extensions. If called with an
+ extension unknown to the user application, the return value
+ <c>{unknown, UserState}</c> is to be used.</p>
</item>
<tag>{max_path_length, integer()}</tag>
<item>
The <c>max_path_length</c> is the maximum number of non-self-issued
- intermediate certificates that may follow the peer certificate
- in a valid certification path. So if <c>max_path_length</c> is 0 the PEER must
- be signed by the trusted ROOT-CA directly, if 1 the path can
- be PEER, CA, ROOT-CA, if it is 2 PEER, CA, CA, ROOT-CA and so
- on.
+ intermediate certificates that can follow the peer certificate
+ in a valid certification path. So, if <c>max_path_length</c> is 0, the PEER must
+	be signed by the trusted ROOT-CA directly; if it is 1, the path can
+	be PEER, CA, ROOT-CA; if it is 2, the path can
+	be PEER, CA, CA, ROOT-CA; and so on.
</item>
</taglist>
- <p> Possible reasons for a bad certificate are: </p>
+ <p>Possible reasons for a bad certificate: </p>
<taglist>
<tag>cert_expired</tag>
- <item>The certificate is no longer valid as its expiration date has passed.</item>
+ <item><p>Certificate is no longer valid as its expiration date has passed.</p></item>
<tag>invalid_issuer</tag>
- <item>The certificate issuer name does not match the name of the issuer certificate in the chain.</item>
+ <item><p>Certificate issuer name does not match the name of the issuer certificate in the chain.</p></item>
<tag>invalid_signature</tag>
- <item>The certificate was not signed by its issuer certificate in the chain.</item>
+ <item><p>Certificate was not signed by its issuer certificate in the chain.</p></item>
<tag>name_not_permitted</tag>
- <item>Invalid Subject Alternative Name extension.</item>
+ <item><p>Invalid Subject Alternative Name extension.</p></item>
<tag>missing_basic_constraint</tag>
- <item>Certificate, required to have the basic constraints extension, does not have
- a basic constraints extension.</item>
+ <item><p>Certificate, required to have the basic constraints extension, does not have
+ a basic constraints extension.</p></item>
<tag>invalid_key_usage</tag>
- <item>Certificate key is used in an invalid way according to the key usage extension.</item>
+ <item><p>Certificate key is used in an invalid way according to the key-usage extension.</p></item>
<tag>{revoked, crl_reason()}</tag>
- <item>Certificate has been revoked.</item>
+ <item><p>Certificate has been revoked.</p></item>
<tag>atom()</tag>
- <item>Application specific error reason that should be checked by the verify_fun</item>
+ <item><p>Application-specific error reason that is to be checked by the <c>verify_fun</c>.</p></item>
</taglist>
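+      <p>A minimal usage sketch (not from the original text), assuming
+      <c>TrustedDer</c>, <c>CADer</c>, and <c>PeerDer</c> are DER-encoded
+      certificates forming a valid chain in trust order:</p>
+      <code>
+%% The chain is given in trust order and ends with the peer certificate;
+%% default options are used in this sketch.
+{ok, {PublicKeyInfo, _PolicyTree}} =
+    public_key:pkix_path_validation(TrustedDer, [CADer, PeerDer], []).</code>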
</desc>
</func>
+ <func>
+ <name>pkix_crl_issuer(CRL) -> issuer_name()</name>
+ <fsummary>Returns the issuer of the <c>CRL</c>.</fsummary>
+ <type>
+ <v>CRL = der_encoded() | #'CertificateList'{} </v>
+ </type>
+ <desc>
+ <p>Returns the issuer of the <c>CRL</c>.</p>
+ </desc>
+ </func>
+
<func>
<name>pkix_crls_validate(OTPCertificate, DPAndCRLs, Options) -> CRLStatus()</name>
- <fsummary> Performs CRL validation.</fsummary>
+ <fsummary>Performs CRL validation.</fsummary>
<type>
- <v> OTPCertificate = #'OTPCertificate'{}</v>
- <v> DPAndCRLs = [{DP::#'DistributionPoint'{}, {DerCRL::der_encoded(), CRL::#'CertificateList'{}}}] </v>
- <v> Options = proplists:proplist()</v>
- <v> CRLStatus() = valid | {bad_cert, revocation_status_undetermined} |
+ <v>OTPCertificate = #'OTPCertificate'{}</v>
+ <v>DPAndCRLs = [{DP::#'DistributionPoint'{}, {DerCRL::der_encoded(), CRL::#'CertificateList'{}}}] </v>
+ <v>Options = proplists:proplist()</v>
+ <v>CRLStatus() = valid | {bad_cert, revocation_status_undetermined} |
{bad_cert, {revoked, crl_reason()}}</v>
</type>
<desc>
- <p> Performs CRL validation. It is intended to be called from
+ <p>Performs CRL validation. It is intended to be called from
the verify fun of <seealso marker="#pkix_path_validation-3"> pkix_path_validation/3
- </seealso></p>
+ </seealso>.</p>
+
+ <p>Available options:</p>
+
<taglist>
- <p> Available options are: </p>
+
<tag>{update_crl, fun()}</tag>
<item>
- <p>The fun has the following type spec:</p>
+ <p>The fun has the following type specification:</p>
<code> fun(#'DistributionPoint'{}, #'CertificateList'{}) ->
#'CertificateList'{}</code>
- <p>The fun should use the information in the distribution point to acesses
- the lates possible version of the CRL. If this fun is not specified
- public_key will use the default implementation:
+ <p>The fun uses the information in the distribution point to access
+ the latest possible version of the CRL. If this fun is not specified,
+ Public Key uses the default implementation:
</p>
<code> fun(_DP, CRL) -> CRL end</code>
</item>
<tag>{issuer_fun, fun()}</tag>
<item>
- <p>The fun has the following type spec:</p>
+ <p>The fun has the following type specification:</p>
<code>
fun(#'DistributionPoint'{}, #'CertificateList'{},
{rdnSequence,[#'AttributeTypeAndValue'{}]}, term()) ->
{ok, #'OTPCertificate'{}, [der_encoded]}</code>
- <p>The fun should return the root certificate and certificate chain
+ <p>The fun returns the root certificate and certificate chain
that has signed the CRL.
</p>
<code> fun(DP, CRL, Issuer, UserState) -> {ok, RootCert, CertChain}</code>
@@ -574,91 +653,130 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
</taglist>
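+      <p>A hedged sketch of calling this function from a path-validation
+      <c>verify_fun</c>, assuming <c>OtpCert</c> is the decoded certificate and
+      <c>DerCRLs</c> is a list of DER-encoded CRLs fetched elsewhere:</p>
+      <code>
+%% Pair each CRL with a distribution point for the certificate issuer;
+%% options are left at their defaults in this sketch.
+DPsAndCRLs = lists:map(
+               fun(DerCRL) ->
+                       {public_key:pkix_dist_point(OtpCert),
+                        {DerCRL, public_key:der_decode('CertificateList', DerCRL)}}
+               end, DerCRLs),
+CRLStatus = public_key:pkix_crls_validate(OtpCert, DPsAndCRLs, []).</code>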
</desc>
</func>
+
+ <func>
+ <name>pkix_crl_verify(CRL, Cert) -> boolean()</name>
+      <fsummary>Verifies that <c>Cert</c> is the <c>CRL</c> signer.</fsummary>
+ <type>
+ <v>CRL = der_encoded() | #'CertificateList'{} </v>
+ <v>Cert = der_encoded() | #'OTPCertificate'{} </v>
+ </type>
+ <desc>
+	    <p>Verifies that <c>Cert</c> is the <c>CRL</c> signer.</p>
+ </desc>
+ </func>
+ <func>
+ <name>pkix_dist_point(Cert) -> DistPoint</name>
+ <fsummary>Creates a distribution point for CRLs issued by the same issuer as <c>Cert</c>.</fsummary>
+ <type>
+ <v> Cert = der_encoded() | #'OTPCertificate'{} </v>
+ <v> DistPoint = #'DistributionPoint'{}</v>
+ </type>
+ <desc>
+ <p>Creates a distribution point for CRLs issued by the same issuer as <c>Cert</c>.
+ Can be used as input to <seealso
+      marker="#pkix_crls_validate-3">pkix_crls_validate/3</seealso>.
+ </p>
+ </desc>
+ </func>
+
+ <func>
+ <name>pkix_dist_points(Cert) -> DistPoints</name>
+      <fsummary>Extracts distribution points from the certificate extensions.</fsummary>
+ <type>
+ <v> Cert = der_encoded() | #'OTPCertificate'{} </v>
+ <v> DistPoints = [#'DistributionPoint'{}]</v>
+ </type>
+ <desc>
+      <p>Extracts distribution points from the certificate extensions.</p>
+ </desc>
+ </func>
+
<func>
- <name>pkix_sign(#'OTPTBSCertificate'{}, Key) -> der_encode()</name>
+ <name>pkix_sign(#'OTPTBSCertificate'{}, Key) -> der_encoded()</name>
<fsummary>Signs certificate.</fsummary>
<type>
<v>Key = rsa_public_key() | dsa_public_key()</v>
</type>
<desc>
- <p>Signs a 'OTPTBSCertificate'. Returns the corresponding
- der encoded certificate.</p>
+ <p>Signs an 'OTPTBSCertificate'. Returns the corresponding
+ DER-encoded certificate.</p>
</desc>
</func>
<func>
<name>pkix_sign_types(AlgorithmId) -> {DigestType, SignatureType}</name>
- <fsummary>Translates signature algorithm oid to erlang digest and signature algorithm types.</fsummary>
+ <fsummary>Translates signature algorithm OID to Erlang digest and signature algorithm types.</fsummary>
<type>
<v>AlgorithmId = oid()</v>
- <d>Signature oid from a certificate or a certificate revocation list</d>
- <v>DigestType = rsa_digest_type() | dss_digest_type() </v>
+ <d>Signature OID from a certificate or a certificate revocation list.</d>
+ <v>DigestType = rsa_digest_type() | dss_digest_type()</v>
<v>SignatureType = rsa | dsa</v>
</type>
<desc>
- <p>Translates signature algorithm oid to erlang digest and signature types.
+ <p>Translates signature algorithm OID to Erlang digest and signature types.
</p>
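+      <p>For example, a SHA-1/RSA signature OID (assumed OID macro from the
+      generated includes) translates as follows:</p>
+      <code>
+%% ?'sha1WithRSAEncryption' is the OID macro generated from the ASN.1 specs.
+{sha, rsa} = public_key:pkix_sign_types(?'sha1WithRSAEncryption').</code>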
</desc>
</func>
<func>
<name>pkix_verify(Cert, Key) -> boolean()</name>
- <fsummary> Verify pkix x.509 certificate signature.</fsummary>
+ <fsummary>Verifies PKIX x.509 certificate signature.</fsummary>
<type>
- <v>Cert = der_encode()</v>
+ <v>Cert = der_encoded()</v>
<v>Key = rsa_public_key() | dsa_public_key()</v>
</type>
<desc>
- <p> Verify PKIX x.509 certificate signature.</p>
+ <p>Verifies PKIX x.509 certificate signature.</p>
</desc>
</func>
<func>
<name>sign(Msg, DigestType, Key) -> binary()</name>
- <fsummary> Create digital signature.</fsummary>
+ <fsummary>Creates a digital signature.</fsummary>
<type>
<v>Msg = binary() | {digest,binary()}</v>
- <d>The msg is either the binary "plain text" data to be
- signed or it is the hashed value of "plain text" i.e. the
+ <d>The <c>Msg</c> is either the binary "plain text" data to be
+ signed or it is the hashed value of "plain text", that is, the
digest.</d>
<v>DigestType = rsa_digest_type() | dss_digest_type() | ecdsa_digest_type()</v>
<v>Key = rsa_private_key() | dsa_private_key() | ec_private_key()</v>
</type>
<desc>
- <p> Creates a digital signature.</p>
+ <p>Creates a digital signature.</p>
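+      <p>A small sketch (added here for illustration), assuming <c>PrivKey</c>
+      and <c>PubKey</c> are a matching RSA key pair decoded elsewhere:</p>
+      <code>
+%% Sign the plain data and verify the signature with the public key.
+Msg = list_to_binary("the quick brown fox"),
+Signature = public_key:sign(Msg, sha256, PrivKey),
+true = public_key:verify(Msg, sha256, Signature, PubKey).</code>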
</desc>
</func>
<func>
<name>ssh_decode(SshBin, Type) -> [{public_key(), Attributes::list()}]</name>
- <fsummary>Decodes a ssh file-binary. </fsummary>
+ <fsummary>Decodes an SSH file-binary.</fsummary>
<type>
<v>SshBin = binary()</v>
<d>Example {ok, SshBin} = file:read_file("known_hosts").</d>
- <v> Type = public_key | ssh_file()</v>
- <d>If <c>Type</c> is <c>public_key</c> the binary may be either
- a rfc4716 public key or a openssh public key.</d>
+ <v>Type = public_key | ssh_file()</v>
+ <d>If <c>Type</c> is <c>public_key</c> the binary can be either
+ an RFC4716 public key or an OpenSSH public key.</d>
</type>
<desc>
- <p> Decodes a ssh file-binary. In the case of know_hosts or
- auth_keys the binary may include one or more lines of the
+      <p>Decodes an SSH file-binary. In the case of <c>known_hosts</c> or
+ <c>auth_keys</c>, the binary can include one or more lines of the
file. Returns a list of public keys and their attributes, possible
attribute values depends on the file type represented by the
binary.
</p>
<taglist>
- <tag>rfc4716 attributes - see RFC 4716</tag>
- <item>{headers, [{string(), utf8_string()}]}</item>
- <tag>auth_key attributes - see man sshd </tag>
+ <tag>RFC4716 attributes - see RFC 4716.</tag>
+ <item><p>{headers, [{string(), utf8_string()}]}</p></item>
+ <tag>auth_key attributes - see manual page for sshd.</tag>
<item>{comment, string()}</item>
<item>{options, [string()]}</item>
- <item>{bits, integer()} - In ssh version 1 files</item>
- <tag>known_host attributes - see man sshd</tag>
+ <item><p>{bits, integer()} - In SSH version 1 files.</p></item>
+ <tag>known_host attributes - see manual page for sshd.</tag>
<item>{hostnames, [string()]}</item>
<item>{comment, string()}</item>
- <item>{bits, integer()} - In ssh version 1 files</item>
+ <item><p>{bits, integer()} - In SSH version 1 files.</p></item>
</taglist>
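+	  <p>A small sketch (assumed file name), collecting the host names
+	  from an OpenSSH <c>known_hosts</c> file:</p>
+	  <code>
+{ok, SshBin} = file:read_file("known_hosts"),
+Entries = public_key:ssh_decode(SshBin, known_hosts),
+%% Each entry is {Key, Attributes}; collect the host name lists.
+Hosts = lists:map(fun({_Key, Attrs}) ->
+                          proplists:get_value(hostnames, Attrs)
+                  end, Entries).</code>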
</desc>
@@ -666,16 +784,16 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
<func>
<name>ssh_encode([{Key, Attributes}], Type) -> binary()</name>
- <fsummary> Encodes a list of ssh file entries to a binary.</fsummary>
+ <fsummary>Encodes a list of SSH file entries to a binary.</fsummary>
<type>
<v>Key = public_key()</v>
<v>Attributes = list()</v>
<v>Type = ssh_file()</v>
</type>
<desc>
- <p>Encodes a list of ssh file entries (public keys and attributes) to a binary. Possible
- attributes depends on the file type, see <seealso
- marker="#ssh_decode-2"> ssh_decode/2 </seealso></p>
+ <p>Encodes a list of SSH file entries (public keys and attributes) to a binary. Possible
+ attributes depend on the file type, see <seealso
+ marker="#ssh_decode-2"> ssh_decode/2 </seealso>.</p>
</desc>
</func>
@@ -684,14 +802,14 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
<fsummary>Verifies a digital signature.</fsummary>
<type>
<v>Msg = binary() | {digest,binary()}</v>
- <d>The msg is either the binary "plain text" data
- or it is the hashed value of "plain text" i.e. the digest.</d>
+ <d>The <c>Msg</c> is either the binary "plain text" data
+ or it is the hashed value of "plain text", that is, the digest.</d>
<v>DigestType = rsa_digest_type() | dss_digest_type() | ecdsa_digest_type()</v>
<v>Signature = binary()</v>
<v>Key = rsa_public_key() | dsa_public_key() | ec_public_key()</v>
</type>
<desc>
- <p>Verifies a digital signature</p>
+      <p>Verifies a digital signature.</p>
</desc>
</func>
diff --git a/lib/public_key/doc/src/public_key_records.xml b/lib/public_key/doc/src/public_key_records.xml
index d3534846fa..fc2a74a353 100644
--- a/lib/public_key/doc/src/public_key_records.xml
+++ b/lib/public_key/doc/src/public_key_records.xml
@@ -5,7 +5,7 @@
<header>
<copyright>
<year>2008</year>
- <year>2014</year>
+ <year>2015</year>
<holder>Ericsson AB, All Rights Reserved</holder>
</copyright>
<legalnotice>
@@ -23,7 +23,7 @@
The Initial Developer of the Original Code is Ericsson AB.
</legalnotice>
- <title>Public key records</title>
+ <title>Public-Key Records</title>
<prepared>Ingela Anderton Andin</prepared>
<responsible></responsible>
<docno></docno>
@@ -34,28 +34,85 @@
<file>public_key_records.xml</file>
</header>
- <p>This chapter briefly describes Erlang records derived from ASN1
- specifications used to handle public and private keys.
- The intent is to describe the data types
- and not to specify the semantics of each component. For information on the
- semantics, please see the relevant standards and RFCs.</p>
+ <p>This chapter briefly describes Erlang records derived from ASN.1
+ specifications used to handle public key infrastructure.
+ The scope is to describe the data types of each component,
+ not the semantics. For information on the
+ semantics, refer to the relevant standards and RFCs linked in the sections below.</p>
<p>Use the following include directive to get access to the
- records and constant macros described in the following sections.</p>
+ records and constant macros described in the following sections:</p>
<code> -include_lib("public_key/include/public_key.hrl"). </code>
- <section>
- <title>Common Data Types</title>
+ <section>
+ <title>Data Types</title>
<p>Common non-standard Erlang
- data types used to described the record fields in the
- below sections are defined in <seealso
- marker="public_key">public key reference manual </seealso></p>
- </section>
+    data types used to describe the record fields in the
+    following sections, and which are not defined in the Public Key <seealso
+    marker="public_key">Reference Manual</seealso>,
+    follow here:</p>
+
+ <taglist>
+ <tag><c>time() =</c></tag>
+ <item><p><c>uct_time() | general_time()</c></p></item>
+
+ <tag><c>uct_time() =</c></tag>
+ <item><p><c>{utcTime, "YYMMDDHHMMSSZ"}</c></p></item>
+
+ <tag><c>general_time() =</c></tag>
+ <item><p><c>{generalTime, "YYYYMMDDHHMMSSZ"}</c></p></item>
+
+ <tag><c>general_name() =</c></tag>
+ <item><p><c>{rfc822Name, string()}</c></p>
+ <p><c>| {dNSName, string()}</c></p>
+ <p><c>| {x400Address, string()}</c></p>
+      <p><c>| {directoryName, {rdnSequence, [#'AttributeTypeAndValue'{}]}}</c></p>
+      <p><c>| {ediPartyName, special_string()}</c></p>
+      <p><c>| {ediPartyName, special_string(), special_string()}</c></p>
+ <p><c>| {uniformResourceIdentifier, string()}</c></p>
+ <p><c>| {ipAddress, string()}</c></p>
+ <p><c>| {registeredId, oid()}</c></p>
+ <p><c>| {otherName, term()}</c></p>
+ </item>
+
+ <tag><c>special_string() =</c></tag>
+ <item><p><c>{teletexString, string()}</c></p>
+ <p><c>| {printableString, string()}</c></p>
+ <p><c>| {universalString, string()}</c></p>
+ <p><c>| {utf8String, binary()}</c></p>
+ <p><c>| {bmpString, string()}</c></p>
+ </item>
+
+ <tag><c>dist_reason() =</c></tag>
+ <item><p><c>unused</c></p>
+ <p><c>| keyCompromise</c></p>
+ <p><c>| cACompromise</c></p>
+ <p><c>| affiliationChanged</c></p>
+ <p><c>| superseded</c></p>
+ <p><c>| cessationOfOperation</c></p>
+ <p><c>| certificateHold</c></p>
+ <p><c>| privilegeWithdrawn</c></p>
+ <p><c>| aACompromise</c></p>
+ </item>
+ <tag><c>OID_macro() =</c></tag>
+ <item><p><c>?OID_name()</c></p>
+ </item>
+
+ <tag><c>OID_name() =</c></tag>
+ <item><p><c>atom()</c></p>
+ </item>
+
+ </taglist>
+
+ </section>
+
<section>
- <title>RSA as defined by the PKCS-1 standard and <url href="http://www.ietf.org/rfc/rfc3447.txt"> RFC 3447 </url></title>
+ <title>RSA</title>
+ <p>Erlang representation of <url href="http://www.ietf.org/rfc/rfc3447.txt">
+ Rivest-Shamir-Adleman cryptosystem (RSA)</url> keys follows:</p>
<code>
#'RSAPublicKey'{
@@ -80,16 +137,13 @@
prime, % integer()
exponent, % integer()
coefficient % integer()
- }.
- </code>
+ }. </code>
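+    <p>For example (a sketch, assuming <c>PemBin</c> holds the contents of an
+    unencrypted RSA key file), the key components can be pattern matched
+    directly from the decoded record:</p>
+    <code>
+[RSAEntry] = public_key:pem_decode(PemBin),
+%% Bind the modulus and the exponents of the key.
+#'RSAPrivateKey'{modulus = N, publicExponent = E, privateExponent = D} =
+    public_key:pem_entry_decode(RSAEntry).</code>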
</section>
<section>
- <title>DSA as defined by
- <url href="http://csrc.nist.gov/publications/fips/fips186-3/fips_186-3.pdf"> Digital Signature Standard (NIST FIPS PUB 186-2) </url>
- </title>
-
+ <title>DSA</title>
+    <p>Erlang representation of <url href="http://www.ietf.org/rfc/rfc6979.txt">Digital Signature Algorithm (DSA)</url> keys follows:</p>
<code>
#'DSAPrivateKey',{
version, % integer()
@@ -104,18 +158,18 @@
p, % integer()
q, % integer()
g % integer()
- }.
- </code>
+ }. </code>
+
</section>
<section>
- <title>ECC (Elliptic Curve) <url href="http://www.ietf.org/rfc/rfc3447.txt"> RFC 5480 </url>
- </title>
+ <title>ECDSA </title>
+ <p>Erlang representation of <url href="http://www.ietf.org/rfc/rfc6979.txt">Elliptic Curve Digital Signature Algorithm (ECDSA)</url> keys follows:</p>
<code>
#'ECPrivateKey'{
version, % integer()
- privateKey, % octet_string()
+ privateKey, % binary()
parameters, % der_encoded() - {'EcpkParameters', #'ECParameters'{}} |
{'EcpkParameters', {namedCurve, oid()}} |
{'EcpkParameters', 'NULL'} % Inherited by CA
@@ -126,14 +180,14 @@
version, % integer()
fieldID, % #'FieldID'{}
curve, % #'Curve'{}
- base, % octet_string()
+ base, % binary()
order, % integer()
cofactor % integer()
}.
#'Curve'{
- a, % octet_string()
- b, % octet_string()
+ a, % binary()
+ b, % binary()
seed % bitstring() - optional
}.
@@ -144,10 +198,644 @@
}.
#'ECPoint'{
- point % octet_string() - the public key
- }.
-
- </code>
+ point % binary() - the public key
+ }.</code>
</section>
+ <section>
+ <title>PKIX Certificates</title>
+    <p>Erlang representation of PKIX certificates derived from ASN.1
+    specifications (see also <url href="http://www.ietf.org/rfc/rfc5280.txt">X.509 certificates (RFC 5280)</url>), also referred to as <c>plain</c> type, follows:</p>
+<code>
+#'Certificate'{
+ tbsCertificate, % #'TBSCertificate'{}
+ signatureAlgorithm, % #'AlgorithmIdentifier'{}
+ signature % bitstring()
+ }.
+
+#'TBSCertificate'{
+ version, % v1 | v2 | v3
+ serialNumber, % integer()
+ signature, % #'AlgorithmIdentifier'{}
+   issuer,               % {rdnSequence, [#'AttributeTypeAndValue'{}]}
+ validity, % #'Validity'{}
+   subject,              % {rdnSequence, [#'AttributeTypeAndValue'{}]}
+ subjectPublicKeyInfo, % #'SubjectPublicKeyInfo'{}
+ issuerUniqueID, % binary() | asn1_novalue
+ subjectUniqueID, % binary() | asn1_novalue
+ extensions % [#'Extension'{}]
+ }.
+
+#'AlgorithmIdentifier'{
+ algorithm, % oid()
+ parameters % der_encoded()
+ }.</code>
+
+<p>Erlang alternate representation of PKIX certificates, also referred to as <c>otp</c> type, follows:</p>
+
+<code>
+#'OTPCertificate'{
+ tbsCertificate, % #'OTPTBSCertificate'{}
+ signatureAlgorithm, % #'SignatureAlgorithm'
+ signature % bitstring()
+ }.
+
+#'OTPTBSCertificate'{
+ version, % v1 | v2 | v3
+ serialNumber, % integer()
+ signature, % #'SignatureAlgorithm'
+   issuer,               % {rdnSequence, [#'AttributeTypeAndValue'{}]}
+ validity, % #'Validity'{}
+   subject,              % {rdnSequence, [#'AttributeTypeAndValue'{}]}
+ subjectPublicKeyInfo, % #'OTPSubjectPublicKeyInfo'{}
+ issuerUniqueID, % binary() | asn1_novalue
+ subjectUniqueID, % binary() | asn1_novalue
+ extensions % [#'Extension'{}]
+ }.
+
+#'SignatureAlgorithm'{
+ algorithm, % id_signature_algorithm()
+ parameters % asn1_novalue | #'Dss-Parms'{}
+ }.</code>
+
+<p><c>id_signature_algorithm() = OID_macro()</c></p>
+
+<p>The available OID names are as follows:</p>
+<table>
+ <row>
+ <cell align="left" valign="middle"><em>OID Name</em></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-dsa-with-sha1</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-dsaWithSHA1 (ISO or OID to above)</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">md2WithRSAEncryption</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">md5WithRSAEncryption</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">sha1WithRSAEncryption</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">sha-1WithRSAEncryption (ISO or OID to above)</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">sha224WithRSAEncryption</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">sha256WithRSAEncryption</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">sha512WithRSAEncryption</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">ecdsa-with-SHA1</cell>
+ </row>
+ <tcaption>Signature Algorithm OIDs </tcaption>
+</table>
+
+<p>The data type <c>'AttributeTypeAndValue'</c> is represented as
+  the following Erlang record:</p>
+
+<code>
+#'AttributeTypeAndValue'{
+ type, % id_attributes()
+ value % term()
+ }.</code>
+
+<p>The attribute OID name atoms and their corresponding value types
+are as follows:</p>
+<table>
+ <row>
+ <cell align="left" valign="middle"><em>OID Name</em></cell>
+ <cell align="left" valign="middle"><em>Value Type</em></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-name</cell>
+ <cell align="left" valign="middle">special_string()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-surname</cell>
+ <cell align="left" valign="middle">special_string()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-givenName</cell>
+ <cell align="left" valign="middle">special_string()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-initials </cell>
+ <cell align="left" valign="middle">special_string()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-generationQualifier</cell>
+ <cell align="left" valign="middle">special_string()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-commonName</cell>
+ <cell align="left" valign="middle">special_string()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-localityName</cell>
+ <cell align="left" valign="middle">special_string()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-stateOrProvinceName</cell>
+ <cell align="left" valign="middle">special_string()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-organizationName</cell>
+ <cell align="left" valign="middle">special_string()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-title</cell>
+ <cell align="left" valign="middle">special_string()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-dnQualifier</cell>
+ <cell align="left" valign="middle">{printableString, string()}</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-countryName</cell>
+ <cell align="left" valign="middle">{printableString, string()}</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-serialNumber</cell>
+ <cell align="left" valign="middle">{printableString, string()}</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-at-pseudonym</cell>
+ <cell align="left" valign="middle">special_string()</cell>
+ </row>
+ <tcaption>Attribute OIDs</tcaption>
+</table>
+
+<p>The data types <c>'Validity'</c>, <c>'SubjectPublicKeyInfo'</c>, and
+<c>'SubjectPublicKeyInfoAlgorithm'</c> are represented as the following Erlang records:</p>
+
+<code>
+#'Validity'{
+ notBefore, % time()
+ notAfter % time()
+ }.
+
+#'SubjectPublicKeyInfo'{
+ algorithm, % #'AlgorithmIdentifier'{}
+ subjectPublicKey % binary()
+ }.
+
+#'SubjectPublicKeyInfoAlgorithm'{
+ algorithm, % id_public_key_algorithm()
+ parameters % public_key_params()
+ }.</code>
+
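+<p>A minimal sketch of how the raw subject public key can be picked out
+of a certificate decoded to the <c>plain</c> representation (assuming
+<c>Cert</c> is a <c>#'Certificate'{}</c> record as above):</p>
+
+<code>
+TBS    = Cert#'Certificate'.tbsCertificate,
+SPKI   = TBS#'TBSCertificate'.subjectPublicKeyInfo,
+KeyDer = SPKI#'SubjectPublicKeyInfo'.subjectPublicKey, % binary()</code>
+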
+<p>The public-key algorithm OID name atoms are as follows:</p>
+<table>
+ <row>
+ <cell align="left" valign="middle"><em>OID Name</em></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">rsaEncryption</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-dsa</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">dhpublicnumber</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-keyExchangeAlgorithm</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ecPublicKey</cell>
+ </row>
+ <tcaption>Public-Key Algorithm OIDs</tcaption>
+</table>
+
+<code>
+#'Extension'{
+ extnID, % id_extensions() | oid()
+ critical, % boolean()
+ extnValue % der_encoded()
+ }.</code>
+
+<p><c>id_extensions()</c> is defined in
+ <seealso marker="#StdCertExt">Standard Certificate Extensions</seealso>,
+ <seealso marker="#PrivIntExt">Private Internet Extensions</seealso>,
+ <seealso marker="#CRLCertExt">CRL Extensions</seealso> and
+ <seealso marker="#CRLEntryExt">CRL Entry Extensions</seealso>.
+</p>
+
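+<p>A minimal sketch of how a particular extension can be looked up in the
+<c>extensions</c> list of a decoded certificate (assuming <c>OtpCert</c>
+is an <c>#'OTPCertificate'{}</c> record, that the certificate carries the
+extension, and using the OID macros from the <c>public_key</c> include
+files):</p>
+
+<code>
+TBS  = OtpCert#'OTPCertificate'.tbsCertificate,
+Exts = TBS#'OTPTBSCertificate'.extensions,
+%% lists:keyfind/3 returns false if the extension is not present
+#'Extension'{extnValue = KeyUsage} =
+    lists:keyfind(?'id-ce-keyUsage', #'Extension'.extnID, Exts),</code>
+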
+</section>
+
+<section>
+ <marker id="StdCertExt"></marker>
+ <title>Standard Certificate Extensions</title>
+
+ <p>The standard certificate extensions OID name atoms and their
+ corresponding value types are as follows:</p>
+
+ <table>
+ <row>
+ <cell align="left" valign="middle"><em>OID Name</em></cell>
+ <cell align="left" valign="middle"><em>Value Type</em></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-authorityKeyIdentifier</cell>
+ <cell align="left" valign="middle">#'AuthorityKeyIdentifier'{}</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-subjectKeyIdentifier</cell>
+ <cell align="left" valign="middle">oid()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-keyUsage</cell>
+ <cell align="left" valign="middle">[key_usage()]</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-privateKeyUsagePeriod</cell>
+ <cell align="left" valign="middle">#'PrivateKeyUsagePeriod'{}</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-certificatePolicies</cell>
+ <cell align="left" valign="middle">#'PolicyInformation'{}</cell>
+ </row>
+
+ <row>
+ <cell align="left" valign="middle">id-ce-policyMappings</cell>
+ <cell align="left" valign="middle">#'PolicyMappings_SEQOF'{}</cell>
+ </row>
+
+ <row>
+ <cell align="left" valign="middle">id-ce-subjectAltName</cell>
+ <cell align="left" valign="middle">general_name()</cell>
+ </row>
+
+ <row>
+ <cell align="left" valign="middle">id-ce-issuerAltName</cell>
+ <cell align="left" valign="middle">general_name()</cell>
+ </row>
+
+ <row>
+ <cell align="left" valign="middle">id-ce-subjectDirectoryAttributes</cell>
+ <cell align="left" valign="middle"> [#'Attribute'{}]</cell>
+ </row>
+
+ <row>
+ <cell align="left" valign="middle">id-ce-basicConstraints</cell>
+ <cell align="left" valign="middle">#'BasicConstraints'{}</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-nameConstraints</cell>
+ <cell align="left" valign="middle">#'NameConstraints'{}</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-policyConstraints</cell>
+ <cell align="left" valign="middle">#'PolicyConstraints'{}</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-extKeyUsage</cell>
+ <cell align="left" valign="middle">[id_key_purpose()]</cell>
+ </row>
+
+ <row>
+ <cell align="left" valign="middle">id-ce-cRLDistributionPoints</cell>
+ <cell align="left" valign="middle">[#'DistributionPoint'{}]</cell>
+ </row>
+
+ <row>
+ <cell align="left" valign="middle">id-ce-inhibitAnyPolicy</cell>
+ <cell align="left" valign="middle">integer()</cell>
+ </row>
+
+ <row>
+ <cell align="left" valign="middle">id-ce-freshestCRL</cell>
+ <cell align="left" valign="middle">[#'DistributionPoint'{}]</cell>
+ </row>
+
+
+ <tcaption>Standard Certificate Extensions</tcaption>
+ </table>
+
+ <p>Here:</p>
+ <taglist>
+ <tag><c>key_usage()</c></tag>
+ <item>= <p><c>digitalSignature</c></p>
+ <p><c>| nonRepudiation</c></p>
+ <p><c>| keyEncipherment</c></p>
+ <p><c>| dataEncipherment</c></p>
+ <p><c>| keyAgreement</c></p>
+ <p><c>| keyCertSign</c></p>
+ <p><c>| cRLSign</c></p>
+ <p><c>| encipherOnly</c></p>
+ <p><c>| decipherOnly </c></p>
+ </item>
+ </taglist>
+
+ <p>And for <c>id_key_purpose()</c>:</p>
+
+<table>
+ <row>
+ <cell align="left" valign="middle"><em>OID Name</em></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-kp-serverAuth</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-kp-clientAuth</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-kp-codeSigning</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-kp-emailProtection</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-kp-timeStamping</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-kp-OCSPSigning</cell>
+ </row>
+ <tcaption>Key Purpose OIDs</tcaption>
+</table>
+
+ <code>
+#'AuthorityKeyIdentifier'{
+ keyIdentifier, % oid()
+ authorityCertIssuer, % general_name()
+ authorityCertSerialNumber % integer()
+ }.
+
+#'PrivateKeyUsagePeriod'{
+ notBefore, % general_time()
+ notAfter % general_time()
+ }.
+
+#'PolicyInformation'{
+ policyIdentifier, % oid()
+ policyQualifiers % [#'PolicyQualifierInfo'{}]
+ }.
+
+#'PolicyQualifierInfo'{
+ policyQualifierId, % oid()
+ qualifier % string() | #'UserNotice'{}
+ }.
+
+#'UserNotice'{
+ noticeRef, % #'NoticeReference'{}
+ explicitText % string()
+ }.
+
+#'NoticeReference'{
+ organization, % string()
+ noticeNumbers % [integer()]
+ }.
+
+#'PolicyMappings_SEQOF'{
+ issuerDomainPolicy, % oid()
+ subjectDomainPolicy % oid()
+ }.
+
+#'Attribute'{
+ type, % oid()
+ values % [der_encoded()]
+ }).
+
+#'BasicConstraints'{
+ cA, % boolean()
+ pathLenConstraint % integer()
+ }).
+
+#'NameConstraints'{
+ permittedSubtrees, % [#'GeneralSubtree'{}]
+ excludedSubtrees % [#'GeneralSubtree'{}]
+ }).
+
+#'GeneralSubtree'{
+ base, % general_name()
+ minimum, % integer()
+ maximum % integer()
+ }).
+
+#'PolicyConstraints'{
+ requireExplicitPolicy, % integer()
+ inhibitPolicyMapping % integer()
+ }).
+
+#'DistributionPoint'{
+ distributionPoint, % {fullName, [general_name()]} |
+                    %   {nameRelativeToCRLIssuer, [#'AttributeTypeAndValue'{}]}
+ reasons, % [dist_reason()]
+ cRLIssuer % [general_name()]
+ }).</code>
+
+</section>
+
+ <section>
+ <marker id="PrivIntExt"></marker>
+ <title>Private Internet Extensions</title>
+
+ <p>The private internet extensions OID name atoms and their corresponding value
+ types are as follows:</p>
+
+ <table>
+ <row>
+ <cell align="left" valign="middle"><em>OID Name</em></cell>
+ <cell align="left" valign="middle"><em>Value Type</em></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-pe-authorityInfoAccess</cell>
+ <cell align="left" valign="middle">[#'AccessDescription'{}]</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-pe-subjectInfoAccess</cell>
+ <cell align="left" valign="middle">[#'AccessDescription'{}]</cell>
+ </row>
+ <tcaption>Private Internet Extensions</tcaption>
+ </table>
+
+<code>
+#'AccessDescription'{
+ accessMethod, % oid()
+ accessLocation % general_name()
+ }).</code>
+
+ </section>
+
+<section>
+ <title>CRL and CRL Extensions Profile</title>
+
+ <p>The Erlang representation of the CRL and CRL extensions profile,
+ derived from ASN.1 specifications and RFC 5280, is as follows:</p>
+
+ <code>
+#'CertificateList'{
+ tbsCertList, % #'TBSCertList'{}
+ signatureAlgorithm, % #'AlgorithmIdentifier'{}
+ signature % bitstring()
+ }).
+
+#'TBSCertList'{
+ version, % v2 (if defined)
+ signature, % #'AlgorithmIdentifier'{}
+ issuer, % {rdnSequence, [#'AttributeTypeAndValue'{}]}
+ thisUpdate, % time()
+ nextUpdate, % time()
+ revokedCertificates, % [#'TBSCertList_revokedCertificates_SEQOF'{}]
+ crlExtensions % [#'Extension'{}]
+ }).
+
+#'TBSCertList_revokedCertificates_SEQOF'{
+ userCertificate, % integer()
+ revocationDate, % time()
+ crlEntryExtensions % [#'Extension'{}]
+ }).</code>
+
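+<p>A minimal sketch, assuming <c>DerCRL</c> is a DER-encoded CRL (for
+example the DER data of a <c>'CertificateList'</c> PEM entry) and with
+the printout abbreviated:</p>
+
+<code>
+1> CRL = public_key:der_decode('CertificateList', DerCRL).
+#'CertificateList'{tbsCertList = #'TBSCertList'{...}, ...}</code>
+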
+ <section>
+ <marker id="CRLCertExt"></marker>
+ <title>CRL Extensions</title>
+
+ <p>The CRL extensions OID name atoms and their corresponding value types are as follows:</p>
+
+
+ <table>
+ <row>
+ <cell align="left" valign="middle"><em>OID Name</em></cell>
+ <cell align="left" valign="middle"><em>Value Type</em></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-authorityKeyIdentifier</cell>
+ <cell align="left" valign="middle">#'AuthorityKeyIdentifier{}</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-issuerAltName</cell>
+ <cell align="left" valign="middle">{rdnSequence, [#AttributeTypeAndValue'{}]}</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-cRLNumber</cell>
+ <cell align="left" valign="middle">integer()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-deltaCRLIndicator</cell>
+ <cell align="left" valign="middle">integer()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-issuingDistributionPoint</cell>
+ <cell align="left" valign="middle">#'IssuingDistributionPoint'{}</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-freshestCRL</cell>
+ <cell align="left" valign="middle">[#'Distributionpoint'{}]</cell>
+ </row>
+
+ <tcaption>CRL Extensions</tcaption>
+ </table>
+
+ <p>Here, the data type <c>'IssuingDistributionPoint'</c> is represented as
+ the following Erlang record:</p>
+
+ <code>
+#'IssuingDistributionPoint'{
+ distributionPoint, % {fullName, [general_name()]} |
+                    %   {nameRelativeToCRLIssuer, [#'AttributeTypeAndValue'{}]}
+ onlyContainsUserCerts, % boolean()
+ onlyContainsCACerts, % boolean()
+ onlySomeReasons, % [dist_reason()]
+ indirectCRL, % boolean()
+ onlyContainsAttributeCerts % boolean()
+ }).</code>
+ </section>
+
+ <section>
+ <marker id="CRLEntryExt"></marker>
+ <title>CRL Entry Extensions</title>
+
+ <p>The CRL entry extensions OID name atoms and their corresponding value types are as follows:</p>
+
+ <table>
+ <row>
+ <cell align="left" valign="middle"><em>OID Name</em></cell>
+ <cell align="left" valign="middle"><em>Value Type</em></cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-cRLReason</cell>
+ <cell align="left" valign="middle">crl_reason()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-holdInstructionCode</cell>
+ <cell align="left" valign="middle">oid()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-invalidityDate</cell>
+ <cell align="left" valign="middle">general_time()</cell>
+ </row>
+ <row>
+ <cell align="left" valign="middle">id-ce-certificateIssuer</cell>
+ <cell align="left" valign="middle">general_name()</cell>
+ </row>
+ <tcaption>CRL Entry Extensions</tcaption>
+ </table>
+
+
+ <p>Here:</p>
+ <taglist>
+ <tag><c>crl_reason()</c></tag>
+ <item>= <p><c>unspecified</c></p>
+ <p><c>| keyCompromise</c></p>
+ <p><c>| cACompromise</c></p>
+ <p><c>| affiliationChanged</c></p>
+ <p><c>| superseded</c></p>
+ <p><c>| cessationOfOperation</c></p>
+ <p><c>| certificateHold</c></p>
+ <p><c>| removeFromCRL</c></p>
+ <p><c>| privilegeWithdrawn</c></p>
+ <p><c>| aACompromise</c></p>
+ </item>
+ </taglist>
+
+ </section>
+
+ <section>
+ <marker id="PKCS10"></marker>
+ <title>PKCS#10 Certification Request</title>
+ <p>The Erlang representation of a PKCS#10 certification request,
+ derived from the ASN.1 specification in RFC 2986, is as follows:</p>
+ <code>
+#'CertificationRequest'{
+   certificationRequestInfo, % #'CertificationRequestInfo'{}
+   signatureAlgorithm,       % #'CertificationRequest_signatureAlgorithm'{}
+   signature                 % bitstring()
+   }.
+
+#'CertificationRequestInfo'{
+   version,       % atom()
+   subject,       % {rdnSequence, [#'AttributeTypeAndValue'{}]}
+   subjectPKInfo, % #'CertificationRequestInfo_subjectPKInfo'{}
+   attributes     % [#'AttributePKCS-10'{}]
+   }.
+
+#'CertificationRequestInfo_subjectPKInfo'{
+   algorithm,        % #'CertificationRequestInfo_subjectPKInfo_algorithm'{}
+   subjectPublicKey  % bitstring()
+   }.
+
+#'CertificationRequestInfo_subjectPKInfo_algorithm'{
+   algorithm,  % oid()
+   parameters  % der_encoded()
+   }.
+
+#'CertificationRequest_signatureAlgorithm'{
+   algorithm,  % oid()
+   parameters  % der_encoded()
+   }.
+
+#'AttributePKCS-10'{
+   type,   % oid()
+   values  % [der_encoded()]
+   }.</code>
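+ <p>A minimal sketch, assuming <c>DerCSR</c> is a DER-encoded PKCS#10
+ request, that the <c>'CertificationRequest'</c> ASN.1 type is available
+ to <c>public_key:der_decode/2</c>, and with the printout abbreviated:</p>
+ <code>
+1> CSR = public_key:der_decode('CertificationRequest', DerCSR).
+#'CertificationRequest'{certificationRequestInfo = #'CertificationRequestInfo'{...}, ...}</code>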
+ </section>
+</section>
</chapter>
diff --git a/lib/public_key/doc/src/ref_man.xml b/lib/public_key/doc/src/ref_man.xml
index b7078891d4..9c80cf4b9f 100644
--- a/lib/public_key/doc/src/ref_man.xml
+++ b/lib/public_key/doc/src/ref_man.xml
@@ -31,8 +31,8 @@
<file>ref_man.xml</file>
</header>
<description>
- <p> Provides functions to handle public key infrastructure
- from RFC 3280 (X.509 certificates) and some parts of the PKCS-standard.
+ <p>The <c>public_key</c> application provides functions to handle public-key infrastructure
+ from RFC 3280 (X.509 certificates) and parts of the PKCS standard.
</p>
</description>
<xi:include href="public_key.xml"/>
diff --git a/lib/public_key/doc/src/using_public_key.xml b/lib/public_key/doc/src/using_public_key.xml
index 450bd7e35f..03e4bedf3d 100644
--- a/lib/public_key/doc/src/using_public_key.xml
+++ b/lib/public_key/doc/src/using_public_key.xml
@@ -22,48 +22,50 @@
</legalnotice>
<title>Getting Started</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
<file>using_public_key.xml</file>
</header>
- <section>
- <title>General information</title>
+ <p>This section describes examples of how to use the
+ Public Key API. Keys and certificates used in the following
+ sections are generated only for testing the Public Key
+ application.</p>
- <p> This chapter is dedicated to showing some
- examples of how to use the public_key API. Keys and certificates
- used in the following sections are generated only for the purpose
- of testing the public key application.</p>
+ <p>Some shell printouts in the following examples
+ are abbreviated for increased readability.</p>
- <p>Note that some shell printouts, in the following examples,
- have been abbreviated for increased readability.</p>
+
+ <section>
+ <title>PEM Files</title>
+ <p>Public-key data (keys, certificates, and so on) can be stored in
+ Privacy Enhanced Mail (PEM) format.
+ The PEM files have the following structure:</p>
- </section>
+ <code>
+ &lt;text&gt;
+ -----BEGIN &lt;SOMETHING&gt;-----
+ &lt;Attribute&gt; : &lt;Value&gt;
+ &lt;Base64 encoded DER data&gt;
+ -----END &lt;SOMETHING&gt;-----
+ &lt;text&gt;</code>
- <section>
- <title>PEM files</title>
- <p> Public key data (keys, certificates etc) may be stored in PEM format. PEM files
- comes from the Private Enhanced Mail Internet standard and has a
- structure that looks like this:</p>
-
- <code>&lt;text&gt;
- -----BEGIN &lt;SOMETHING&gt;-----
- &lt;Attribute&gt; : &lt;Value&gt;
- &lt;Base64 encoded DER data&gt;
- -----END &lt;SOMETHING&gt;-----
- &lt;text&gt;</code>
-
- <p>A file can contain several BEGIN/END blocks. Text lines between
- blocks are ignored. Attributes, if present, are currently ignored except
- for <c>Proc-Type</c> and <c>DEK-Info</c> that are used when the DER data is
- encrypted.</p>
+ <p>A file can contain several <c>BEGIN/END</c> blocks. Text lines between
+ blocks are ignored. Attributes, if present, are ignored except
+ for <c>Proc-Type</c> and <c>DEK-Info</c>, which are used when <c>DER</c>
+ data is encrypted.</p>
<section>
- <title>DSA private key</title>
+ <title>DSA Private Key</title>
+ <p>A DSA private key can look as follows:</p>
+ <note><p>File handling is not done by the Public Key application.</p></note>
- <p>Note file handling is not done by the public_key application. </p>
<code>1> {ok, PemBin} = file:read_file("dsa.pem").
{ok,&lt;&lt;"-----BEGIN DSA PRIVATE KEY-----\nMIIBuw"...&gt;&gt;}</code>
- <p>This PEM file only has one entry, a private DSA key.</p>
+ <p>The following PEM file has only one entry, a private DSA key:</p>
<code>2> [DSAEntry] = public_key:pem_decode(PemBin).
[{'DSAPrivateKey',&lt;&lt;48,130,1,187,2,1,0,2,129,129,0,183,
179,230,217,37,99,144,157,21,228,204,
@@ -80,21 +82,20 @@
</section>
<section>
- <title>RSA private key encrypted with a password.</title>
+ <title>RSA Private Key with Password</title>
+ <p>An RSA private key encrypted with a password can look as follows:</p>
<code>1> {ok, PemBin} = file:read_file("rsa.pem").
{ok,&lt;&lt;"Bag Attribut"...&gt;&gt;}</code>
- <p>This PEM file only has one entry a private RSA key.</p>
+ <p>The following PEM file has only one entry, a private RSA key:</p>
<code>2>[RSAEntry] = public_key:pem_decode(PemBin).
[{'RSAPrivateKey',&lt;&lt;224,108,117,203,152,40,15,77,128,126,
221,195,154,249,85,208,202,251,109,
119,120,57,29,89,19,9,...&gt;&gt;,
- {"DES-EDE3-CBC",&lt;&lt;"kÙeø¼pµL"&gt;&gt;}}]
+ {"DES-EDE3-CBC",&lt;&lt;"kÙeø¼pµL"&gt;&gt;}}]</code>
- </code>
-
- <p>In this example the password is "abcd1234".</p>
+ <p>In the following example, the password is <c>"abcd1234"</c>:</p>
<code>3> Key = public_key:pem_entry_decode(RSAEntry, "abcd1234").
#'RSAPrivateKey'{version = 'two-prime',
modulus = 1112355156729921663373...2737107,
@@ -110,11 +111,12 @@
<section>
<title>X509 Certificates</title>
+ <p>The following is an example of X509 certificates:</p>
<code>1> {ok, PemBin} = file:read_file("cacerts.pem").
{ok,&lt;&lt;"-----BEGIN CERTIFICATE-----\nMIIC7jCCAl"...&gt;&gt;}</code>
- <p>This file includes two certificates</p>
+ <p>The following file includes two certificates:</p>
<code>2> [CertEntry1, CertEntry2] = public_key:pem_decode(PemBin).
[{'Certificate',&lt;&lt;48,130,2,238,48,130,2,87,160,3,2,1,2,2,
9,0,230,145,97,214,191,2,120,150,48,13,
@@ -124,7 +126,7 @@
1,48,13,6,9,42,134,72,134,247,...&gt;&gt;>,
not_encrypted}]</code>
- <p>Certificates may of course be decoded as usual ... </p>
+ <p>Certificates can be decoded as usual:</p>
<code>2> Cert = public_key:pem_entry_decode(CertEntry1).
#'Certificate'{
tbsCertificate =
@@ -210,24 +212,24 @@
algorithm = {1,2,840,113549,1,1,5},
parameters = &lt;&lt;5,0&gt;&gt;},
signature =
- {0,
- &lt;&lt;163,186,7,163,216,152,63,47,154,234,139,73,154,96,120,
- 165,2,52,196,195,109,167,192,...&gt;&gt;}}
-</code>
-
- <p> Parts of certificates can be decoded with
- public_key:der_decode/2 using that parts ASN.1 type.
- Although application specific certificate
- extension requires application specific ASN.1 decode/encode-functions.
- Example, the first value of the rdnSequence above is of ASN.1 type
- 'X520CommonName'. ({2,5,4,3} = ?id-at-commonName)</p>
+ &lt;&lt;163,186,7,163,216,152,63,47,154,234,139,73,154,96,120,
+ 165,2,52,196,195,109,167,192,...&gt;&gt;}</code>
+
+ <p>Parts of certificates can be decoded with
+ <c>public_key:der_decode/2</c>, using the ASN.1 type of that part.
+ However, an application-specific certificate extension requires
+ application-specific ASN.1 decode/encode-functions.
+ In the previous example, the first value of <c>rdnSequence</c> is
+ of ASN.1 type <c>'X520CommonName'</c> (<c>{2,5,4,3} = ?id-at-commonName</c>):</p>
<code>public_key:der_decode('X520CommonName', &lt;&lt;19,8,101,114,108,97,110,103,67,65&gt;&gt;).
{printableString,"erlangCA"}</code>
- <p>... but certificates can also be decode using the pkix_decode_cert/2 that
- can customize and recursively decode standard parts of a certificate.</p>
+ <p>However, certificates can also be decoded using <c>pkix_decode_cert/2</c>,
+ which can customize and recursively decode standard parts of a certificate:</p>
+
<code>3>{_, DerCert, _} = CertEntry1.</code>
+
<code>4> public_key:pkix_decode_cert(DerCert, otp).
#'OTPCertificate'{
tbsCertificate =
@@ -314,30 +316,27 @@
algorithm = {1,2,840,113549,1,1,5},
parameters = 'NULL'},
signature =
- {0,
&lt;&lt;163,186,7,163,216,152,63,47,154,234,139,73,154,96,120,
- 165,2,52,196,195,109,167,192,...&gt;&gt;}}
-</code>
+ 165,2,52,196,195,109,167,192,...&gt;&gt;}</code>
- <p>This call is equivalent to public_key:pem_entry_decode(CertEntry1)</p>
+ <p>This call is equivalent to <c>public_key:pem_entry_decode(CertEntry1)</c>:</p>
<code>5> public_key:pkix_decode_cert(DerCert, plain).
-#'Certificate'{ ...}
-</code>
+#'Certificate'{ ...}</code>
</section>
<section>
- <title>Encoding public key data to PEM format</title>
+ <title>Encoding Public-Key Data to PEM Format</title>
- <p>If you have public key data and and want to create a PEM file
- you can do that by calling the functions
- public_key:pem_entry_encode/2 and pem_encode/1 and then saving the
- result to a file. For example assume you have PubKey =
- 'RSAPublicKey'{} then you can create a PEM-"RSA PUBLIC KEY" file
- (ASN.1 type 'RSAPublicKey') or a PEM-"PUBLIC KEY" file
- ('SubjectPublicKeyInfo' ASN.1 type).</p>
+ <p>If you have public-key data and want to create a PEM file,
+ this can be done by calling the functions
+ <c>public_key:pem_entry_encode/2</c> and <c>pem_encode/1</c>, and
+ saving the result to a file. For example, assume that you have
+ <c>PubKey = #'RSAPublicKey'{}</c>. Then you can create a PEM-"RSA PUBLIC KEY"
+ file (ASN.1 type <c>'RSAPublicKey'</c>) or a PEM-"PUBLIC KEY" file
+ (<c>'SubjectPublicKeyInfo'</c> ASN.1 type).</p>
- <p> The second element of the PEM-entry will be the ASN.1 DER encoded
- key data.</p>
+ <p>The second element of the PEM-entry is the ASN.1 <c>DER</c> encoded
+ key data:</p>
<code>1> PemEntry = public_key:pem_entry_encode('RSAPublicKey', RSAPubKey).
{'RSAPublicKey', &lt;&lt;48,72,...&gt;&gt;, not_encrypted}
@@ -348,7 +347,7 @@
3> file:write_file("rsa_pub_key.pem", PemBin).
ok</code>
- <p> or </p>
+ <p>or:</p>
<code>1> PemEntry = public_key:pem_entry_encode('SubjectPublicKeyInfo', RSAPubKey).
{'SubjectPublicKeyInfo', &lt;&lt;48,92...&gt;&gt;, not_encrypted}
@@ -363,96 +362,108 @@ ok</code>
</section>
<section>
- <title>RSA public key cryptography </title>
- <p> Suppose you have PrivateKey = #'RSAPrivateKey{}' and the
- plaintext Msg = binary() and the corresponding public key
- PublicKey = #'RSAPublicKey'{} then you can do the following.
- Note that you normally will only do one of the encrypt or
- decrypt operations and the peer will do the other.
- </p>
-
- <p>Encrypt with the private key </p>
+ <title>RSA Public-Key Cryptography</title>
+ <p>Suppose you have the following private key and a corresponding public key:</p>
+ <list type="bulleted">
+ <item><c>PrivateKey = #'RSAPrivateKey'{}</c> and
+ the plaintext <c>Msg = binary()</c></item>
+ <item><c>PublicKey = #'RSAPublicKey'{}</c>
+ </item>
+ </list>
+ <p>Then you can proceed as follows:</p>
+
+ <p>Encrypt with the private key:</p>
<code>RsaEncrypted = public_key:encrypt_private(Msg, PrivateKey),
Msg = public_key:decrypt_public(RsaEncrypted, PublicKey),</code>
- <p>Encrypt with the public key </p>
+ <p>Encrypt with the public key:</p>
<code>RsaEncrypted = public_key:encrypt_public(Msg, PublicKey),
Msg = public_key:decrypt_private(RsaEncrypted, PrivateKey),</code>
+
+ <note><p>You normally do only one of the encrypt or decrypt operations,
+ and the peer does the other. This is normally used in legacy applications
+ as a primitive digital signature.
+ </p></note>
+
</section>
<section>
- <title>Digital signatures</title>
+ <title>Digital Signatures</title>
- <p> Suppose you have PrivateKey = #'RSAPrivateKey{}'or
- #'DSAPrivateKey'{} and the plaintext Msg = binary() and the
- corresponding public key PublicKey = #'RSAPublicKey'{} or
- {integer(), #'DssParams'{}} then you can do the following. Note
- that you normally will only do one of the sign or verify operations
- and the peer will do the other. </p>
+ <p>Suppose you have the following private key and a corresponding public key:</p>
+
+ <list type="bulleted">
+ <item><c>PrivateKey = #'RSAPrivateKey'{}</c> or
+ <c>#'DSAPrivateKey'{}</c> and the plaintext <c>Msg = binary()</c></item>
+ <item><c>PublicKey = #'RSAPublicKey'{}</c> or
+ <c>{integer(), #'DssParams'{}}</c></item>
+ </list>
+ <p>Then you can proceed as follows:</p>
<code>Signature = public_key:sign(Msg, sha, PrivateKey),
true = public_key:verify(Msg, sha, Signature, PublicKey),</code>
- <p>It might be appropriate to calculate the message digest before
- calling sign or verify and then you can use the none as second
- argument.</p>
+ <note><p>You normally do only one of the sign or verify operations,
+ and the peer does the other.</p></note>
+
+ <p>It can be appropriate to calculate the message digest before
+ calling <c>sign</c> or <c>verify</c>, and then use <c>none</c> as
+ second argument:</p>
<code>Digest = crypto:sha(Msg),
Signature = public_key:sign(Digest, none, PrivateKey),
-true = public_key:verify(Digest, none, Signature, PublicKey),
- </code>
+true = public_key:verify(Digest, none, Signature, PublicKey),</code>
</section>
<section>
- <title>SSH files</title>
+ <title>SSH Files</title>
<p>SSH typically uses PEM files for private keys but has its
- own file format for storing public keys. The erlang public_key
- application can be used to parse the content of SSH public key files.</p>
+ own file format for storing public keys. The <c>public_key</c>
+ application can be used to parse the content of SSH public-key files.</p>
<section>
- <title> RFC 4716 SSH public key files </title>
+ <title>RFC 4716 SSH Public-Key Files</title>
<p>RFC 4716 SSH files looks confusingly like PEM files,
- but there are some differences.</p>
+ but there are some differences:</p>
<code>1> {ok, SshBin} = file:read_file("ssh2_rsa_pub").
{ok, &lt;&lt;"---- BEGIN SSH2 PUBLIC KEY ----\nAAAA"...&gt;&gt;}</code>
- <p>This is equivalent to calling public_key:ssh_decode(SshBin, rfc4716_public_key).
+ <p>This is equivalent to calling <c>public_key:ssh_decode(SshBin, rfc4716_public_key)</c>:
</p>
<code>2> public_key:ssh_decode(SshBin, public_key).
[{#'RSAPublicKey'{modulus = 794430685...91663,
- publicExponent = 35}, []}]
-</code>
+ publicExponent = 35}, []}]</code>
</section>
<section>
- <title> Openssh public key format </title>
+ <title>OpenSSH Public-Key Format</title>
+ <p>OpenSSH public-key format looks as follows:</p>
<code>1> {ok, SshBin} = file:read_file("openssh_dsa_pub").
{ok,&lt;&lt;"ssh-dss AAAAB3Nza"...&gt;&gt;}</code>
- <p>This is equivalent to calling public_key:ssh_decode(SshBin, openssh_public_key).
+ <p>This is equivalent to calling <c>public_key:ssh_decode(SshBin, openssh_public_key)</c>:
</p>
<code>2> public_key:ssh_decode(SshBin, public_key).
[{{15642692...694280725,
#'Dss-Parms'{p = 17291273936...696123221,
q = 1255626590179665817295475654204371833735706001853,
g = 10454211196...480338645}},
- [{comment,"dhopson@VMUbuntu-DSH"}]}]
-</code>
+ [{comment,"dhopson@VMUbuntu-DSH"}]}]</code>
</section>
<section>
- <title> Known hosts - openssh format</title>
-
+ <title>Known Hosts - OpenSSH Format</title>
+ <p>Known hosts - OpenSSH format looks as follows:</p>
<code>1> {ok, SshBin} = file:read_file("known_hosts").
{ok,&lt;&lt;"hostname.domain.com,192.168.0.1 ssh-rsa AAAAB...&gt;&gt;}</code>
- <p>Returns a list of public keys and their related attributes
- each pair of key and attributes corresponds to one entry in
- the known hosts file.</p>
+ <p>The following call returns a list of public keys and their related
+ attributes. Each pair of key and attributes corresponds to one entry in
+ the known hosts file:</p>
<code>2> public_key:ssh_decode(SshBin, known_hosts).
[{#'RSAPublicKey'{modulus = 1498979460408...72721699,
@@ -461,19 +472,19 @@ true = public_key:verify(Digest, none, Signature, PublicKey),
{#'RSAPublicKey'{modulus = 14989794604088...2721699,
publicExponent = 35},
[{comment,"[email protected]"},
- {hostnames,["|1|BWO5qDxk/cFH0wa05JLdHn+j6xQ=|rXQvIxh5cDD3C43k5DPDamawVNA="]}]}]
-</code>
+ {hostnames,["|1|BWO5qDxk/cFH0wa05JLdHn+j6xQ=|rXQvIxh5cDD3C43k5DPDamawVNA="]}]}]</code>
</section>
<section>
- <title> Authorized keys - openssh format</title>
+ <title>Authorized Keys - OpenSSH Format</title>
+ <p>Authorized keys - OpenSSH format looks as follows:</p>
<code>1> {ok, SshBin} = file:read_file("auth_keys").
{ok, &lt;&lt;"command=\"dump /home\",no-pty,no-port-forwarding ssh-rsa AAA...&gt;&gt;}</code>
- <p>Returns a list of public keys and their related attributes
- each pair of key and attributes corresponds to one entry in
- the authorized key file.</p>
+ <p>The following call returns a list of public keys and their related
+ attributes. Each pair of key and attributes corresponds to one entry in
+ the authorized keys file:</p>
<code>2> public_key:ssh_decode(SshBin, auth_keys).
[{#'RSAPublicKey'{modulus = 794430685...691663,
@@ -485,16 +496,15 @@ true = public_key:verify(Digest, none, Signature, PublicKey),
#'Dss-Parms'{p = 17291273936185...763696123221,
q = 1255626590179665817295475654204371833735706001853,
g = 10454211195705...60511039590076780999046480338645}},
- [{comment,"dhopson@VMUbuntu-DSH"}]}]
-</code>
+ [{comment,"dhopson@VMUbuntu-DSH"}]}]</code>
</section>
<section>
- <title> Creating an SSH file from public key data </title>
+ <title>Creating an SSH File from Public-Key Data</title>
<p>If you got a public key <c>PubKey</c> and a related list of
attributes <c>Attributes</c> as returned
- by ssh_decode/2 you can create a new ssh file for example</p>
+ by <c>ssh_decode/2</c>, you can create a new SSH file, for example:</p>
<code>N> SshBin = public_key:ssh_encode([{PubKey, Attributes}], openssh_public_key),
&lt;&lt;"ssh-rsa "...&gt;&gt;
N+1> file:write_file("id_rsa.pub", SshBin).
diff --git a/lib/public_key/src/pubkey_cert.erl b/lib/public_key/src/pubkey_cert.erl
index ae517ca642..1aa9c6764b 100644
--- a/lib/public_key/src/pubkey_cert.erl
+++ b/lib/public_key/src/pubkey_cert.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,8 +28,9 @@
validate_issuer/4, validate_names/6,
validate_extensions/4,
normalize_general_name/1, is_self_signed/1,
- is_issuer/2, issuer_id/2, is_fixed_dh_cert/1,
- verify_data/1, verify_fun/4, select_extension/2, match_name/3,
+ is_issuer/2, issuer_id/2, distribution_points/1,
+ is_fixed_dh_cert/1, verify_data/1, verify_fun/4,
+ select_extension/2, match_name/3,
extensions_list/1, cert_auth_key_id/1, time_str_2_gregorian_sec/1]).
-define(NULL, 0).
@@ -272,6 +273,16 @@ issuer_id(Otpcert, self) ->
SerialNr = TBSCert#'OTPTBSCertificate'.serialNumber,
{ok, {SerialNr, normalize_general_name(Issuer)}}.
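+%% Returns the value of the id-ce-cRLDistributionPoints extension of
+%% Otpcert, or [] if the certificate does not carry that extension.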
+distribution_points(Otpcert) ->
+ TBSCert = Otpcert#'OTPCertificate'.tbsCertificate,
+ Extensions = extensions_list(TBSCert#'OTPTBSCertificate'.extensions),
+ case select_extension(?'id-ce-cRLDistributionPoints', Extensions) of
+ undefined ->
+ [];
+ #'Extension'{extnValue = Value} ->
+ Value
+ end.
+
%%--------------------------------------------------------------------
-spec is_fixed_dh_cert(#'OTPCertificate'{}) -> boolean().
%%
@@ -296,7 +307,9 @@ is_fixed_dh_cert(#'OTPCertificate'{tbsCertificate =
%% --------------------------------------------------------------------
verify_fun(Otpcert, Result, UserState0, VerifyFun) ->
case VerifyFun(Otpcert, Result, UserState0) of
- {valid,UserState} ->
+ {valid, UserState} ->
+ UserState;
+ {valid_peer, UserState} ->
UserState;
{fail, Reason} ->
case Reason of
@@ -432,7 +445,7 @@ extensions_list(Extensions) ->
Extensions.
extract_verify_data(OtpCert, DerCert) ->
- {_, Signature} = OtpCert#'OTPCertificate'.signature,
+ Signature = OtpCert#'OTPCertificate'.signature,
SigAlgRec = OtpCert#'OTPCertificate'.signatureAlgorithm,
SigAlg = SigAlgRec#'SignatureAlgorithm'.algorithm,
PlainText = encoded_tbs_cert(DerCert),
diff --git a/lib/public_key/src/pubkey_cert_records.erl b/lib/public_key/src/pubkey_cert_records.erl
index 9a8e49f265..f412d5862e 100644
--- a/lib/public_key/src/pubkey_cert_records.erl
+++ b/lib/public_key/src/pubkey_cert_records.erl
@@ -217,8 +217,8 @@ namedCurves(brainpoolP512t1) -> ?'brainpoolP512t1'.
%%% SubjectPublicKey
decode_supportedPublicKey(#'OTPSubjectPublicKeyInfo'{algorithm= PA =
- #'PublicKeyAlgorithm'{algorithm=Algo},
- subjectPublicKey = {0,SPK0}}) ->
+ #'PublicKeyAlgorithm'{algorithm=Algo},
+ subjectPublicKey = SPK0}) ->
Type = supportedPublicKeyAlgorithms(Algo),
SPK = case Type of
'ECPoint' -> #'ECPoint'{point = SPK0};
@@ -238,7 +238,7 @@ encode_supportedPublicKey(#'OTPSubjectPublicKeyInfo'{algorithm= PA =
{ok, SPK1} = 'OTP-PUB-KEY':encode(Type, SPK0),
SPK1
end,
- #'OTPSubjectPublicKeyInfo'{subjectPublicKey = {0,SPK}, algorithm=PA}.
+ #'OTPSubjectPublicKeyInfo'{subjectPublicKey = SPK, algorithm=PA}.
%%% Extensions
diff --git a/lib/public_key/src/pubkey_crl.erl b/lib/public_key/src/pubkey_crl.erl
index f0df4bc3f2..0010725da9 100644
--- a/lib/public_key/src/pubkey_crl.erl
+++ b/lib/public_key/src/pubkey_crl.erl
@@ -41,10 +41,10 @@ validate(OtpCert, OtherDPCRLs, DP, {DerCRL, CRL}, {DerDeltaCRL, DeltaCRL},
CRLIssuer = TBSCRL#'TBSCertList'.issuer,
AltNames = case pubkey_cert:select_extension(?'id-ce-subjectAltName',
TBSCert#'OTPTBSCertificate'.extensions) of
- undefined ->
- [];
- Ext ->
- Ext#'Extension'.extnValue
+ #'Extension'{extnValue = Value} ->
+ Value;
+ _ ->
+ []
end,
revoked_status(DP, IDP, {directoryName, CRLIssuer},
[ {directoryName, CertIssuer} | AltNames], SerialNumber, Revoked,
@@ -473,7 +473,7 @@ check_crl_num(_,_) ->
extension_value(Extension, ExtType, Extensions) ->
case pubkey_cert:select_extension(Extension, Extensions) of
#'Extension'{extnValue = Value} ->
- public_key:der_decode(ExtType, list_to_binary(Value));
+ public_key:der_decode(ExtType, iolist_to_binary(Value));
_ ->
undefined
end.
@@ -565,7 +565,7 @@ verify_crl_signature(CRL, DerCRL, Key, KeyParams) ->
{Key, KeyParams})
end.
extract_crl_verify_data(CRL, DerCRL) ->
- {0, Signature} = CRL#'CertificateList'.signature,
+ Signature = CRL#'CertificateList'.signature,
#'AlgorithmIdentifier'{algorithm = SigAlg} =
CRL#'CertificateList'.signatureAlgorithm,
PlainText = encoded_tbs_crl(DerCRL),
diff --git a/lib/public_key/src/pubkey_pbe.erl b/lib/public_key/src/pubkey_pbe.erl
index 521a32189d..8c61bc71d4 100644
--- a/lib/public_key/src/pubkey_pbe.erl
+++ b/lib/public_key/src/pubkey_pbe.erl
@@ -106,9 +106,8 @@ pbdkdf2(Password, Salt, Count, DerivedKeyLen, Prf, PrfHash, PrfOutputLen)->
%%--------------------------------------------------------------------
decrypt_parameters(#'EncryptedPrivateKeyInfo_encryptionAlgorithm'{
algorithm = Oid, parameters = Param}) ->
- decrypt_parameters(Oid, Param).
+ decrypt_parameters(Oid, decode_handle_open_type_wrapper(Param)).
-
%%--------------------------------------------------------------------
-spec encrypt_parameters({Cipher::string(), Params::term()}) ->
#'EncryptedPrivateKeyInfo_encryptionAlgorithm'{}.
@@ -129,7 +128,7 @@ password_to_key_and_iv(Password, _, #'PBES2-params'{} = Params) ->
password_to_key_and_iv(Password, _Cipher, {#'PBEParameter'{salt = Salt,
iterationCount = Count}, Hash}) ->
<<Key:8/binary, IV:8/binary, _/binary>>
- = pbdkdf1(Password, erlang:iolist_to_binary(Salt), Count, Hash),
+ = pbdkdf1(Password, Salt, Count, Hash),
{Key, IV};
password_to_key_and_iv(Password, Cipher, Salt) ->
KeyLen = derived_key_length(Cipher, undefined),
@@ -151,15 +150,15 @@ do_pbdkdf1(Prev, Count, Acc, Hash) ->
do_pbdkdf1(Result, Count-1 , <<Result/binary, Acc/binary>>, Hash).
iv(#'PBES2-params_encryptionScheme'{algorithm = Algo,
- parameters = ASNIV}) when (Algo == ?'desCBC') or
- (Algo == ?'des-EDE3-CBC') ->
- %% This is an so called open ASN1-type that in this
- %% case will be an octet-string of length 8
- <<?ASN1_OCTET_STR_TAG, ?IV_LEN, IV:?IV_LEN/binary>> = ASNIV,
+ parameters = ASN1IV})
+ when (Algo == ?'desCBC') or
+ (Algo == ?'des-EDE3-CBC') ->
+ <<?ASN1_OCTET_STR_TAG, ?IV_LEN, IV:?IV_LEN/binary>> = decode_handle_open_type_wrapper(ASN1IV),
IV;
iv(#'PBES2-params_encryptionScheme'{algorithm = ?'rc2CBC',
- parameters = ASN1IV}) ->
- {ok, #'RC2-CBC-Parameter'{iv = IV}} = 'PKCS-FRAME':decode('RC2-CBC-Parameter', ASN1IV),
+ parameters = ASN1IV}) ->
+ {ok, #'RC2-CBC-Parameter'{iv = IV}}
+ = 'PKCS-FRAME':decode('RC2-CBC-Parameter', decode_handle_open_type_wrapper(ASN1IV)),
iolist_to_binary(IV).
blocks(1, N, Index, Password, Salt, Count, Prf, PrfHash, PrfLen, Acc) ->
@@ -200,13 +199,13 @@ encrypt_parameters(_Cipher, #'PBES2-params'{} = Params) ->
{ok, Der} ='PKCS-FRAME':encode('PBES2-params', Params),
#'EncryptedPrivateKeyInfo_encryptionAlgorithm'{
algorithm = ?'id-PBES2',
- parameters = Der};
+ parameters = encode_handle_open_type_wrapper(Der)};
encrypt_parameters(Cipher, {#'PBEParameter'{} = Params, Hash}) ->
{ok, Der} ='PKCS-FRAME':encode('PBEParameter', Params),
#'EncryptedPrivateKeyInfo_encryptionAlgorithm'{
algorithm = pbe1_oid(Cipher, Hash),
- parameters = Der}.
+ parameters = encode_handle_open_type_wrapper(Der)}.
pbe1_oid("RC2-CBC", sha) ->
?'pbeWithSHA1AndRC2-CBC';
@@ -277,3 +276,8 @@ cipher(#'PBES2-params_encryptionScheme'{algorithm = ?'rc2CBC'}) ->
ceiling(Float) ->
erlang:round(Float + 0.5).
+
+decode_handle_open_type_wrapper({asn1_OPENTYPE, Type}) ->
+ Type.
+encode_handle_open_type_wrapper(Type) ->
+ {asn1_OPENTYPE, Type}.
diff --git a/lib/public_key/src/pubkey_pem.erl b/lib/public_key/src/pubkey_pem.erl
index 98881c4a6a..a62658923f 100644
--- a/lib/public_key/src/pubkey_pem.erl
+++ b/lib/public_key/src/pubkey_pem.erl
@@ -143,8 +143,7 @@ decode_encrypted_private_keyinfo(Der) ->
encryptedData = Data} =
public_key:der_decode('EncryptedPrivateKeyInfo', Der),
DecryptParams = pubkey_pbe:decrypt_parameters(AlgorithmInfo),
- {'PrivateKeyInfo', iolist_to_binary(Data), DecryptParams}.
-
+ {'PrivateKeyInfo', Data, DecryptParams}.
encode_encrypted_private_keyinfo(EncData, EncryptParmams) ->
AlgorithmInfo = pubkey_pbe:encrypt_parameters(EncryptParmams),
diff --git a/lib/public_key/src/public_key.erl b/lib/public_key/src/public_key.erl
index 1bbf4ef416..261054637d 100644
--- a/lib/public_key/src/public_key.erl
+++ b/lib/public_key/src/public_key.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -46,7 +46,11 @@
pkix_normalize_name/1,
pkix_path_validation/3,
ssh_decode/2, ssh_encode/2,
- pkix_crls_validate/3
+ pkix_crls_validate/3,
+ pkix_dist_point/1,
+ pkix_dist_points/1,
+ pkix_crl_verify/2,
+ pkix_crl_issuer/1
]).
-export_type([public_key/0, private_key/0, pem_entry/0,
@@ -110,13 +114,13 @@ pem_encode(PemEntries) when is_list(PemEntries) ->
iolist_to_binary(pubkey_pem:encode(PemEntries)).
%%--------------------------------------------------------------------
--spec pem_entry_decode(pem_entry(), [string()]) -> term().
+-spec pem_entry_decode(pem_entry(), string()) -> term().
%
%% Description: Decodes a pem entry. pem_decode/1 returns a list of
%% pem entries.
%%--------------------------------------------------------------------
pem_entry_decode({'SubjectPublicKeyInfo', Der, _}) ->
- {_, {'AlgorithmIdentifier', AlgId, Params}, {0, Key0}}
+ {_, {'AlgorithmIdentifier', AlgId, Params}, Key0}
= der_decode('SubjectPublicKeyInfo', Der),
KeyType = pubkey_cert_records:supportedPublicKeyAlgorithms(AlgId),
case KeyType of
@@ -142,14 +146,16 @@ pem_entry_decode({Asn1Type, CryptDer, {Cipher, #'PBES2-params'{}}} = PemEntry,
pem_entry_decode({Asn1Type, CryptDer, {Cipher, {#'PBEParameter'{},_}}} = PemEntry,
Password) when is_atom(Asn1Type) andalso
is_binary(CryptDer) andalso
- is_list(Cipher) ->
+ is_list(Cipher) andalso
+ is_list(Password) ->
do_pem_entry_decode(PemEntry, Password);
pem_entry_decode({Asn1Type, CryptDer, {Cipher, Salt}} = PemEntry,
Password) when is_atom(Asn1Type) andalso
is_binary(CryptDer) andalso
is_list(Cipher) andalso
is_binary(Salt) andalso
- ((erlang:byte_size(Salt) == 8) or (erlang:byte_size(Salt) == 16)) ->
+ ((erlang:byte_size(Salt) == 8) or (erlang:byte_size(Salt) == 16)) andalso
+ is_list(Password) ->
do_pem_entry_decode(PemEntry, Password).
@@ -162,14 +168,14 @@ pem_entry_decode({Asn1Type, CryptDer, {Cipher, Salt}} = PemEntry,
pem_entry_encode('SubjectPublicKeyInfo', Entity=#'RSAPublicKey'{}) ->
Der = der_encode('RSAPublicKey', Entity),
Spki = {'SubjectPublicKeyInfo',
- {'AlgorithmIdentifier', ?'rsaEncryption', ?DER_NULL}, {0, Der}},
+ {'AlgorithmIdentifier', ?'rsaEncryption', ?DER_NULL}, Der},
pem_entry_encode('SubjectPublicKeyInfo', Spki);
pem_entry_encode('SubjectPublicKeyInfo',
{DsaInt, Params=#'Dss-Parms'{}}) when is_integer(DsaInt) ->
KeyDer = der_encode('DSAPublicKey', DsaInt),
ParamDer = der_encode('DSAParams', {params, Params}),
Spki = {'SubjectPublicKeyInfo',
- {'AlgorithmIdentifier', ?'id-dsa', ParamDer}, {0, KeyDer}},
+ {'AlgorithmIdentifier', ?'id-dsa', ParamDer}, KeyDer},
pem_entry_encode('SubjectPublicKeyInfo', Spki);
pem_entry_encode(Asn1Type, Entity) when is_atom(Asn1Type) ->
Der = der_encode(Asn1Type, Entity),
@@ -228,7 +234,7 @@ der_encode(Asn1Type, Entity) when (Asn1Type == 'PrivateKeyInfo') or
(Asn1Type == 'EncryptedPrivateKeyInfo') ->
try
{ok, Encoded} = 'PKCS-FRAME':encode(Asn1Type, Entity),
- iolist_to_binary(Encoded)
+ Encoded
catch
error:{badmatch, {error, _}} = Error ->
erlang:error(Error)
@@ -237,7 +243,7 @@ der_encode(Asn1Type, Entity) when (Asn1Type == 'PrivateKeyInfo') or
der_encode(Asn1Type, Entity) when is_atom(Asn1Type) ->
try
{ok, Encoded} = 'OTP-PUB-KEY':encode(Asn1Type, Entity),
- iolist_to_binary(Encoded)
+ Encoded
catch
error:{badmatch, {error, _}} = Error ->
erlang:error(Error)
@@ -385,7 +391,7 @@ generate_key(#'ECParameters'{} = Params) ->
compute_key(#'ECPoint'{point = Point}, #'ECPrivateKey'{privateKey = PrivKey,
parameters = Param}) ->
ECCurve = ec_curve_spec(Param),
- crypto:compute_key(ecdh, Point, list_to_binary(PrivKey), ECCurve).
+ crypto:compute_key(ecdh, Point, PrivKey, ECCurve).
compute_key(PubKey, PrivKey, #'DHParameter'{prime = P, base = G}) ->
crypto:compute_key(dh, PubKey, PrivKey, [P, G]).
@@ -440,7 +446,7 @@ sign(DigestOrPlainText, sha, #'DSAPrivateKey'{p = P, q = Q, g = G, x = X}) ->
sign(DigestOrPlainText, DigestType, #'ECPrivateKey'{privateKey = PrivKey,
parameters = Param}) ->
ECCurve = ec_curve_spec(Param),
- crypto:sign(ecdsa, DigestType, DigestOrPlainText, [list_to_binary(PrivKey), ECCurve]);
+ crypto:sign(ecdsa, DigestType, DigestOrPlainText, [PrivKey, ECCurve]);
%% Backwards compatible
sign(Digest, none, #'DSAPrivateKey'{} = Key) ->
@@ -452,22 +458,51 @@ sign(Digest, none, #'DSAPrivateKey'{} = Key) ->
| dsa_public_key() | ec_public_key()) -> boolean().
%% Description: Verifies a digital signature.
%%--------------------------------------------------------------------
-verify(DigestOrPlainText, DigestType, Signature,
- #'RSAPublicKey'{modulus = Mod, publicExponent = Exp}) ->
- crypto:verify(rsa, DigestType, DigestOrPlainText, Signature,
- [Exp, Mod]);
-
-verify(DigestOrPlaintext, DigestType, Signature, {#'ECPoint'{point = Point}, Param}) ->
- ECCurve = ec_curve_spec(Param),
- crypto:verify(ecdsa, DigestType, DigestOrPlaintext, Signature, [Point, ECCurve]);
+verify(DigestOrPlainText, DigestType, Signature, Key) when is_binary(Signature) ->
+ do_verify(DigestOrPlainText, DigestType, Signature, Key);
+verify(_,_,_,_) ->
+ %% If Signature is a bitstring and not a binary we know already at this
+ %% point that the signature is invalid.
+ false.
-%% Backwards compatibility
-verify(Digest, none, Signature, {_, #'Dss-Parms'{}} = Key ) ->
- verify({digest,Digest}, sha, Signature, Key);
-
-verify(DigestOrPlainText, sha = DigestType, Signature, {Key, #'Dss-Parms'{p = P, q = Q, g = G}})
- when is_integer(Key), is_binary(Signature) ->
- crypto:verify(dss, DigestType, DigestOrPlainText, Signature, [P, Q, G, Key]).
+%%--------------------------------------------------------------------
+-spec pkix_dist_point(der_encoded() | #'OTPCertificate'{}) ->
+ #'DistributionPoint'{}.
+%% Description: Creates a distribution point for CRLs issued by the same issuer as Cert.
+%%--------------------------------------------------------------------
+pkix_dist_point(OtpCert) when is_binary(OtpCert) ->
+ pkix_dist_point(pkix_decode_cert(OtpCert, otp));
+pkix_dist_point(OtpCert) ->
+ Issuer = public_key:pkix_normalize_name(
+ pubkey_cert_records:transform(
+ OtpCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.issuer, encode)),
+
+ TBSCert = OtpCert#'OTPCertificate'.tbsCertificate,
+ Extensions = pubkey_cert:extensions_list(TBSCert#'OTPTBSCertificate'.extensions),
+ AltNames = case pubkey_cert:select_extension(?'id-ce-issuerAltName', Extensions) of
+ undefined ->
+ [];
+ #'Extension'{extnValue = Value} ->
+ Value
+ end,
+ Point = {fullName, [{directoryName, Issuer} | AltNames]},
+ #'DistributionPoint'{cRLIssuer = asn1_NOVALUE,
+ reasons = asn1_NOVALUE,
+ distributionPoint = Point}.
+%%--------------------------------------------------------------------
+-spec pkix_dist_points(der_encoded() | #'OTPCertificate'{}) ->
+ [#'DistributionPoint'{}].
+%% Description: Extracts the distribution points specified in the certificate's extensions.
+%%--------------------------------------------------------------------
+pkix_dist_points(OtpCert) when is_binary(OtpCert) ->
+ pkix_dist_points(pkix_decode_cert(OtpCert, otp));
+pkix_dist_points(OtpCert) ->
+ Value = pubkey_cert:distribution_points(OtpCert),
+ lists:foldl(fun(Point, Acc0) ->
+ DistPoint = pubkey_cert_records:transform(Point, decode),
+ [DistPoint | Acc0]
+ end,
+ [], Value).
%%--------------------------------------------------------------------
-spec pkix_sign(#'OTPTBSCertificate'{},
@@ -485,7 +520,7 @@ pkix_sign(#'OTPTBSCertificate'{signature =
Signature = sign(Msg, DigestType, Key),
Cert = #'OTPCertificate'{tbsCertificate= TBSCert,
signatureAlgorithm = SigAlg,
- signature = {0, Signature}
+ signature = Signature
},
pkix_encode('OTPCertificate', Cert, otp).
@@ -511,6 +546,25 @@ pkix_verify(DerCert, Key = {#'ECPoint'{}, _})
verify(PlainText, DigestType, Signature, Key).
%%--------------------------------------------------------------------
+-spec pkix_crl_verify(CRL::binary() | #'CertificateList'{}, Cert::binary() | #'OTPCertificate'{}) -> boolean().
+%%
+%% Description: Verify that Cert is the CRL signer.
+%%--------------------------------------------------------------------
+pkix_crl_verify(CRL, Cert) when is_binary(CRL) ->
+ pkix_crl_verify(der_decode('CertificateList', CRL), Cert);
+pkix_crl_verify(CRL, Cert) when is_binary(Cert) ->
+ pkix_crl_verify(CRL, pkix_decode_cert(Cert, otp));
+pkix_crl_verify(#'CertificateList'{} = CRL, #'OTPCertificate'{} = Cert) ->
+ TBSCert = Cert#'OTPCertificate'.tbsCertificate,
+ PublicKeyInfo = TBSCert#'OTPTBSCertificate'.subjectPublicKeyInfo,
+ PublicKey = PublicKeyInfo#'OTPSubjectPublicKeyInfo'.subjectPublicKey,
+ AlgInfo = PublicKeyInfo#'OTPSubjectPublicKeyInfo'.algorithm,
+ PublicKeyParams = AlgInfo#'PublicKeyAlgorithm'.parameters,
+ pubkey_crl:verify_crl_signature(CRL,
+ der_encode('CertificateList', CRL),
+ PublicKey, PublicKeyParams).
+
+%%--------------------------------------------------------------------
-spec pkix_is_issuer(Cert :: der_encoded()| #'OTPCertificate'{} | #'CertificateList'{},
IssuerCert :: der_encoded()|
#'OTPCertificate'{}) -> boolean().
@@ -564,17 +618,27 @@ pkix_is_fixed_dh_cert(Cert) when is_binary(Cert) ->
%
%% Description: Returns the issuer id.
%%--------------------------------------------------------------------
-pkix_issuer_id(#'OTPCertificate'{} = OtpCert, self) ->
- pubkey_cert:issuer_id(OtpCert, self);
-
-pkix_issuer_id(#'OTPCertificate'{} = OtpCert, other) ->
- pubkey_cert:issuer_id(OtpCert, other);
-
-pkix_issuer_id(Cert, Signed) when is_binary(Cert) ->
+pkix_issuer_id(#'OTPCertificate'{} = OtpCert, Signed) when (Signed == self) or
+ (Signed == other) ->
+ pubkey_cert:issuer_id(OtpCert, Signed);
+pkix_issuer_id(Cert, Signed) when is_binary(Cert) ->
OtpCert = pkix_decode_cert(Cert, otp),
pkix_issuer_id(OtpCert, Signed).
%%--------------------------------------------------------------------
+-spec pkix_crl_issuer(CRL::binary()| #'CertificateList'{}) ->
+ {rdnSequence,
+ [#'AttributeTypeAndValue'{}]}.
+%
+%% Description: Returns the issuer.
+%%--------------------------------------------------------------------
+pkix_crl_issuer(CRL) when is_binary(CRL) ->
+ pkix_crl_issuer(der_decode('CertificateList', CRL));
+pkix_crl_issuer(#'CertificateList'{} = CRL) ->
+ pubkey_cert_records:transform(
+ CRL#'CertificateList'.tbsCertList#'TBSCertList'.issuer, decode).
+
+%%--------------------------------------------------------------------
-spec pkix_normalize_name({rdnSequence,
[#'AttributeTypeAndValue'{}]}) ->
{rdnSequence,
@@ -679,6 +743,23 @@ ssh_encode(Entries, Type) when is_list(Entries),
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
+do_verify(DigestOrPlainText, DigestType, Signature,
+ #'RSAPublicKey'{modulus = Mod, publicExponent = Exp}) ->
+ crypto:verify(rsa, DigestType, DigestOrPlainText, Signature,
+ [Exp, Mod]);
+
+do_verify(DigestOrPlaintext, DigestType, Signature, {#'ECPoint'{point = Point}, Param}) ->
+ ECCurve = ec_curve_spec(Param),
+ crypto:verify(ecdsa, DigestType, DigestOrPlaintext, Signature, [Point, ECCurve]);
+
+%% Backwards compatibility
+do_verify(Digest, none, Signature, {_, #'Dss-Parms'{}} = Key ) ->
+ verify({digest,Digest}, sha, Signature, Key);
+
+do_verify(DigestOrPlainText, sha = DigestType, Signature, {Key, #'Dss-Parms'{p = P, q = Q, g = G}})
+ when is_integer(Key), is_binary(Signature) ->
+ crypto:verify(dss, DigestType, DigestOrPlainText, Signature, [P, Q, G, Key]).
+
do_pem_entry_encode(Asn1Type, Entity, CipherInfo, Password) ->
Der = der_encode(Asn1Type, Entity),
DecryptDer = pubkey_pem:cipher(Der, CipherInfo, Password),
@@ -911,13 +992,14 @@ ec_generate_key(Params) ->
ec_curve_spec( #'ECParameters'{fieldID = FieldId, curve = PCurve, base = Base, order = Order, cofactor = CoFactor }) ->
Field = {pubkey_cert_records:supportedCurvesTypes(FieldId#'FieldID'.fieldType),
FieldId#'FieldID'.parameters},
- Curve = {erlang:list_to_binary(PCurve#'Curve'.a), erlang:list_to_binary(PCurve#'Curve'.b), none},
- {Field, Curve, erlang:list_to_binary(Base), Order, CoFactor};
+ Curve = {PCurve#'Curve'.a, PCurve#'Curve'.b, none},
+ {Field, Curve, Base, Order, CoFactor};
ec_curve_spec({namedCurve, OID}) ->
pubkey_cert_records:namedCurves(OID).
ec_key({PubKey, PrivateKey}, Params) ->
#'ECPrivateKey'{version = 1,
- privateKey = binary_to_list(PrivateKey),
+ privateKey = PrivateKey,
parameters = Params,
- publicKey = {0, PubKey}}.
+ publicKey = PubKey}.
+
diff --git a/lib/public_key/test/erl_make_certs.erl b/lib/public_key/test/erl_make_certs.erl
index 5926794ca8..668924c03e 100644
--- a/lib/public_key/test/erl_make_certs.erl
+++ b/lib/public_key/test/erl_make_certs.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2011-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2011-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -114,7 +114,7 @@ verify_signature(DerEncodedCert, DerKey, _KeyParams) ->
#'DSAPrivateKey'{p=P, q=Q, g=G, y=Y} ->
public_key:pkix_verify(DerEncodedCert, {Y, #'Dss-Parms'{p=P, q=Q, g=G}});
#'ECPrivateKey'{version = _Version, privateKey = _PrivKey,
- parameters = Params, publicKey = {0, PubKey}} ->
+ parameters = Params, publicKey = PubKey} ->
public_key:pkix_verify(DerEncodedCert, {#'ECPoint'{point = PubKey}, Params})
end.
@@ -204,7 +204,7 @@ issuer_der(Issuer) ->
Subject.
subject(undefined, IsRootCA) ->
- User = if IsRootCA -> "RootCA"; true -> user() end,
+ User = if IsRootCA -> "RootCA"; true -> os:getenv("USER", "test_user") end,
Opts = [{email, User ++ "@erlang.org"},
{name, User},
{city, "Stockholm"},
@@ -215,14 +215,6 @@ subject(undefined, IsRootCA) ->
subject(Opts, _) ->
subject(Opts).
-user() ->
- case os:getenv("USER") of
- false ->
- "test_user";
- User ->
- User
- end.
-
subject(SubjectOpts) when is_list(SubjectOpts) ->
Encode = fun(Opt) ->
{Type,Value} = subject_enc(Opt),
@@ -267,9 +259,8 @@ default_extensions(Exts) ->
Filter = fun({Key, _}, D) -> lists:keydelete(Key, 1, D) end,
Exts ++ lists:foldl(Filter, Def, Exts).
-
-
extension({_, undefined}) -> [];
+
extension({basic_constraints, Data}) ->
case Data of
default ->
@@ -286,9 +277,11 @@ extension({basic_constraints, Data}) ->
#'Extension'{extnID = ?'id-ce-basicConstraints',
extnValue = Data}
end;
+
extension({key_usage, default}) ->
#'Extension'{extnID = ?'id-ce-keyUsage',
extnValue = [keyCertSign], critical = true};
+
extension({Id, Data, Critical}) ->
#'Extension'{extnID = Id, extnValue = Data, critical = Critical}.
@@ -305,7 +298,7 @@ publickey(#'DSAPrivateKey'{p=P, q=Q, g=G, y=Y}) ->
publickey(#'ECPrivateKey'{version = _Version,
privateKey = _PrivKey,
parameters = Params,
- publicKey = {0, PubKey}}) ->
+ publicKey = PubKey}) ->
Algo = #'PublicKeyAlgorithm'{algorithm= ?'id-ecPublicKey', parameters=Params},
#'OTPSubjectPublicKeyInfo'{algorithm = Algo,
subjectPublicKey = #'ECPoint'{point = PubKey}}.
@@ -330,14 +323,14 @@ sign_algorithm(#'RSAPrivateKey'{}, Opts) ->
{Type, 'NULL'};
sign_algorithm(#'DSAPrivateKey'{p=P, q=Q, g=G}, _Opts) ->
{?'id-dsa-with-sha1', {params,#'Dss-Parms'{p=P, q=Q, g=G}}};
-sign_algorithm(#'ECPrivateKey'{}, Opts) ->
+sign_algorithm(#'ECPrivateKey'{parameters = Parms}, Opts) ->
Type = case proplists:get_value(digest, Opts, sha1) of
sha1 -> ?'ecdsa-with-SHA1';
sha512 -> ?'ecdsa-with-SHA512';
sha384 -> ?'ecdsa-with-SHA384';
sha256 -> ?'ecdsa-with-SHA256'
end,
- {Type, 'NULL'}.
+ {Type, Parms}.
make_key(rsa, _Opts) ->
%% (OBS: for testing only)
@@ -414,9 +407,9 @@ gen_ec2(CurveId) ->
{PubKey, PrivKey} = crypto:generate_key(ecdh, CurveId),
#'ECPrivateKey'{version = 1,
- privateKey = binary_to_list(PrivKey),
+ privateKey = PrivKey,
parameters = {namedCurve, pubkey_cert_records:namedCurves(CurveId)},
- publicKey = {0, PubKey}}.
+ publicKey = PubKey}.
%% See fips_186-3.pdf
dsa_search(T, P0, Q, Iter) when Iter > 0 ->
@@ -485,5 +478,3 @@ der_to_pem(File, Entries) ->
PemBin = public_key:pem_encode(Entries),
file:write_file(File, PemBin).
-
-
diff --git a/lib/public_key/test/public_key_SUITE.erl b/lib/public_key/test/public_key_SUITE.erl
index 163f5f4413..40c28e86b3 100644
--- a/lib/public_key/test/public_key_SUITE.erl
+++ b/lib/public_key/test/public_key_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -42,7 +42,7 @@ all() ->
encrypt_decrypt,
{group, sign_verify},
pkix, pkix_countryname, pkix_emailaddress, pkix_path_validation,
- pkix_iso_rsa_oid, pkix_iso_dsa_oid].
+ pkix_iso_rsa_oid, pkix_iso_dsa_oid, pkix_crl].
groups() ->
[{pem_decode_encode, [], [dsa_pem, rsa_pem, encrypted_pem,
@@ -712,6 +712,42 @@ pkix_iso_dsa_oid(Config) when is_list(Config) ->
{_, dsa} = public_key:pkix_sign_types(SigAlg#'SignatureAlgorithm'.algorithm).
%%--------------------------------------------------------------------
+
+pkix_crl() ->
+ [{doc, "test pkix_crl_* functions"}].
+
+pkix_crl(Config) when is_list(Config) ->
+ Datadir = ?config(data_dir, Config),
+ {ok, PemCRL} = file:read_file(filename:join(Datadir, "idp_crl.pem")),
+ [{_, CRL, _}] = public_key:pem_decode(PemCRL),
+
+ {ok, IDPPemCert} = file:read_file(filename:join(Datadir, "idp_cert.pem")),
+ [{_, IDPCert, _}] = public_key:pem_decode(IDPPemCert),
+
+ {ok, SignPemCert} = file:read_file(filename:join(Datadir, "crl_signer.pem")),
+ [{_, SignCert, _}] = public_key:pem_decode(SignPemCert),
+
+ OTPIDPCert = public_key:pkix_decode_cert(IDPCert, otp),
+ OTPSignCert = public_key:pkix_decode_cert(SignCert, otp),
+ ERLCRL = public_key:der_decode('CertificateList',CRL),
+
+ {rdnSequence,_} = public_key:pkix_crl_issuer(CRL),
+ {rdnSequence,_} = public_key:pkix_crl_issuer(ERLCRL),
+
+ true = public_key:pkix_crl_verify(CRL, SignCert),
+ true = public_key:pkix_crl_verify(ERLCRL, OTPSignCert),
+
+ [#'DistributionPoint'{}|_] = public_key:pkix_dist_points(IDPCert),
+ [#'DistributionPoint'{}|_] = public_key:pkix_dist_points(OTPIDPCert),
+
+ #'DistributionPoint'{cRLIssuer = asn1_NOVALUE,
+ reasons = asn1_NOVALUE,
+ distributionPoint = Point} = public_key:pkix_dist_point(IDPCert),
+ #'DistributionPoint'{cRLIssuer = asn1_NOVALUE,
+ reasons = asn1_NOVALUE,
+ distributionPoint = Point} = public_key:pkix_dist_point(OTPIDPCert).
+
+%%--------------------------------------------------------------------
%% Internal functions ------------------------------------------------
%%--------------------------------------------------------------------
asn1_encode_decode({Asn1Type, Der, not_encrypted} = Entry) ->
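The new pkix_crl test above exercises the CRL helpers in public_key. A minimal usage sketch, assuming "crl.pem" holds a CRL and "signer.pem" the issuer's certificate (file names are placeholders):

    {ok, CrlPem} = file:read_file("crl.pem"),
    [{'CertificateList', DerCRL, not_encrypted}] = public_key:pem_decode(CrlPem),
    {ok, CertPem} = file:read_file("signer.pem"),
    [{'Certificate', DerCert, not_encrypted}] = public_key:pem_decode(CertPem),
    {rdnSequence, _} = public_key:pkix_crl_issuer(DerCRL),
    true = public_key:pkix_crl_verify(DerCRL, DerCert).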
diff --git a/lib/public_key/test/public_key_SUITE_data/crl_signer.pem b/lib/public_key/test/public_key_SUITE_data/crl_signer.pem
new file mode 100644
index 0000000000..d77f86b45d
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/crl_signer.pem
@@ -0,0 +1,25 @@
+-----BEGIN CERTIFICATE-----
+MIID8zCCAtugAwIBAgIJAKU8w89SmyPyMA0GCSqGSIb3DQEBBAUAMIGGMREwDwYD
+VQQDEwhlcmxhbmdDQTETMBEGA1UECxMKRXJsYW5nIE9UUDEUMBIGA1UEChMLRXJp
+Y3Nzb24gQUIxEjAQBgNVBAcTCVN0b2NraG9sbTELMAkGA1UEBhMCU0UxJTAjBgkq
+hkiG9w0BCQEWFnBldGVyQGVyaXguZXJpY3Nzb24uc2UwHhcNMTUwMjIzMTMyNTMx
+WhcNMTUwMzI1MTMyNTMxWjCBhjERMA8GA1UEAxMIZXJsYW5nQ0ExEzARBgNVBAsT
+CkVybGFuZyBPVFAxFDASBgNVBAoTC0VyaWNzc29uIEFCMRIwEAYDVQQHEwlTdG9j
+a2hvbG0xCzAJBgNVBAYTAlNFMSUwIwYJKoZIhvcNAQkBFhZwZXRlckBlcml4LmVy
+aWNzc29uLnNlMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyzwkmKzy
+WTLOafHmgqZVENdt3OYECPA4BamVKyEdi8zgXI0S71wzPZ+XvuGbHDTBzsTHf71L
+xRQgoG30tv5jqWSlfh8iyS6fO+FHxBKd+xg6hLJXk5PCUa5X1D4BO8B4aapEzev+
+T8+pTaOLeVPdfGfKp0yWF50eCpdSF/kMCCIIA8QNSahfcwuLbEEzUNZof6YPZBNm
+e+XUMXCjpb/mU7krfu8nLaspG1HgxQqErEEBzGJE7mguqSVETK/xpGXEMTNIuj8N
+ziFrfqAezDob3z48xHUaHKZRBb9NIxWIjVxkTYaqOtf9UNCT96CHeZ7rk9iNscQu
+USabMIamFY8cNQIDAQABo2IwYDAPBgNVHRMBAf8EBTADAQH/MAsGA1UdDwQEAwIB
+BjAdBgNVHQ4EFgQUm2M3f6UBEIsHI1HIvphbBz60RsAwIQYDVR0RBBowGIEWcGV0
+ZXJAZXJpeC5lcmljc3Nvbi5zZTANBgkqhkiG9w0BAQQFAAOCAQEAPmm0V36HZySF
+BoV03DGyeFUSeMtO0DO058NaXXv2VNPpUXT72Mt1ovXNvVFcReggb01polF7TFFI
+4NRb6qbsLPxny29Clf/9WKY4zDhbb2MIy8yueoOyyeNQtrzY+iQjo4q9U+Aa6xj1
+pxmG1URDfOmCgX33ItCrZXFGa4ic0HrbWgJMDNo4lSOiio8bl3IYN4vBcobRfhDs
+pw5jochE5ZpPh4i76Pg6D99EFkNaLyQioWEu4n2OxR0EBSFLJkVJQ0alUx18AKio
+bje+h5nzRgTm5HApYzcorF57KfUKPDaW1Q6tRckRyHApueDuK8p49ITQE71lmkLc
+ywxoJMrNnA==
+-----END CERTIFICATE-----
+
diff --git a/lib/public_key/test/public_key_SUITE_data/idp_cert.pem b/lib/public_key/test/public_key_SUITE_data/idp_cert.pem
new file mode 100644
index 0000000000..c2afc56a3a
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/idp_cert.pem
@@ -0,0 +1,30 @@
+-----BEGIN CERTIFICATE-----
+MIIFGjCCBAKgAwIBAgIBAzANBgkqhkiG9w0BAQQFADCBgzEOMAwGA1UEAxMFb3Rw
+Q0ExEzARBgNVBAsTCkVybGFuZyBPVFAxFDASBgNVBAoTC0VyaWNzc29uIEFCMQsw
+CQYDVQQGEwJTRTESMBAGA1UEBxMJU3RvY2tob2xtMSUwIwYJKoZIhvcNAQkBFhZw
+ZXRlckBlcml4LmVyaWNzc29uLnNlMB4XDTE1MDIyMzEzMjUzMVoXDTI1MDEwMTEz
+MjUzMVowgYQxDzANBgNVBAMTBnNlcnZlcjETMBEGA1UECxMKRXJsYW5nIE9UUDEU
+MBIGA1UEChMLRXJpY3Nzb24gQUIxCzAJBgNVBAYTAlNFMRIwEAYDVQQHEwlTdG9j
+a2hvbG0xJTAjBgkqhkiG9w0BCQEWFnBldGVyQGVyaXguZXJpY3Nzb24uc2UwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDK8EDdNZEebdfxb57e3UA8uTCq
+TsFtJv5tyjnZtSFsGDrwrZYjRMOCJFh8Yv6Ddq4mZiAvUCJxMzW4zVzraMmmQC8z
+Hi3xQyuIq2UCW3ESxLvchCcuSjNOWke0z+rXHzA8Yz9y1fqhhO6AF8q5lLwGo+VQ
+sJkVV8QwB9UXZN4pAc3zTeqZkGCrNY/ZIgtCrk4jw7sY/gumS8BjhXCYGyFZRDvX
+jzIXQx6jn7/2huNbEAiBXbYYAMd7OEwhpHHAWOVA6g+/TNydgRO3W4xVmlEhDpYs
+bnMV/Tq570E1bhz1XWb642K2MnxI74g8FXmhN6x6P8d4zU/eFcs+gxO0X6KzAgMB
+AAGjggGUMIIBkDAJBgNVHRMEAjAAMAsGA1UdDwQEAwIF4DAdBgNVHQ4EFgQUo8dr
+DDQXK25dB6qMY8dNIjAKIPEwgbMGA1UdIwSBqzCBqIAU5YMIq7A5eYQhQsHsc/XC
+7GeZ+kuhgYykgYkwgYYxETAPBgNVBAMTCGVybGFuZ0NBMRMwEQYDVQQLEwpFcmxh
+bmcgT1RQMRQwEgYDVQQKEwtFcmljc3NvbiBBQjESMBAGA1UEBxMJU3RvY2tob2xt
+MQswCQYDVQQGEwJTRTElMCMGCSqGSIb3DQEJARYWcGV0ZXJAZXJpeC5lcmljc3Nv
+bi5zZYIBATAhBgNVHREEGjAYgRZwZXRlckBlcml4LmVyaWNzc29uLnNlMCEGA1Ud
+EgQaMBiBFnBldGVyQGVyaXguZXJpY3Nzb24uc2UwWwYDVR0fBFQwUjAkoCKgIIYe
+aHR0cDovL2xvY2FsaG9zdC9vdHBDQS9jcmwucGVtMCqgKKAmhiRodHRwOi8vbG9j
+YWxob3N0OjM3ODEzL290cENBL2NybC5wZW0wDQYJKoZIhvcNAQEEBQADggEBACwq
+o4nQTTereSIL8ZLQHweJKXYstTaZrRrAaoRUe9oClY7H++zXmMa8iZvUqqdT3fXW
+4KMXXyoB1o+cLxLnAPKOiFFL9rcbaeAMxZMIrTaFDQsOXAPVqJLSWWS5I5LsNvS6
+MlB6O6+0binTyilDKg683VV9nKNiNdL8WzGa5ig+HvK6xUpJwpOTmDmfdg09zQ+8
+aCbJrthXg0tNnGIorttAd2wFvmLUezoJrlfwLChB0M/qa+RVRCFMiPvkWupo5eVK
+Malwpz2xp2rAUlb6qQY7eI6lV8JsVK06QxBmUHP68Y9kYT5/gy5ketjOB0Ypin05
+6+3VrZKFxrkqKaEoL50=
+-----END CERTIFICATE-----
diff --git a/lib/public_key/test/public_key_SUITE_data/idp_crl.pem b/lib/public_key/test/public_key_SUITE_data/idp_crl.pem
new file mode 100644
index 0000000000..0872279501
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/idp_crl.pem
@@ -0,0 +1,18 @@
+-----BEGIN X509 CRL-----
+MIIC3TCCAcUCAQEwDQYJKoZIhvcNAQEEBQAwgYYxETAPBgNVBAMTCGVybGFuZ0NB
+MRMwEQYDVQQLEwpFcmxhbmcgT1RQMRQwEgYDVQQKEwtFcmljc3NvbiBBQjESMBAG
+A1UEBxMJU3RvY2tob2xtMQswCQYDVQQGEwJTRTElMCMGCSqGSIb3DQEJARYWcGV0
+ZXJAZXJpeC5lcmljc3Nvbi5zZRcNMTUwMjIzMTMyNTMxWhcNMTUwMjI0MTMyNTMx
+WqCCAQgwggEEMIG7BgNVHSMEgbMwgbCAFJtjN3+lARCLByNRyL6YWwc+tEbAoYGM
+pIGJMIGGMREwDwYDVQQDEwhlcmxhbmdDQTETMBEGA1UECxMKRXJsYW5nIE9UUDEU
+MBIGA1UEChMLRXJpY3Nzb24gQUIxEjAQBgNVBAcTCVN0b2NraG9sbTELMAkGA1UE
+BhMCU0UxJTAjBgkqhkiG9w0BCQEWFnBldGVyQGVyaXguZXJpY3Nzb24uc2WCCQCl
+PMPPUpsj8jA4BgNVHRwBAf8ELjAsoCqgKIYmaHR0cDovL2xvY2FsaG9zdDo4MDAw
+L2VybGFuZ0NBL2NybC5wZW0wCgYDVR0UBAMCAQEwDQYJKoZIhvcNAQEEBQADggEB
+AE9WKJhW1oivBEE91akeDcYCtSVp98F7DxzQyJTBLQJGMEXSg8G/oAp64F4qs3oV
+LXS5YFYwxjD9tXByGVEJoIUUMtfMeCvZMgd2V8mBlAJiyHkTrFFA8PgBv+htrJji
+nrheAhrEedqZbqwmrcU34h9fWHp0Zl6UDYyF3I/S0/5ilIz3DvNZ9SBfKKt3DYeW
+hon7qpNo6YrtEzbXyOaa2mFX9c1w39LBZ1FdY0jEzUfh2eImBLxnBjZArNxzYuU8
+a+lNMjc6JUAJwITS6C1YfI4ECsqXe0K/n90pMcm/jgiGFCZhVbXq+Nrm/24qPKBA
+zqoNos7aV7LEYLYOjknaIhY=
+-----END X509 CRL-----
diff --git a/lib/public_key/vsn.mk b/lib/public_key/vsn.mk
index 2fa2d725c3..7f752529f0 100644
--- a/lib/public_key/vsn.mk
+++ b/lib/public_key/vsn.mk
@@ -1 +1 @@
-PUBLIC_KEY_VSN = 0.22.1
+PUBLIC_KEY_VSN = 1.0
diff --git a/lib/reltool/src/reltool.app.src b/lib/reltool/src/reltool.app.src
index 65fcf4aae5..579d2c0d1b 100644
--- a/lib/reltool/src/reltool.app.src
+++ b/lib/reltool/src/reltool.app.src
@@ -36,5 +36,5 @@
{applications, [stdlib, kernel]},
{env, []},
{runtime_dependencies, ["wx-1.2","tools-2.6.14","stdlib-2.0","sasl-2.4",
- "kernel-3.0","erts-6.0"]}
+ "kernel-3.0","erts-7.0"]}
]}.
diff --git a/lib/reltool/src/reltool_fgraph_win.erl b/lib/reltool/src/reltool_fgraph_win.erl
index 66bc2b5ab3..d9e6f6d427 100644
--- a/lib/reltool/src/reltool_fgraph_win.erl
+++ b/lib/reltool/src/reltool_fgraph_win.erl
@@ -252,10 +252,10 @@ ticker_init(Pid) ->
ticker_loop(Pid, Time) ->
receive after Time ->
Pid ! {self(), redraw},
- T0 = now(),
+ T0 = erlang:monotonic_time(),
receive {Pid, ok} -> ok end,
- T1 = now(),
- D = timer:now_diff(T1, T0)/1000,
+ T1 = erlang:monotonic_time(),
+ D = erlang:convert_time_unit(T1-T0, native, milli_seconds),
case round(40 - D) of
Ms when Ms < 0 ->
%io:format("ticker: wait is 0 ms [fg ~7s ms] [fps ~7s]~n",
diff --git a/lib/reltool/src/reltool_utils.erl b/lib/reltool/src/reltool_utils.erl
index 5a3f34506d..e6b1901316 100644
--- a/lib/reltool/src/reltool_utils.erl
+++ b/lib/reltool/src/reltool_utils.erl
@@ -54,12 +54,7 @@ root_dir() ->
code:root_dir().
erl_libs() ->
- case os:getenv("ERL_LIBS") of
- false ->
- [];
- LibStr ->
- string:tokens(LibStr, ":;")
- end.
+ string:tokens(os:getenv("ERL_LIBS", ""), ":;").
lib_dirs(Dir) ->
case erl_prim_loader:list_dir(Dir) of
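os:getenv/2, used above, returns its second argument when the variable is unset, so the previous case on false is no longer needed. A minimal sketch with a hypothetical variable name:

    %% Yields [] when MY_LIBS is not set; string:tokens/2 is always safe to call.
    Dirs = string:tokens(os:getenv("MY_LIBS", ""), ":;").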
diff --git a/lib/reltool/test/reltool_server_SUITE.erl b/lib/reltool/test/reltool_server_SUITE.erl
index b3b7afd1a9..f140d6c55f 100644
--- a/lib/reltool/test/reltool_server_SUITE.erl
+++ b/lib/reltool/test/reltool_server_SUITE.erl
@@ -2513,10 +2513,7 @@ undefined_regexp(_Config) ->
%% Library functions
erl_libs() ->
- case os:getenv("ERL_LIBS") of
- false -> [];
- LibStr -> string:tokens(LibStr, ":;")
- end.
+ string:tokens(os:getenv("ERL_LIBS", ""), ":;").
datadir(Config) ->
%% Removes the trailing slash...
diff --git a/lib/runtime_tools/doc/src/notes.xml b/lib/runtime_tools/doc/src/notes.xml
index 2877355718..1612c62c98 100644
--- a/lib/runtime_tools/doc/src/notes.xml
+++ b/lib/runtime_tools/doc/src/notes.xml
@@ -31,6 +31,22 @@
<p>This document describes the changes made to the Runtime_Tools
application.</p>
+<section><title>Runtime_Tools 1.8.16</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ The trace process started by <c>dbg</c> would not always
+ terminate when <c>dbg:stop/0</c> was called.</p>
+ <p>
+ Own Id: OTP-12517</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Runtime_Tools 1.8.15</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/runtime_tools/src/dbg.erl b/lib/runtime_tools/src/dbg.erl
index 186563ab74..c2de57d40b 100644
--- a/lib/runtime_tools/src/dbg.erl
+++ b/lib/runtime_tools/src/dbg.erl
@@ -778,50 +778,50 @@ tracer_init(Handler, HandlerData) ->
tracer_loop(Handler, HandlerData).
tracer_loop(Handler, Hdata) ->
- receive
- Msg ->
- %% Don't match in receive to avoid giving EXIT message higher
- %% priority than the trace messages.
- case Msg of
- {'EXIT',_Pid,_Reason} ->
- ok;
- Trace ->
- NewData = recv_all_traces(Trace, Handler, Hdata),
- tracer_loop(Handler, NewData)
- end
+ {State, Suspended, Traces} = recv_all_traces(),
+ NewHdata = handle_traces(Suspended, Traces, Handler, Hdata),
+ case State of
+ done ->
+ exit(normal);
+ loop ->
+ tracer_loop(Handler, NewHdata)
end.
-
-recv_all_traces(Trace, Handler, Hdata) ->
- Suspended = suspend(Trace, []),
- recv_all_traces(Suspended, Handler, Hdata, [Trace]).
-recv_all_traces(Suspended0, Handler, Hdata, Traces) ->
+recv_all_traces() ->
+ recv_all_traces([], [], infinity).
+
+recv_all_traces(Suspended0, Traces, Timeout) ->
receive
Trace when is_tuple(Trace), element(1, Trace) == trace ->
Suspended = suspend(Trace, Suspended0),
- recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]);
+ recv_all_traces(Suspended, [Trace|Traces], 0);
Trace when is_tuple(Trace), element(1, Trace) == trace_ts ->
Suspended = suspend(Trace, Suspended0),
- recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]);
+ recv_all_traces(Suspended, [Trace|Traces], 0);
Trace when is_tuple(Trace), element(1, Trace) == seq_trace ->
Suspended = suspend(Trace, Suspended0),
- recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]);
+ recv_all_traces(Suspended, [Trace|Traces], 0);
Trace when is_tuple(Trace), element(1, Trace) == drop ->
Suspended = suspend(Trace, Suspended0),
- recv_all_traces(Suspended, Handler, Hdata, [Trace|Traces]);
+ recv_all_traces(Suspended, [Trace|Traces], 0);
+ {'EXIT', _Pid, _Reason} ->
+ {done, Suspended0, Traces};
Other ->
%%% Is this really a good idea?
io:format(user,"** tracer received garbage: ~p~n", [Other]),
- recv_all_traces(Suspended0, Handler, Hdata, Traces)
- after 0 ->
- case catch invoke_handler(Traces, Handler, Hdata) of
- {'EXIT',Reason} ->
- resume(Suspended0),
- exit({trace_handler_crashed,Reason});
- NewHdata ->
- resume(Suspended0),
- NewHdata
- end
+ recv_all_traces(Suspended0, Traces, Timeout)
+ after Timeout ->
+ {loop, Suspended0, Traces}
+ end.
+
+handle_traces(Suspended, Traces, Handler, Hdata) ->
+ case catch invoke_handler(Traces, Handler, Hdata) of
+ {'EXIT',Reason} ->
+ resume(Suspended),
+ exit({trace_handler_crashed,Reason});
+ NewHdata ->
+ resume(Suspended),
+ NewHdata
end.
invoke_handler([Tr|Traces], Handler, Hdata0) ->
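The rewritten loop above drains all queued trace messages with a zero timeout before invoking the handler, and stops draining as soon as an 'EXIT' message is seen, which is what lets dbg:stop/0 terminate the tracer (OTP-12517). A stripped-down sketch of the same receive pattern, not the actual dbg code:

    %% First call: drain([], infinity); follow-up messages are fetched with 0.
    drain(Acc, Timeout) ->
        receive
            {'EXIT', _Pid, _Reason} ->
                {done, lists:reverse(Acc)};
            Msg when is_tuple(Msg), element(1, Msg) =:= trace ->
                drain([Msg | Acc], 0)
        after Timeout ->
            {loop, lists:reverse(Acc)}
        end.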
diff --git a/lib/runtime_tools/src/observer_backend.erl b/lib/runtime_tools/src/observer_backend.erl
index fea0854042..fe814ceda4 100644
--- a/lib/runtime_tools/src/observer_backend.erl
+++ b/lib/runtime_tools/src/observer_backend.erl
@@ -248,7 +248,7 @@ etop_collect(Collector) ->
SchedulerWallTime = erlang:statistics(scheduler_wall_time),
ProcInfo = etop_collect(processes(), []),
- Collector ! {self(),#etop_info{now = now(),
+ Collector ! {self(),#etop_info{now = erlang:timestamp(),
n_procs = length(ProcInfo),
run_queue = erlang:statistics(run_queue),
runtime = SchedulerWallTime,
diff --git a/lib/runtime_tools/src/percept_profile.erl b/lib/runtime_tools/src/percept_profile.erl
index cdc7a0fca1..dfadb21aa8 100644
--- a/lib/runtime_tools/src/percept_profile.erl
+++ b/lib/runtime_tools/src/percept_profile.erl
@@ -119,7 +119,7 @@ stop() ->
undefined ->
{error, not_started};
Port ->
- erlang:port_command(Port, erlang:term_to_binary({profile_stop, erlang:now()})),
+ erlang:port_command(Port, erlang:term_to_binary({profile_stop, erlang:timestamp()})),
%% trace delivered?
erlang:port_close(Port),
ok
@@ -139,7 +139,7 @@ profile_to_file(Filename, Opts) ->
erlang:system_flag(multi_scheduling, block),
Port = (dbg:trace_port(file, Filename))(),
% Send start time
- erlang:port_command(Port, erlang:term_to_binary({profile_start, erlang:now()})),
+ erlang:port_command(Port, erlang:term_to_binary({profile_start, erlang:timestamp()})),
erlang:system_flag(multi_scheduling, unblock),
%% Register Port
diff --git a/lib/runtime_tools/src/runtime_tools.app.src b/lib/runtime_tools/src/runtime_tools.app.src
index 0a70802c08..32ea9e564b 100644
--- a/lib/runtime_tools/src/runtime_tools.app.src
+++ b/lib/runtime_tools/src/runtime_tools.app.src
@@ -27,6 +27,6 @@
{env, []},
{mod, {runtime_tools, []}},
{runtime_dependencies, ["stdlib-2.0","mnesia-4.12","kernel-3.0",
- "erts-6.0"]}]}.
+ "erts-7.0"]}]}.
diff --git a/lib/runtime_tools/src/system_information.erl b/lib/runtime_tools/src/system_information.erl
index 04cc33e1ad..0796e96ffc 100644
--- a/lib/runtime_tools/src/system_information.erl
+++ b/lib/runtime_tools/src/system_information.erl
@@ -577,10 +577,7 @@ get_beam_name() ->
false -> "";
true -> ".smp"
end,
- Beam = case os:getenv("EMU") of
- false -> "beam";
- Value -> Value
- end,
+ Beam = os:getenv("EMU", "beam"),
Beam ++ Type ++ Flavor.
%% Check runtime dependencies...
diff --git a/lib/runtime_tools/test/dbg_SUITE.erl b/lib/runtime_tools/test/dbg_SUITE.erl
index dfae52ed1d..0bcbd67d05 100644
--- a/lib/runtime_tools/test/dbg_SUITE.erl
+++ b/lib/runtime_tools/test/dbg_SUITE.erl
@@ -25,7 +25,7 @@
ip_port/1, file_port/1, file_port2/1, file_port_schedfix/1,
ip_port_busy/1, wrap_port/1, wrap_port_time/1,
with_seq_trace/1, dead_suspend/1, local_trace/1,
- saved_patterns/1]).
+ saved_patterns/1, tracer_exit_on_stop/1]).
-export([init_per_testcase/2, end_per_testcase/2]).
-export([tracee1/1, tracee2/1]).
-export([dummy/0, exported/1]).
@@ -47,7 +47,7 @@ all() ->
[big, tiny, simple, message, distributed, ip_port,
file_port, file_port2, file_port_schedfix, ip_port_busy,
wrap_port, wrap_port_time, with_seq_trace, dead_suspend,
- local_trace, saved_patterns].
+ local_trace, saved_patterns, tracer_exit_on_stop].
groups() ->
[].
@@ -742,6 +742,38 @@ run_dead_suspend() ->
dummy() ->
ok.
+%% Test that a tracer process does not ignore an exit signal message when it has
+%% received (but not handled) trace messages
+tracer_exit_on_stop(_) ->
+ %% Tracer blocks waiting for fun to complete so that the trace message and
+ %% the exit signal message from the dbg process are in its message queue.
+ Fun = fun() ->
+ ?MODULE:dummy(),
+ Ref = erlang:trace_delivered(self()),
+ receive {trace_delivered, _, Ref} -> stop() end
+ end,
+ {ok, _} = dbg:tracer(process, {fun spawn_once_handler/2, {self(), Fun}}),
+ {ok, Tracer} = dbg:get_tracer(),
+ MRef = monitor(process, Tracer),
+ {ok, _} = dbg:p(self(), [call]),
+ {ok, _} = dbg:p(new, [call]),
+ {ok, _} = dbg:tp(?MODULE, dummy, []),
+ ?MODULE:dummy(),
+ receive {'DOWN', MRef, _, _, normal} -> ok end,
+ [{trace,_,call,{?MODULE, dummy,[]}},
+ {trace,_,call,{?MODULE, dummy,[]}}] = flush(),
+ ok.
+
+spawn_once_handler(Event, {Pid, done} = State) ->
+ Pid ! Event,
+ State;
+spawn_once_handler(Event, {Pid, Fun}) ->
+ {_, Ref} = spawn_monitor(Fun),
+ receive
+ {'DOWN', Ref, _, _, _} ->
+ Pid ! Event,
+ {Pid, done}
+ end.
%%
%% Support functions
diff --git a/lib/runtime_tools/test/erts_alloc_config_SUITE.erl b/lib/runtime_tools/test/erts_alloc_config_SUITE.erl
index 8ea04e1767..9be1565a02 100644
--- a/lib/runtime_tools/test/erts_alloc_config_SUITE.erl
+++ b/lib/runtime_tools/test/erts_alloc_config_SUITE.erl
@@ -79,12 +79,7 @@ basic(Config) when is_list(Config) ->
SbctMod = " +MBsbct 1024 +MHsbct 4096",
%% Make sure we have enabled allocators
- ZFlgs = case os:getenv("ERL_ZFLAGS") of
- FlgString when is_list(FlgString) ->
- FlgString;
- _ ->
- ""
- end ++ " +Mea max +Mea config",
+ ZFlgs = os:getenv("ERL_ZFLAGS", "") ++ " +Mea max +Mea config",
?line os:putenv("ERL_ZFLAGS", ZFlgs ++ SbctMod),
diff --git a/lib/runtime_tools/vsn.mk b/lib/runtime_tools/vsn.mk
index c1df23d2a2..e9f43df1aa 100644
--- a/lib/runtime_tools/vsn.mk
+++ b/lib/runtime_tools/vsn.mk
@@ -1 +1 @@
-RUNTIME_TOOLS_VSN = 1.8.15
+RUNTIME_TOOLS_VSN = 1.8.16
diff --git a/lib/sasl/doc/src/sasl_app.xml b/lib/sasl/doc/src/sasl_app.xml
index 9c3c80bd13..572e550061 100644
--- a/lib/sasl/doc/src/sasl_app.xml
+++ b/lib/sasl/doc/src/sasl_app.xml
@@ -92,6 +92,13 @@
<item>Installs <c>sasl_report_file_h</c> in the error logger.
This makes all reports go to the file <c>FileName</c>.
<c>FileName</c> is a string.</item>
+ <tag><c>{file,FileName,Modes}</c></tag>
+ <item>Same as <c>{file,FileName}</c> except that <c>Modes</c>
+ specifies the modes used when opening <c>FileName</c> with the
+ <seealso marker="kernel:file#open/2">file:open/2</seealso> call.
+ If not specified, <c>Modes</c> defaults to <c>[write]</c>.
+ Use <c>[append]</c> to open <c>FileName</c> in append mode.
+ <c>FileName</c> is a string.</item>
<tag><c>false</c></tag>
<item>
<p>No SASL error logger handler is installed.</p>
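With the {file,FileName,Modes} form documented above, the SASL log can be opened in append mode directly from the system configuration. A minimal sys.config sketch (the path is only an example):

    [{sasl, [{sasl_error_logger, {file, "/var/log/myapp/sasl.log", [append]}}]}].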
diff --git a/lib/sasl/src/sasl.erl b/lib/sasl/src/sasl.erl
index fdea6da13e..4a220f0511 100644
--- a/lib/sasl/src/sasl.erl
+++ b/lib/sasl/src/sasl.erl
@@ -55,7 +55,9 @@ get_sasl_error_logger() ->
case application:get_env(sasl, sasl_error_logger) of
{ok, false} -> undefined;
{ok, tty} -> tty;
- {ok, {file, File}} when is_list(File) -> {file, File};
+ {ok, {file, File}} when is_list(File) -> {file, File, [write]};
+ {ok, {file, File, Modes}} when is_list(File), is_list(Modes) ->
+ {file, File, Modes};
{ok, Bad} -> exit({bad_config, {sasl, {sasl_error_logger, Bad}}});
_ -> undefined
end.
@@ -125,9 +127,9 @@ delete_sasl_error_logger(Type) ->
error_logger:delete_report_handler(mod(Type)).
mod(tty) -> sasl_report_tty_h;
-mod({file, _File}) -> sasl_report_file_h.
+mod({file, _File, _Modes}) -> sasl_report_file_h.
-args({file, File}, Type) -> {File, type(Type)};
+args({file, File, Modes}, Type) -> {File, Modes, type(Type)};
args(_, Type) -> type(Type).
type(error) -> error;
diff --git a/lib/sasl/src/sasl_report_file_h.erl b/lib/sasl/src/sasl_report_file_h.erl
index f42b4b5ff2..a5bd0ac055 100644
--- a/lib/sasl/src/sasl_report_file_h.erl
+++ b/lib/sasl/src/sasl_report_file_h.erl
@@ -28,9 +28,9 @@
handle_event/2, handle_call/2, handle_info/2,
terminate/2]).
-init({File, Type}) ->
+init({File, Modes, Type}) when is_list(Modes) ->
process_flag(trap_exit, true),
- case file:open(File, [write]) of
+ case file:open(File, Modes) of
{ok,Fd} ->
{ok, {Fd, File, Type}};
What ->
diff --git a/lib/sasl/test/release_handler_SUITE.erl b/lib/sasl/test/release_handler_SUITE.erl
index bd7414fbb4..b7c5f34f58 100644
--- a/lib/sasl/test/release_handler_SUITE.erl
+++ b/lib/sasl/test/release_handler_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2011-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2011-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1802,11 +1802,17 @@ upgrade_gg(cleanup,Config) ->
%%%-----------------------------------------------------------------
%%% OTP-10463, Bug - release_handler could not handle regexp in appup
%%% files.
-otp_10463_upgrade_script_regexp(_Config) ->
- %% Assuming that kernel always has a regexp in it's appup
- KernelVsn = vsn(kernel,current),
- {ok,KernelVsn,_} =
- release_handler:upgrade_script(kernel,code:lib_dir(kernel)),
+otp_10463_upgrade_script_regexp(Config) ->
+ DataDir = ?config(data_dir,Config),
+ code:add_path(filename:join([DataDir,regexp_appup,app1,ebin])),
+ application:start(app1),
+ {ok,"1.1",_} = release_handler:upgrade_script(app1,code:lib_dir(app1)),
+ ok.
+
+otp_10463_upgrade_script_regexp(cleanup,Config) ->
+ DataDir = ?config(data_dir,Config),
+ application:stop(app1),
+ code:del_path(filename:join([DataDir,regexp_appup,app1,ebin])),
ok.
no_dot_erlang(Conf) ->
diff --git a/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app
new file mode 100644
index 0000000000..ba6d09cd42
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.app
@@ -0,0 +1,29 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%% This is an -*- erlang -*- file.
+%%
+{application, app1,
+ [
+ {description, "Test that release_handler can read appup with regexp"},
+ {vsn, "1.1"},
+ {modules, []},
+ {registered, []},
+ {applications, []}
+ ]
+}.
diff --git a/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup
new file mode 100644
index 0000000000..9c657232d0
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/regexp_appup/app1/ebin/app1.appup
@@ -0,0 +1,23 @@
+%% -*- erlang -*-
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+{"1.1",
+ %% Up from
+ [{<<"1(\\.[0-9]+)*">>,[{restart_application,app1}]}],
+ %% Down to
+ [{<<"1(\\.[0-9]+)*">>,[{restart_application,app1}]}]
+}.
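The appup above gives the "up from" and "down to" versions as a binary regexp, so any 1.x version triggers restart_application; release_handler performs the match internally, as exercised by otp_10463_upgrade_script_regexp above. A rough illustration of the regexp semantics, assuming an anchored full-string match:

    {match, _} = re:run("1.0.17", <<"^1(\\.[0-9]+)*$">>).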
diff --git a/lib/sasl/test/sasl_SUITE.erl b/lib/sasl/test/sasl_SUITE.erl
index d7b99d506e..d9ab9e551c 100644
--- a/lib/sasl/test/sasl_SUITE.erl
+++ b/lib/sasl/test/sasl_SUITE.erl
@@ -26,10 +26,11 @@
%% Test cases must be exported.
-export([app_test/1,
appup_test/1,
- log_mf_h_env/1]).
+ log_mf_h_env/1,
+ log_file/1]).
all() ->
- [log_mf_h_env, app_test, appup_test].
+ [log_mf_h_env, log_file, app_test, appup_test].
groups() ->
[].
@@ -151,10 +152,9 @@ check_appup([],_,_) ->
log_mf_h_env(Config) ->
PrivDir = ?config(priv_dir,Config),
LogDir = filename:join(PrivDir,sasl_SUITE_log_dir),
- ok = file:make_dir(LogDir),
+ ok = filelib:ensure_dir(LogDir),
application:stop(sasl),
- SaslEnv = application:get_all_env(sasl),
- lists:foreach(fun({E,_V}) -> application:unset_env(sasl,E) end, SaslEnv),
+ clear_env(sasl),
ok = application:set_env(sasl,error_logger_mf_dir,LogDir),
match_error(missing_config,application:start(sasl)),
@@ -178,6 +178,23 @@ log_mf_h_env(Config) ->
ok = application:set_env(sasl,error_logger_mf_dir,LogDir),
ok = application:start(sasl).
+log_file(Config) ->
+ PrivDir = ?config(priv_dir,Config),
+ LogDir = filename:join(PrivDir,sasl_SUITE_log_dir),
+ ok = filelib:ensure_dir(LogDir),
+ File = filename:join(LogDir, "file.log"),
+ application:stop(sasl),
+ clear_env(sasl),
+
+ ok = application:set_env(sasl,sasl_error_logger,{file, File}, [{persistent, true}]),
+ ok = application:start(sasl),
+ application:stop(sasl),
+ ok = application:set_env(sasl,sasl_error_logger,{file, File, [append]}, [{persistent, true}]),
+ ok = application:start(sasl),
+ application:stop(sasl),
+ ok = application:set_env(sasl,sasl_error_logger, tty, [{persistent, false}]),
+ ok = application:start(sasl).
+
%%-----------------------------------------------------------------
%% Internal
@@ -185,3 +202,7 @@ match_error(Expected,{error,{bad_return,{_,{'EXIT',{Expected,{sasl,_}}}}}}) ->
ok;
match_error(Expected,Actual) ->
?t:fail({unexpected_return,Expected,Actual}).
+
+clear_env(App) ->
+ [application:unset_env(App,Opt) || {Opt,_} <- application:get_all_env(App)],
+ ok.
diff --git a/lib/sasl/vsn.mk b/lib/sasl/vsn.mk
index 4259a2d76c..8d1a043410 100644
--- a/lib/sasl/vsn.mk
+++ b/lib/sasl/vsn.mk
@@ -1 +1 @@
-SASL_VSN = 2.4.1
+SASL_VSN = 2.4.2
diff --git a/lib/snmp/doc/src/notes.xml b/lib/snmp/doc/src/notes.xml
index fd307ef824..52022f59ff 100644
--- a/lib/snmp/doc/src/notes.xml
+++ b/lib/snmp/doc/src/notes.xml
@@ -33,7 +33,40 @@
</header>
- <section>
+ <section><title>SNMP 5.1.2</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ A bug in the SNMP Agent has been corrected; when opening
+ a port using the command line argument -snmpa_fd, the Port
+ argument should be 0 when calling gen_udp:open.</p>
+ <p>
+ A bug in the SNMP manager has been corrected; it should
+ not look at the -snmp_fd command line argument, but
+ instead at -snmpm_fd.</p>
+ <p>
+ Own Id: OTP-12669 Aux Id: seq12841 </p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Improved cryptographic capability.</p>
+ <p>
+ Own Id: OTP-12452</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section>
<title>SNMP Development Toolkit 5.1.1</title>
<p>Version 5.1.1 supports code replacement in runtime from/to
version 5.1. </p>
diff --git a/lib/snmp/doc/src/snmp_app.xml b/lib/snmp/doc/src/snmp_app.xml
index 86f0981988..e36908a5b9 100644
--- a/lib/snmp/doc/src/snmp_app.xml
+++ b/lib/snmp/doc/src/snmp_app.xml
@@ -587,7 +587,7 @@
<marker id="manager_server_timeout"></marker>
<tag><c><![CDATA[server_timeout() = integer() <optional>]]></c></tag>
<item>
- <p>Asynchroneous request cleanup time. For every requests,
+ <p>Asynchronous request cleanup time. For every request,
some info is stored internally, in order to be able to
deliver the reply (when it arrives) to the proper destination.
If the reply arrives, this info will be deleted. But if
diff --git a/lib/snmp/doc/src/snmp_config.xml b/lib/snmp/doc/src/snmp_config.xml
index 0ec8bb91cf..d1ee6545dd 100644
--- a/lib/snmp/doc/src/snmp_config.xml
+++ b/lib/snmp/doc/src/snmp_config.xml
@@ -616,7 +616,7 @@ in so far as it will be converted to the new format if found.
<marker id="manager_server_timeout"></marker>
<tag><c><![CDATA[server_timeout() = integer() <optional>]]></c></tag>
<item>
- <p>Asynchroneous request cleanup time. For every requests,
+ <p>Asynchronous request cleanup time. For every request,
some info is stored internally, in order to be able to
deliver the reply (when it arrives) to the proper destination.
If the reply arrives, this info will be deleted. But if
diff --git a/lib/snmp/src/agent/snmp_shadow_table.erl b/lib/snmp/src/agent/snmp_shadow_table.erl
index 34543d542b..c4704e201b 100644
--- a/lib/snmp/src/agent/snmp_shadow_table.erl
+++ b/lib/snmp/src/agent/snmp_shadow_table.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2009. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -76,7 +76,7 @@ delete_time_stamp_table() ->
end.
update(Name, UpdateFunc, Interval) ->
- CurrentTime = get_time(),
+ CurrentTime = snmp_misc:now(ms),
case mnesia:dirty_read({time_stamp, Name}) of
[#time_stamp{data = Expire}] when CurrentTime =< Expire -> ok;
_ ->
@@ -117,9 +117,6 @@ table_func(Op, RowIndex, Cols,
update(Name, UpdateFunc, Interval),
snmp_generic:table_func(Op, RowIndex, Cols, {Name, mnesia}).
-get_time() ->
- {M,S,U} = erlang:now(),
- 1000000000 * M + 1000 * S + (U div 1000).
%%-----------------------------------------------------------------
%% Urrk.
@@ -183,5 +180,3 @@ delete_table(Tab) ->
error_msg(F, A) ->
?snmpa_error(F, A).
-
-
diff --git a/lib/snmp/src/agent/snmp_standard_mib.erl b/lib/snmp/src/agent/snmp_standard_mib.erl
index aace3fd413..53f733ae4e 100644
--- a/lib/snmp/src/agent/snmp_standard_mib.erl
+++ b/lib/snmp/src/agent/snmp_standard_mib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -546,8 +546,9 @@ dummy(_Op) -> ok.
%%-----------------------------------------------------------------
snmp_set_serial_no(new) ->
snmp_generic:variable_func(new, {snmpSetSerialNo, volatile}),
- {A1,A2,A3} = erlang:now(),
- random:seed(A1,A2,A3),
+ random:seed(erlang:phash2([node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
Val = random:uniform(2147483648) - 1,
snmp_generic:variable_func(set, Val, {snmpSetSerialNo, volatile});
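The same seeding idiom replaces erlang:now/0 throughout the SNMP code: the three seed integers come from a hash of the node name, the monotonic clock and a unique integer, giving each process a distinct seed. A minimal sketch:

    random:seed(erlang:phash2([node()]),
                erlang:monotonic_time(),
                erlang:unique_integer()),
    Serial = random:uniform(2147483648) - 1.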
diff --git a/lib/snmp/src/agent/snmp_target_mib.erl b/lib/snmp/src/agent/snmp_target_mib.erl
index ef9503cda8..f66c54849f 100644
--- a/lib/snmp/src/agent/snmp_target_mib.erl
+++ b/lib/snmp/src/agent/snmp_target_mib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1998-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1998-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -346,13 +346,6 @@ check_target_params(X) ->
error({invalid_target_params, X}).
-
-%% maybe_create_table(Name) ->
-%% case snmpa_local_db:table_exists(db(Name)) of
-%% true -> ok;
-%% _ -> snmpa_local_db:table_create(db(Name))
-%% end.
-
init_tabs(Addrs, Params) ->
?vdebug("create target address table",[]),
AddrDB = db(snmpTargetAddrTable),
@@ -679,8 +672,9 @@ snmpTargetSpinLock(print) ->
snmpTargetSpinLock(new) ->
snmp_generic:variable_func(new, {snmpTargetSpinLock, volatile}),
- {A1,A2,A3} = erlang:now(),
- random:seed(A1,A2,A3),
+ random:seed(erlang:phash2([node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
Val = random:uniform(2147483648) - 1,
snmp_generic:variable_func(set, Val, {snmpTargetSpinLock, volatile});
@@ -1080,5 +1074,3 @@ error(Reason) ->
config_err(F, A) ->
snmpa_error:config_err("[TARGET-MIB]: " ++ F, A).
-
-
diff --git a/lib/snmp/src/agent/snmp_user_based_sm_mib.erl b/lib/snmp/src/agent/snmp_user_based_sm_mib.erl
index 69dce337ba..ce6dc21435 100644
--- a/lib/snmp/src/agent/snmp_user_based_sm_mib.erl
+++ b/lib/snmp/src/agent/snmp_user_based_sm_mib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -439,8 +439,9 @@ usmUserSpinLock(print) ->
usmUserSpinLock(new) ->
snmp_generic:variable_func(new, {usmUserSpinLock, volatile}),
- {A1,A2,A3} = erlang:now(),
- random:seed(A1,A2,A3),
+ random:seed(erlang:phash2([node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
Val = random:uniform(2147483648) - 1,
snmp_generic:variable_func(set, Val, {usmUserSpinLock, volatile});
@@ -1191,29 +1192,7 @@ extract_new_key(Hash, OldKey, KeyChange) ->
-define(i8(Int), Int band 255).
mk_random(Len) when Len =< 20 ->
- %% Use of yield():
- %% This will either schedule another process, or fail and invoke
- %% the error_handler (in old versions). In either case, it is
- %% safe to assume that now, reductions and garbage_collection have
- %% changed in a non-deterministically way.
- {_,_,A} = erlang:now(),
- catch erlang:yield(),
- {_,_,B} = erlang:now(),
- catch erlang:yield(),
- {_,_,C} = erlang:now(),
- {D,_} = erlang:statistics(reductions),
- {E,_} = erlang:statistics(runtime),
- {F,_} = erlang:statistics(wall_clock),
- {G,H,_} = erlang:statistics(garbage_collection),
- catch erlang:yield(),
- {_,_,C2} = erlang:now(),
- {D2,_} = erlang:statistics(reductions),
- {_,H2,_} = erlang:statistics(garbage_collection),
- %% X(N) means we can use N bits from variable X:
- %% A(16) B(16) C(16) D(16) E(8) F(16) G(8) H(16)
- Rnd20 = [?i16(A),?i16(B),?i16(C),?i16(D),?i8(E),?i16(F),
- ?i8(G),?i16(H),?i16(C2),?i16(D2),?i16(H2)],
- lists:sublist(Rnd20, Len).
+ binary_to_list(crypto:strong_rand_bytes(Len)).
split(0, Rest, FirstRev) ->
{lists:reverse(FirstRev), Rest};
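mk_random/1 above now simply draws the requested number of bytes from the crypto module's strong generator instead of mixing clock and statistics values. A minimal sketch of the replacement (requires the crypto application to be started):

    %% Len random bytes as a list of integers 0..255 (Len =< 20 in the USM code).
    mk_random(Len) ->
        binary_to_list(crypto:strong_rand_bytes(Len)).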
diff --git a/lib/snmp/src/agent/snmp_view_based_acm_mib.erl b/lib/snmp/src/agent/snmp_view_based_acm_mib.erl
index 722bd7ac5b..28e2bdbb96 100644
--- a/lib/snmp/src/agent/snmp_view_based_acm_mib.erl
+++ b/lib/snmp/src/agent/snmp_view_based_acm_mib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -845,8 +845,9 @@ vacmViewSpinLock(print) ->
vacmViewSpinLock(new) ->
snmp_generic:variable_func(new, volatile_db(vacmViewSpinLock)),
- {A1,A2,A3} = erlang:now(),
- random:seed(A1,A2,A3),
+ random:seed(erlang:phash2([node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
Val = random:uniform(2147483648) - 1,
snmp_generic:variable_func(set, Val, volatile_db(vacmViewSpinLock));
@@ -1133,4 +1134,3 @@ error(Reason) ->
config_err(F, A) ->
snmpa_error:config_err("[VIEW-BASED-ACM-MIB]: " ++ F, A).
-
diff --git a/lib/snmp/src/agent/snmpa_mpd.erl b/lib/snmp/src/agent/snmpa_mpd.erl
index 642b1f7fc5..24007a4e63 100644
--- a/lib/snmp/src/agent/snmpa_mpd.erl
+++ b/lib/snmp/src/agent/snmpa_mpd.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -75,8 +75,9 @@
init(Vsns) ->
?vlog("init -> entry with"
"~n Vsns: ~p", [Vsns]),
- {A,B,C} = erlang:now(),
- random:seed(A,B,C),
+ random:seed(erlang:phash2([node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
ets:insert(snmp_agent_table, {msg_id, random:uniform(2147483647)}),
ets:insert(snmp_agent_table, {req_id, random:uniform(2147483647)}),
init_counters(),
@@ -771,21 +772,7 @@ generate_v3_report_msg(MsgID, MsgSecurityModel, Data, LocalEngineID,
ContextEngineID, ContextName, SecData},
LocalEngineID, Log).
-%% req_id(#scopedPdu{data = #pdu{request_id = ReqId}}) ->
-%% ?vtrace("Report ReqId: ~p",[ReqId]),
-%% ReqId;
-%% req_id(_) ->
-%% 0. % RFC2572, 7.1.3.c.4
-
-%% maybe_generate_discovery1_report_msg() ->
-%% case (catch DiscoveryHandler:handle_discovery1(Ip, Udp, EngineId)) of
-%% {ok, Entry} when is_record(Entry, snmp_discovery_data1) ->
-%% ok;
-%% ignore ->
-%% ok;
-%% {error, Reason} ->
-
%% Response to stage 1 discovery message (terminating, i.e. from the manager)
generate_discovery1_report_msg(MsgID, MsgSecurityModel,
SecName, SecLevel,
diff --git a/lib/snmp/src/agent/snmpa_net_if.erl b/lib/snmp/src/agent/snmpa_net_if.erl
index 840d56d563..c813c57d56 100644
--- a/lib/snmp/src/agent/snmpa_net_if.erl
+++ b/lib/snmp/src/agent/snmpa_net_if.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -297,14 +297,14 @@ socket_open(snmpUDPDomain = Domain, [IpPort | Opts]) ->
Fd = list_to_integer(FdStr),
?vdebug("socket_open(~p, [~p | ~p]) Fd: ~p",
[Domain, IpPort, Opts, Fd]),
- gen_udp_open(IpPort, [{fd, Fd} | Opts]);
+ gen_udp_open(0, [{fd, Fd} | Opts]);
error ->
case init:get_argument(snmpa_fd) of
{ok, [[FdStr]]} ->
Fd = list_to_integer(FdStr),
?vdebug("socket_open(~p, [~p | ~p]) Fd: ~p",
[Domain, IpPort, Opts, Fd]),
- gen_udp_open(IpPort, [{fd, Fd} | Opts]);
+ gen_udp_open(0, [{fd, Fd} | Opts]);
error ->
?vdebug("socket_open(~p, [~p | ~p])",
[Domain, IpPort, Opts]),
@@ -674,7 +674,7 @@ handle_recv(
#state{mpd_state = MpdState, note_store = NS, log = Log} = S,
#transport{socket = Socket} = Transport,
From, Packet) ->
- put(n1, erlang:now()),
+ put(n1, erlang:monotonic_time(micro_seconds)),
LogF =
fun(Type, Data) ->
log(Log, Type, Data, From)
@@ -1379,15 +1379,7 @@ do_close_log(_) ->
%%% DEBUG FUNCTIONS
%%%-----------------------------------------------------------------
time_in_agent() ->
- subtr(erlang:now(), get(n1)).
-
-subtr({X1,Y1,Z1}, {X1,Y1,Z2}) ->
- Z1 - Z2;
-subtr({X1,Y1,Z1}, {X1,Y2,Z2}) ->
- ((Y1-Y2) * 1000000) + (Z1 - Z2);
-subtr({X1,Y1,Z1}, {X2,Y2,Z2}) ->
- ((X1 - X2) * 1000000000000) + ((Y1 - Y2) * 1000000) + (Z1 - Z2).
-
+ erlang:monotonic_time(micro_seconds) - get(n1).
%% ----------------------------------------------------------------
@@ -1637,10 +1629,3 @@ get_port_info(Id) ->
%% ----------------------------------------------------------------
-
-% i(F) ->
-% i(F, []).
-
-% i(F, A) ->
-% io:format("~p: " ++ F ++ "~n", [?MODULE|A]).
-
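The -snmpa_fd fix above (OTP-12669) passes port 0 to gen_udp:open/2 when the socket is inherited as an already opened file descriptor, so no new bind is attempted. A minimal sketch, assuming Fd was read from the command line; the extra options are illustrative:

    {ok, Socket} = gen_udp:open(0, [binary, {fd, Fd}, {reuseaddr, true}]).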
diff --git a/lib/snmp/src/agent/snmpa_usm.erl b/lib/snmp/src/agent/snmpa_usm.erl
index 719ea4e356..c571e50517 100644
--- a/lib/snmp/src/agent/snmpa_usm.erl
+++ b/lib/snmp/src/agent/snmpa_usm.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -645,8 +645,9 @@ get_des_salt() ->
ets:insert(snmp_agent_table, {usm_des_salt, 0}),
0;
_ -> % it doesn't exist, initialize
- {A1,A2,A3} = erlang:now(),
- random:seed(A1,A2,A3),
+ random:seed(erlang:phash2([node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
R = random:uniform(4294967295),
ets:insert(snmp_agent_table, {usm_des_salt, R}),
R
@@ -677,8 +678,9 @@ get_aes_salt() ->
ets:insert(snmp_agent_table, {usm_aes_salt, 0}),
0;
_ -> % it doesn't exist, initialize
- {A1,A2,A3} = erlang:now(),
- random:seed(A1,A2,A3),
+ random:seed(erlang:phash2([node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
R = random:uniform(36893488147419103231),
ets:insert(snmp_agent_table, {usm_aes_salt, R}),
R
diff --git a/lib/snmp/src/agent/snmpa_vacm.erl b/lib/snmp/src/agent/snmpa_vacm.erl
index dadcf32543..281b2bd34a 100644
--- a/lib/snmp/src/agent/snmpa_vacm.erl
+++ b/lib/snmp/src/agent/snmpa_vacm.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2012. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -305,8 +305,8 @@ dump_table() ->
%% time dumping the table.
unique_table_name(Pre) ->
%% We want something that is guaranteed to be unique,
- %% therefor we use erlang:now() instead of os:timestamp()
- unique_table_name(Pre, erlang:now()).
+ %% therefore we use erlang:timestamp() instead of os:timestamp()
+ unique_table_name(Pre, erlang:timestamp()).
unique_table_name(Pre, {_A, _B, C} = Now) ->
{Date, Time} = calendar:now_to_datetime(Now),
@@ -445,6 +445,3 @@ gc_tab(Oid) ->
user_err(F, A) ->
snmpa_error:user_err(F, A).
-
-% config_err(F, A) ->
-% snmpa_error:config_err(F, A).
diff --git a/lib/snmp/src/app/snmp.app.src b/lib/snmp/src/app/snmp.app.src
index cbd292e4c3..a55bb389ba 100644
--- a/lib/snmp/src/app/snmp.app.src
+++ b/lib/snmp/src/app/snmp.app.src
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -137,5 +137,5 @@
%% before snmp.
{applications, [kernel, stdlib]},
{mod, {snmp_app, []}},
- {runtime_dependencies, ["stdlib-2.0","runtime_tools-1.8.14","mnesia-4.12",
+ {runtime_dependencies, ["stdlib-2.5","runtime_tools-1.8.14","mnesia-4.12",
"kernel-3.0","erts-6.0","crypto-3.3"]}]}.
diff --git a/lib/snmp/src/app/snmp.appup.src b/lib/snmp/src/app/snmp.appup.src
index e7e54f5b7e..a21ff863be 100644
--- a/lib/snmp/src/app/snmp.appup.src
+++ b/lib/snmp/src/app/snmp.appup.src
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,6 +28,9 @@
%% {update, snmpa_local_db, soft, soft_purge, soft_purge, []}
%% {add_module, snmpm_net_if_mt}
[
+ {"5.1.2", [ % Only runtime dependencies change
+ ]},
+ {"5.1.1", [{restart_application, snmp}]},
{"5.1", [ % Only compiler changes
]},
{"5.0", [{restart_application, snmp}]},
@@ -46,6 +49,9 @@
%% {remove, {snmpm_net_if_mt, soft_purge, soft_purge}}
[
+ {"5.1.2", [ % Only runtime dependencies change
+ ]},
+ {"5.1.1", [{restart_application, snmp}]},
{"5.1", [ % Only compiler changes
]},
{"5.0", [{restart_application, snmp}]},
diff --git a/lib/snmp/src/compile/snmpc.erl b/lib/snmp/src/compile/snmpc.erl
index 2f065dddac..e7839c0792 100644
--- a/lib/snmp/src/compile/snmpc.erl
+++ b/lib/snmp/src/compile/snmpc.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -409,8 +409,9 @@ get_verbosity(Options) ->
%%----------------------------------------------------------------------
init(From, MibFileName, Options) ->
- {A,B,C} = now(),
- random:seed(A,B,C),
+ random:seed(erlang:phash2([node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
put(options, Options),
put(verbosity, get_verbosity(Options)),
put(description, get_description(Options)),
diff --git a/lib/snmp/src/manager/snmpm.erl b/lib/snmp/src/manager/snmpm.erl
index 8976322c4e..96e3d55b46 100644
--- a/lib/snmp/src/manager/snmpm.erl
+++ b/lib/snmp/src/manager/snmpm.erl
@@ -520,7 +520,7 @@ sync_get(UserId, TargetName, Context, Oids, Timeout, ExtraInfo) ->
-%% --- asynchroneous get-request ---
+%% --- asynchronous get-request ---
%%
%% The reply will be delivered to the user
%% through a call to handle_pdu/5
@@ -588,7 +588,7 @@ sync_get_next(UserId, TargetName, Context, Oids, Timeout, ExtraInfo) ->
%% </BACKWARD-COMPAT>
-%% --- asynchroneous get_next-request ---
+%% --- asynchronous get_next-request ---
%%
async_get_next2(UserId, TargetName, Oids) ->
@@ -654,7 +654,7 @@ sync_set(UserId, TargetName, Context, VarsAndVals, Timeout, ExtraInfo) ->
%% </BACKWARD-COMPAT>
-%% --- asynchroneous set-request ---
+%% --- asynchronous set-request ---
%%
async_set2(UserId, TargetName, VarsAndVals) ->
@@ -746,7 +746,7 @@ sync_get_bulk(UserId, TargetName, NonRep, MaxRep, Context, Oids, Timeout,
%% </BACKWARD-COMPAT>
-%% --- asynchroneous get-bulk ---
+%% --- asynchronous get-bulk ---
%%
async_get_bulk2(UserId, TargetName, NonRep, MaxRep, Oids) ->
diff --git a/lib/snmp/src/manager/snmpm_mpd.erl b/lib/snmp/src/manager/snmpm_mpd.erl
index f8a7441c0a..5fc9d3655c 100644
--- a/lib/snmp/src/manager/snmpm_mpd.erl
+++ b/lib/snmp/src/manager/snmpm_mpd.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -67,8 +67,9 @@
%%%-----------------------------------------------------------------
init(Vsns) ->
?vdebug("init -> entry with ~p", [Vsns]),
- {A,B,C} = erlang:now(),
- random:seed(A,B,C),
+ random:seed(erlang:phash2([node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
snmpm_config:cre_counter(msg_id, random:uniform(2147483647)),
snmpm_config:cre_counter(req_id, random:uniform(2147483647)),
init_counters(),
@@ -896,17 +897,6 @@ get_agent_engine_id(Name) ->
is_known_engine_id(EngineID, {Addr, Port}) ->
snmpm_config:is_known_engine_id(EngineID, Addr, Port).
-%% is_known_engine_id(EngineID, Addr, Port) ->
-%% snmpm_config:is_known_engine_id(EngineID, Addr, Port).
-
-% get_agent_engine_id(Addr, Port) ->
-% case snmpm_config:get_agent_engine_id(Addr, Port) of
-% {ok, Id} ->
-% Id;
-% _Error ->
-% ""
-% end.
-
%%-----------------------------------------------------------------
%% Sequence number (msg-id & req-id) functions
diff --git a/lib/snmp/src/manager/snmpm_net_if.erl b/lib/snmp/src/manager/snmpm_net_if.erl
index b4cc165d2e..e81383eeea 100644
--- a/lib/snmp/src/manager/snmpm_net_if.erl
+++ b/lib/snmp/src/manager/snmpm_net_if.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -330,7 +330,7 @@ socket_params(Domain, {IpAddr, IpPort} = Addr, BindTo, CommonSocketOpts) ->
end,
case Family of
inet ->
- case init:get_argument(snmp_fd) of
+ case init:get_argument(snmpm_fd) of
{ok, [[FdStr]]} ->
Fd = list_to_integer(FdStr),
case BindTo of
@@ -489,11 +489,6 @@ handle_call({verbosity, Verbosity}, _From, State) ->
put(verbosity, Verbosity),
{reply, ok, State};
-%% handle_call({system_info_updated, What}, _From, State) ->
-%% ?vlog("received system_info_updated request with What = ~p", [What]),
-%% {NewState, Reply} = handle_system_info_updated(State, What),
-%% {reply, Reply, NewState};
-
handle_call(get_log_type, _From, State) ->
?vlog("received get-log-type request", []),
Reply = (catch handle_get_log_type(State)),
@@ -816,7 +811,7 @@ handle_inform_request(
ok;
[] ->
RePdu = make_response_pdu(Pdu),
- Expire = t() + To,
+ Expire = snmp_misc:now(ms) + To,
Rec = {Key, Expire, {Vsn, ACM, RePdu}},
ets:insert(snmpm_inform_request_table, Rec)
end.
@@ -876,7 +871,7 @@ maybe_send_inform_response(
handle_inform_response_gc(#state{irb = IRB} = State) ->
ets:safe_fixtable(snmpm_inform_request_table, true),
- do_irgc(ets:first(snmpm_inform_request_table), t()),
+ do_irgc(ets:first(snmpm_inform_request_table), snmp_misc:now(ms)),
ets:safe_fixtable(snmpm_inform_request_table, false),
State#state{irgc = irgc_start(IRB)}.
@@ -1023,110 +1018,6 @@ handle_disk_log(_Log, _Info, State) ->
State.
-%% mk_discovery_msg('version-3', Pdu, _VsnHdr, UserName) ->
-%% ScopedPDU = #scopedPdu{contextEngineID = "",
-%% contextName = "",
-%% data = Pdu},
-%% Bytes = snmp_pdus:enc_scoped_pdu(ScopedPDU),
-%% MsgID = get(msg_id),
-%% put(msg_id,MsgID+1),
-%% UsmSecParams =
-%% #usmSecurityParameters{msgAuthoritativeEngineID = "",
-%% msgAuthoritativeEngineBoots = 0,
-%% msgAuthoritativeEngineTime = 0,
-%% msgUserName = UserName,
-%% msgPrivacyParameters = "",
-%% msgAuthenticationParameters = ""},
-%% SecBytes = snmp_pdus:enc_usm_security_parameters(UsmSecParams),
-%% PduType = Pdu#pdu.type,
-%% Hdr = #v3_hdr{msgID = MsgID,
-%% msgMaxSize = 1000,
-%% msgFlags = snmp_misc:mk_msg_flags(PduType, 0),
-%% msgSecurityModel = ?SEC_USM,
-%% msgSecurityParameters = SecBytes},
-%% Msg = #message{version = 'version-3', vsn_hdr = Hdr, data = Bytes},
-%% case (catch snmp_pdus:enc_message_only(Msg)) of
-%% {'EXIT', Reason} ->
-%% error("Encoding error. Pdu: ~w. Reason: ~w",[Pdu, Reason]),
-%% error;
-%% L when list(L) ->
-%% {Msg, L}
-%% end;
-%% mk_discovery_msg(Version, Pdu, {Com, _, _, _, _}, UserName) ->
-%% Msg = #message{version = Version, vsn_hdr = Com, data = Pdu},
-%% case catch snmp_pdus:enc_message(Msg) of
-%% {'EXIT', Reason} ->
-%% error("Encoding error. Pdu: ~w. Reason: ~w",[Pdu, Reason]),
-%% error;
-%% L when list(L) ->
-%% {Msg, L}
-%% end.
-
-
-%% mk_msg('version-3', Pdu, {Context, User, EngineID, CtxEngineId, SecLevel},
-%% MsgData) ->
-%% %% Code copied from snmp_mpd.erl
-%% {MsgId, SecName, SecData} =
-%% if
-%% tuple(MsgData), Pdu#pdu.type == 'get-response' ->
-%% MsgData;
-%% true ->
-%% Md = get(msg_id),
-%% put(msg_id, Md + 1),
-%% {Md, User, []}
-%% end,
-%% ScopedPDU = #scopedPdu{contextEngineID = CtxEngineId,
-%% contextName = Context,
-%% data = Pdu},
-%% ScopedPDUBytes = snmp_pdus:enc_scoped_pdu(ScopedPDU),
-
-%% PduType = Pdu#pdu.type,
-%% V3Hdr = #v3_hdr{msgID = MsgId,
-%% msgMaxSize = 1000,
-%% msgFlags = snmp_misc:mk_msg_flags(PduType, SecLevel),
-%% msgSecurityModel = ?SEC_USM},
-%% Message = #message{version = 'version-3', vsn_hdr = V3Hdr,
-%% data = ScopedPDUBytes},
-%% SecEngineID = case PduType of
-%% 'get-response' -> snmp_framework_mib:get_engine_id();
-%% _ -> EngineID
-%% end,
-%% case catch snmp_usm:generate_outgoing_msg(Message, SecEngineID,
-%% SecName, SecData, SecLevel) of
-%% {'EXIT', Reason} ->
-%% error("Encoding error. Pdu: ~w. Reason: ~w",[Pdu, Reason]),
-%% error;
-%% {error, Reason} ->
-%% error("Encoding error. Pdu: ~w. Reason: ~w",[Pdu, Reason]),
-%% error;
-%% Packet ->
-%% Packet
-%% end;
-%% mk_msg(Version, Pdu, {Com, _User, _EngineID, _Ctx, _SecLevel}, _SecData) ->
-%% Msg = #message{version = Version, vsn_hdr = Com, data = Pdu},
-%% case catch snmp_pdus:enc_message(Msg) of
-%% {'EXIT', Reason} ->
-%% error("Encoding error. Pdu: ~w. Reason: ~w",[Pdu, Reason]),
-%% error;
-%% B when list(B) ->
-%% B
-%% end.
-
-
-%% handle_system_info_updated(#state{log = {Log, _OldType}} = State,
-%% audit_trail_log_type = _What) ->
-%% %% Just to make sure, check that ATL is actually enabled
-%% case snmpm_config:system_info(audit_trail_log) of
-%% {ok, true} ->
-%% {ok, Type} = snmpm_config:system_info(audit_trail_log_type),
-%% NewState = State#state{log = {Log, Type}},
-%% {NewState, ok};
-%% _ ->
-%% {State, {error, {adt_not_enabled}}}
-%% end;
-%% handle_system_info_updated(_State, _What) ->
-%% ok.
-
handle_get_log_type(#state{log = {_Log, Value}} = State) ->
%% Just to make sure, check that ATL is actually enabled
case snmpm_config:system_info(audit_trail_log) of
@@ -1257,13 +1148,6 @@ maybe_process_extra_info(_ExtraInfo) ->
%% -------------------------------------------------------------------
-t() ->
- {A,B,C} = erlang:now(),
- A*1000000000+B*1000+(C div 1000).
-
-
-%% -------------------------------------------------------------------
-
%% info_msg(F, A) ->
%% ?snmpm_info("NET-IF server: " ++ F, A).
@@ -1301,8 +1185,6 @@ proc_mem(P) when is_pid(P) ->
_ ->
undefined
end.
-%% proc_mem(_) ->
-%% undefined.
get_port_info(Id) ->
@@ -1382,20 +1264,6 @@ counters() ->
inc(Name) -> inc(Name, 1).
inc(Name, N) -> snmpm_config:incr_stats_counter(Name, N).
-%% get_counters() ->
-%% Counters = counters(),
-%% get_counters(Counters, []).
-
-%% get_counters([], Acc) ->
-%% lists:reverse(Acc);
-%% get_counters([Counter|Counters], Acc) ->
-%% case snmpm_config:get_stats_counter(Counter) of
-%% {ok, CounterVal} ->
-%% get_counters(Counters, [{Counter, CounterVal}|Acc]);
-%% _ ->
-%% get_counters(Counters, Acc)
-%% end.
-
%% ----------------------------------------------------------------
diff --git a/lib/snmp/src/manager/snmpm_server.erl b/lib/snmp/src/manager/snmpm_server.erl
index a75122d0bb..00a9b82daa 100644
--- a/lib/snmp/src/manager/snmpm_server.erl
+++ b/lib/snmp/src/manager/snmpm_server.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -467,27 +467,6 @@ cancel_async_request(UserId, ReqId) ->
call({cancel_async_request, UserId, ReqId}).
-%% discovery(UserId, BAddr) ->
-%% discovery(UserId, BAddr, ?SNMP_AGENT_PORT, [],
-%% ?DEFAULT_ASYNC_EXPIRE, ?EXTRA_INFO).
-
-%% discovery(UserId, BAddr, Config) when is_list(Config) ->
-%% discovery(UserId, BAddr, ?SNMP_AGENT_PORT, Config,
-%% ?DEFAULT_ASYNC_EXPIRE, ?EXTRA_INFO);
-
-%% discovery(UserId, BAddr, Expire) when is_integer(Expire) ->
-%% discovery(UserId, BAddr, ?SNMP_AGENT_PORT, [], Expire, ?EXTRA_INFO).
-
-%% discovery(UserId, BAddr, Config, Expire) ->
-%% discovery(UserId, BAddr, ?SNMP_AGENT_PORT, Config, Expire, ?EXTRA_INFO).
-
-%% discovery(UserId, BAddr, Port, Config, Expire) ->
-%% discovery(UserId, BAddr, Port, Config, Expire, ?EXTRA_INFO).
-
-%% discovery(UserId, BAddr, Port, Config, Expire, ExtraInfo) ->
-%% call({discovery, self(), UserId, BAddr, Port, Config, Expire, ExtraInfo}).
-
-
verbosity(Verbosity) ->
case ?vvalidate(Verbosity) of
Verbosity ->
@@ -927,14 +906,6 @@ handle_call({cancel_async_request, UserId, ReqId}, _From, State) ->
{reply, Reply, State};
-%% handle_call({discovery, Pid, UserId, BAddr, Port, Config, Expire, ExtraInfo},
-%% _From, State) ->
-%% ?vlog("received discovery request", []),
-%% Reply = (catch handle_discovery(Pid, UserId, BAddr, Port, Config,
-%% Expire, ExtraInfo, State)),
-%% {reply, Reply, State};
-
-
handle_call({load_mib, Mib}, _From, State) ->
?vlog("received load_mib request", []),
case snmpm_config:load_mib(Mib) of
@@ -988,13 +959,6 @@ handle_call(is_started, _From, State) ->
IsStarted = is_started(State),
{reply, IsStarted, State};
-%% handle_call({system_info_updated, Target, What}, _From, State) ->
-%% ?vlog("received system_info_updated request: "
-%% "~n Target: ~p"
-%% "~n What: ~p", [Target, What]),
-%% Reply = handle_system_info_updated(State, Target, What),
-%% {reply, Reply, State};
-
handle_call(get_log_type, _From, State) ->
?vlog("received get_log_type request", []),
Reply = handle_get_log_type(State),
@@ -1042,11 +1006,6 @@ handle_info({snmp_error, ReqId, Reason, Domain, Addr}, State) ->
handle_snmp_error(Domain, Addr, ReqId, Reason, State),
{noreply, State};
-%% handle_info({snmp_error, ReqId, Pdu, Reason, Addr, Port}, State) ->
-%% ?vlog("received snmp_error message", []),
-%% handle_snmp_error(Pdu, ReqId, Reason, Addr, Port, State),
-%% {noreply, State};
-
handle_info({snmp_pdu, Pdu, Domain, Addr}, State) ->
?vlog("received snmp_pdu message", []),
@@ -1411,7 +1370,7 @@ handle_async_get(Pid, UserId, TargetName, Oids, SendOpts, State) ->
address = Addr,
type = get,
data = MsgData,
- expire = t() + Expire},
+ expire = snmp_misc:now(ms) + Expire},
ets:insert(snmpm_request_table, Req),
gct_activate(State#state.gct),
@@ -1460,7 +1419,7 @@ handle_async_get_next(Pid, UserId, TargetName, Oids, SendOpts, State) ->
address = Addr,
type = get_next,
data = MsgData,
- expire = t() + Expire},
+ expire = snmp_misc:now(ms) + Expire},
ets:insert(snmpm_request_table, Req),
gct_activate(State#state.gct),
@@ -1516,7 +1475,7 @@ handle_async_get_bulk(Pid,
address = Addr,
type = get_bulk,
data = MsgData,
- expire = t() + Expire},
+ expire = snmp_misc:now(ms) + Expire},
ets:insert(snmpm_request_table, Req),
gct_activate(State#state.gct),
{ok, ReqId};
@@ -1564,7 +1523,7 @@ handle_async_set(Pid, UserId, TargetName, VarsAndVals, SendOpts, State) ->
address = Addr,
type = set,
data = MsgData,
- expire = t() + Expire},
+ expire = snmp_misc:now(ms) + Expire},
ets:insert(snmpm_request_table, Req),
gct_activate(State#state.gct),
@@ -1600,18 +1559,6 @@ handle_cancel_async_request(UserId, ReqId, _State) ->
?vlog("handle_cancel_async_request -> not found", []),
{error, not_found}
end.
-
-
-%% handle_system_info_updated(#state{net_if = Pid, net_if_mod = Mod} = _State,
-%% net_if = _Target, What) ->
-%% case (catch Mod:system_info_updated(Pid, What)) of
-%% {'EXIT', _} ->
-%% {error, not_supported};
-%% Else ->
-%% Else
-%% end;
-%% handle_system_info_updated(_State, Target, What) ->
-%% {error, {bad_target, Target, What}}.
handle_get_log_type(#state{net_if = Pid, net_if_mod = Mod}) ->
case (catch Mod:get_log_type(Pid)) of
@@ -1629,47 +1576,6 @@ handle_set_log_type(#state{net_if = Pid, net_if_mod = Mod}, NewType) ->
Else
end.
-
-%% handle_discovery(Pid, UserId, BAddr, Port, Config, Expire, ExtraInfo, State) ->
-%% ?vtrace("handle_discovery -> entry with"
-%% "~n Pid: ~p"
-%% "~n UserId: ~p"
-%% "~n BAddr: ~p"
-%% "~n Port: ~p"
-%% "~n Config: ~p"
-%% "~n Expire: ~p",
-%% [Pid, UserId, BAddr, Port, Config, Expire]),
-%% case agent_data(default, default, "", Config) of
-%% {ok, Addr, Port, Vsn, MsgData} ->
-%% ?vtrace("handle_discovery -> send a ~p disco message", [Vsn]),
-%% ReqId = send_discovery(Vsn, MsgData, BAddr, Port, ExtraInfo,
-%% State),
-%% ?vdebug("handle_discovery -> ReqId: ~p", [ReqId]),
-%% MonRef = erlang:monitor(process, Pid),
-%% ?vtrace("handle_discovery -> MonRef: ~p", [MonRef]),
-%% Req = #request{id = ReqId,
-%% user_id = UserId,
-%% target = TargetName,
-%% addr = BAddr,
-%% port = Port,
-%% type = get,
-%% data = MsgData,
-%% mon = MonRef,
-%% discovery = true,
-%% expire = t() + Expire},
-%% ets:insert(snmpm_request_table, Req),
-%% gct_activate(State#state.gct),
-%% {ok, ReqId};
-
-%% Error ->
-%% ?vinfo("failed retrieving agent data for discovery (get):"
-%% "~n BAddr: ~p"
-%% "~n Port: ~p"
-%% "~n Error: ~p", [BAddr, Port, Error]),
-%% Error
-%% end.
-
-
handle_sync_timeout(ReqId, From, State) ->
?vtrace("handle_sync_timeout -> entry with"
"~n ReqId: ~p"
@@ -1693,7 +1599,7 @@ handle_sync_timeout(ReqId, From, State) ->
Req = Req0#request{ref = undefined,
mon = undefined,
from = undefined,
- expire = t()},
+ expire = snmp_misc:now(ms)},
ets:insert(snmpm_request_table, Req),
gct_activate(State#state.gct),
ok;
@@ -2116,7 +2022,8 @@ do_handle_agent(DefUserId, DefMod,
ok;
InvalidResult ->
- CallbackArgs = [Domain, Addr, Type, SnmpInfo, DefData],
+ CallbackArgs =
+ [Domain_or_Ip, Addr_or_Port, Type, SnmpInfo, DefData],
handle_invalid_result(handle_agent, CallbackArgs, InvalidResult)
catch
@@ -2212,7 +2119,8 @@ do_handle_agent(DefUserId, DefMod,
end;
T:E ->
- CallbackArgs = [Domain, Addr, Type, SnmpInfo, DefData],
+ CallbackArgs =
+ [Domain_or_Ip, Addr_or_Port, Type, SnmpInfo, DefData],
handle_invalid_result(handle_agent, CallbackArgs, T, E)
end.
@@ -3024,7 +2932,7 @@ cancel_timer(Ref) ->
handle_gc(GCT) ->
ets:safe_fixtable(snmpm_request_table, true),
- case do_gc(ets:first(snmpm_request_table), t()) of
+ case do_gc(ets:first(snmpm_request_table), snmp_misc:now(ms)) of
0 ->
gct_deactivate(GCT);
_ ->
@@ -3098,23 +3006,11 @@ send_set_request(VarsAndVals, Vsn, MsgData, Domain, Addr, ExtraInfo,
Mod:send_pdu(NetIf, Pdu, Vsn, MsgData, Domain, Addr, ExtraInfo),
Pdu#pdu.request_id.
-%% send_discovery(Vsn, MsgData, Addr, Port, ExtraInfo,
-%% #state{net_if = NetIf,
-%% net_if_mod = Mod}) ->
-%% Pdu = make_discovery_pdu(),
-%% Mod:send_pdu(NetIf, Pdu, Vsn, MsgData, Addr, Port, ExtraInfo),
-%% Pdu#pdu.request_id.
-
-
%%----------------------------------------------------------------------
%%
%%----------------------------------------------------------------------
-%% make_discovery_pdu() ->
-%% Oids = [?sysObjectID_instance, ?sysDescr_instance, ?sysUpTime_instance],
-%% make_pdu_impl(get, Oids).
-
make_pdu(set, VarsAndVals, MiniMIB) ->
VBs = [var_and_value_to_varbind(VAV, MiniMIB) || VAV <- VarsAndVals],
make_pdu_impl(set, VBs);
@@ -3397,7 +3293,7 @@ gct_init(#gct{parent = Parent, timeout = Timeout} = State) ->
gct(State, Timeout).
gct(#gct{parent = Parent, state = active} = State, Timeout) ->
- T = t(),
+ T = snmp_misc:now(ms),
receive
{stop, Parent} ->
ok;
@@ -3455,7 +3351,7 @@ gct(#gct{parent = Parent, state = idle} = State, Timeout) ->
end.
new_timeout(T1, T2) ->
- case T1 - (t() - T2) of
+ case T1 - (snmp_misc:now(ms) - T2) of
T when (T > 0) ->
T;
_ ->
@@ -3475,11 +3371,6 @@ maybe_demonitor(undefined) ->
maybe_demonitor(MonRef) ->
erlang:demonitor(MonRef).
-%% Time in milli seconds
-t() ->
- {A,B,C} = erlang:now(),
- A*1000000000+B*1000+(C div 1000).
-
mk_target_name(Domain, Addr, Config) ->
snmpm_config:mk_target_name(Domain, Addr, Config).
@@ -3518,12 +3409,6 @@ call(Req) ->
call(Req, To) ->
gen_server:call(?SERVER, Req, To).
-%% cast(Msg) ->
-%% gen_server:cast(?SERVER, Msg).
-
-%% info_msg(F, A) ->
-%% ?snmpm_info("Server: " ++ F, A).
-
warning_msg(F, A) ->
?snmpm_warning("Server: " ++ F, A).
@@ -3599,20 +3484,3 @@ note_store_info(Pid) ->
%%----------------------------------------------------------------------
-
-
-%%----------------------------------------------------------------------
-%% Debug
-%%----------------------------------------------------------------------
-
-% sz(L) when is_list(L) ->
-% length(lists:flatten(L));
-% sz(B) when is_binary(B) ->
-% size(B).
-
-%% p(F) ->
-%% p(F, []).
-
-%% p(F, A) ->
-%% io:format("~w:" ++ F ++ "~n", [?MODULE | A]).
-
diff --git a/lib/snmp/src/misc/snmp_misc.erl b/lib/snmp/src/misc/snmp_misc.erl
index c36cee2a53..cc438977c9 100644
--- a/lib/snmp/src/misc/snmp_misc.erl
+++ b/lib/snmp/src/misc/snmp_misc.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -101,21 +101,14 @@ sleep(Time) ->
%% Returns time in ms = sec/1000
% now() -> now(ms).
now(ms) ->
- Now = erlang:now(),
- element(1,Now)*1000000000+
- element(2,Now)*1000+
- (element(3,Now) div 1000);
+ erlang:monotonic_time(milli_seconds);
+
%% Returns time in cs = sec/100
now(cs) ->
- Now = erlang:now(),
- element(1,Now)*100000000+
- element(2,Now)*100+
- (element(3,Now) div 10000);
+ erlang:monotonic_time(100);
+
now(sec) ->
- Now = erlang:now(),
- element(1,Now)*1000000+
- element(2,Now)+
- (element(3,Now) div 1000000).
+ erlang:monotonic_time(seconds).
is_crypto_supported(Alg) ->
@@ -479,7 +472,3 @@ format_val('OBJECT IDENTIFIER', _, Val, MiniMib) ->
io_lib:format("~w", [NVal]);
format_val(_, _, Val, _MiniMib) ->
io_lib:format("~p", [Val]).
-
-
-
-
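Note on the time-source switch above: erlang:now/0 is replaced by erlang:monotonic_time/1, whose readings never step backwards and are therefore safe for the expire/GC arithmetic in snmpm_server. A minimal, hedged sketch (module and function names are illustrative only, not part of the patch):

    %% Illustrative sketch only.
    -module(mono_time_sketch).
    -export([now_ms/0, elapsed_ms/1]).

    %% Milliseconds on the monotonic clock, mirroring snmp_misc:now(ms).
    now_ms() ->
        erlang:monotonic_time(milli_seconds).

    %% A later monotonic reading is never smaller than an earlier one,
    %% unlike wall-clock time derived from erlang:now/0.
    elapsed_ms(T0) ->
        now_ms() - T0.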
diff --git a/lib/snmp/src/misc/snmp_verbosity.erl b/lib/snmp/src/misc/snmp_verbosity.erl
index f27c31db03..c9192158ef 100644
--- a/lib/snmp/src/misc/snmp_verbosity.erl
+++ b/lib/snmp/src/misc/snmp_verbosity.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -69,7 +69,7 @@ print2(_Verbosity,Format,Arguments) ->
timestamp() ->
- format_timestamp(now()).
+ format_timestamp(os:timestamp()).
format_timestamp({_N1, _N2, N3} = Now) ->
{Date, Time} = calendar:now_to_datetime(Now),
@@ -162,4 +162,3 @@ validate(log) -> log;
validate(debug) -> debug;
validate(trace) -> trace;
validate(_) -> silence.
-
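The timestamp change above swaps erlang:now/0 for os:timestamp/0, which returns the same {MegaSecs, Secs, MicroSecs} triple but without the uniqueness guarantee and its overhead. A hedged sketch of the formatting idiom; format_now/0 is a made-up name:

    %% Illustrative sketch only.
    format_now() ->
        {_, _, Micro} = Now = os:timestamp(),
        {{Y, Mo, D}, {H, Mi, S}} = calendar:now_to_datetime(Now),
        lists:flatten(
          io_lib:format("~4w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w.~3..0w",
                        [Y, Mo, D, H, Mi, S, Micro div 1000])).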
diff --git a/lib/snmp/test/snmp_agent_test.erl b/lib/snmp/test/snmp_agent_test.erl
index b4770ad0a9..a28cdf6aca 100644
--- a/lib/snmp/test/snmp_agent_test.erl
+++ b/lib/snmp/test/snmp_agent_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -426,10 +426,6 @@
-include_lib("snmp/include/snmp_types.hrl").
-include_lib("snmp/src/agent/snmpa_atl.hrl").
-%% -include_lib("snmp/include/SNMP-COMMUNITY-MIB.hrl").
-%% -include_lib("snmp/include/SNMP-VIEW-BASED-ACM-MIB.hrl").
-%% -include_lib("snmp/include/SNMP-USER-BASED-SM-MIB.hrl").
-
-define(klas1, [1,3,6,1,2,1,7]).
-define(klas2, [1,3,6,1,2,1,9]).
@@ -1612,7 +1608,8 @@ app_dir(App) ->
create_local_db_dir(Config) when is_list(Config) ->
?P(create_local_db_dir),
DataDir = snmp_test_lib:lookup(data_dir, Config),
- T = erlang:now(),
+ UName = erlang:unique_integer([positive]),
+ T = {UName, UName, UName},
[As,Bs,Cs] = [integer_to_list(I) || I <- tuple_to_list(T)],
DbDir = filename:join([DataDir, As, Bs, Cs]),
ok = del_dir(DbDir, 3),
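The hunk above replaces erlang:now/0, previously used only as a source of unique path components, with erlang:unique_integer([positive]). A hedged one-function sketch of the same idea; unique_dir/1 is hypothetical:

    %% Illustrative sketch only.
    unique_dir(Base) ->
        U = erlang:unique_integer([positive]),
        filename:join(Base, integer_to_list(U)).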
@@ -2448,10 +2445,6 @@ mul_cases() ->
].
-%% multiple_reqs_3(_X) ->
-%% {req, [], {conf, init_mul, mul_cases_3(), finish_mul}}.
-
-
mul_cases_2() ->
[
mul_get_2,
@@ -3200,19 +3193,18 @@ v1_get_next_p() ->
%% 4.1.3:2
gn([[tTooBig]]),
io:format("We currently don't handle tooBig correct!!!\n"),
-% ?line ?expect3(tooBig, 0, [{[tTooBig], 'NULL'}]),
+
?line ?expect3(tooBig, 0, any),
%% 4.1.3:3
gn([[tGenErr1]]),
-% ?line expect(40, genErr, 1, [{[tGenErr1], 'NULL'}]),
+
?line ?expect3(genErr, 1, any),
gn([[tGenErr2]]),
-% ?line ?expect3(genErr, 1, [{[tGenErr2], 'NULL'}]),
+
?line ?expect3(genErr, 1, any),
gn([[sysDescr], [tGenErr3]]),
-% ?line ?expect3(genErr, 2, [{[sysDescr], 'NULL'},
-% {[tGenErr3], 'NULL'}]).
+
?line ?expect3(genErr, 2, any).
v1_set_p() ->
@@ -3451,8 +3443,7 @@ v2_set_p() ->
%% Req. OLD-SNMPEA-MIB
table_test() ->
io:format("Testing simple get, next and set on communityTable...~n"),
-%% {[147,214,36,45], "public", 2, readWrite}.
-%% {[147,214,36,45], "standard trap", 2, read}.
+
Key1c3 = [intCommunityViewIndex,get(mip),is("public")],
Key2c3 = [intCommunityViewIndex,get(mip),is("standard trap")],
Key1c4 = [intCommunityAccess,get(mip),is("public")],
@@ -3620,8 +3611,6 @@ notify(Pid, What) ->
%% Req: system group, OLD-SNMPEA-MIB, Klas1
big_test() ->
- %% put(sname, {?MODULE, big_test}),
- %% put(verbosity, trace),
?DBG("big_test -> testing simple next/get/set @ master agent...",[]),
simple_standard_test(),
@@ -5691,8 +5680,7 @@ loop_mib_1(suite) -> [];
loop_mib_1(Config) when is_list(Config) ->
?P(loop_mib_1),
?LOG("loop_mib_1 -> initiate case",[]),
- %% snmpa:verbosity(master_agent,debug),
- %% snmpa:verbosity(mib_server,info),
+
{_SaNode, _MgrNode, _MibDir} = init_case(Config),
?DBG("loop_mib_1 -> ~n"
"\tSaNode: ~p~n"
@@ -6643,7 +6631,6 @@ otp8395({init, Config}) when is_list(Config) ->
%%
{ok, AgentNode} = start_node(agent),
- %% {ok, SubAgentNode} = start_node(sub_agent),
{ok, ManagerNode} = start_node(manager),
%% --
@@ -6654,16 +6641,9 @@ otp8395({init, Config}) when is_list(Config) ->
AgentMnesiaDir = join([AgentDbDir, "mnesia"]),
mnesia_init(AgentNode, AgentMnesiaDir),
- %% SubAgentDir = ?config(sub_agent_dir, Config),
- %% SubAgentMnesiaDir = join([SubAgentDir, "mnesia"]),
- %% mnesia_init(SubAgentNode, SubAgentMnesiaDir),
-
- %% ok = mnesia_create_schema(AgentNode, [AgentNode, SubAgentNode]),
- %% ok = mnesia:create_schema([AgentNode, SubAgentNode]),
mnesia_create_schema(AgentNode, [AgentNode]),
mnesia_start(AgentNode),
- %% mnesia_start(SubAgentNode),
%% --
%% Host & IP
@@ -6749,11 +6729,6 @@ otp8395({fin, Config}) when is_list(Config) ->
?DBG("otp8395(fin) -> stop agent node", []),
stop_node(AgentNode),
-
- %% SubAgentNode = ?config(sub_agent_node, Config),
- %% stop_node(SubAgentNode),
-
-
%% -
%% Stop the manager node
%%
@@ -6970,20 +6945,6 @@ process_options(Defaults, _Opts) ->
%% process_options(Defaults, Opts, []).
Defaults.
-%% process_options([], _Opts, Acc) ->
-%% lists:reverse(Acc);
-%% process_options([{Key, DefaultValue}|Defaults], Opts, Acc) ->
-%% case lists:keysearch(Key, 1, Opts) of
-%% {value, {Key, Value}} when is_list->
-
-
-%% snmp_app_env_init(Node, Entity, Conf) ->
-%% rpc:call(Node, snmp_app_env_init, [Entity, Conf]).
-
-%% snmp_app_env_init(Entity, Conf) ->
-%% application:unload(snmp),
-%% application:load(snmp),
-%% application:set_env(snmp, Entity, Conf).
start_stdalone_agent(Node, Config) ->
rpc:call(Node, ?MODULE, start_stdalone_agent, [Config]).
@@ -7063,9 +7024,6 @@ do_info(MaNode) ->
tree_size_bytes,
db_memory]}],
verify_info(Info, Keys),
- %% OldInfo = snmpa:old_info_format(Info),
- %% ?DBG("info_test1 -> OldInfo: ~n~p", [OldInfo]),
- %% verify_old_info(OldInfo),
ok.
verify_info([], []) ->
@@ -7107,21 +7065,6 @@ verify_subinfo(Info0, [Key|Keys]) ->
Info ->
verify_subinfo(Info, Keys)
end.
-
-%% verify_old_info(Info) ->
-%% Keys = [vsns, subagents, loaded_mibs,
-%% tree_size_bytes, process_memory, db_memory],
-%% verify_old_info(Keys, Info).
-
-%% verify_old_info([], _) ->
-%% ok;
-%% verify_old_info([Key|Keys], Info) ->
-%% case lists:keymember(Key, 1, Info) of
-%% true ->
-%% verify_old_info(Keys, Info);
-%% false ->
-%% ?FAIL({missing_old_info, Key})
-%% end.
%% Index String - string used in index
is(S) -> [length(S) | S].
@@ -7184,8 +7127,6 @@ rewrite_usm_mgr(Dir, ShaKey, DesKey) ->
reset_usm_mgr(Dir) ->
snmp_agent_test_lib:reset_usm_mgr(Dir).
-%% update_community(Vsns, Dir) ->
-%% snmp_agent_test_lib:update_community(Vsns, Dir).
update_vacm(Vsn, Dir) ->
snmp_agent_test_lib:update_vacm(Vsn, Dir).
@@ -7196,8 +7137,6 @@ write_community_conf(Dir, Conf) ->
write_target_addr_conf(Dir, Conf) ->
snmp_agent_test_lib:write_target_addr_conf(Dir, Conf).
-%% write_target_addr_conf(Dir, ManagerIp, UDP, Vsns) ->
-%% snmp_agent_test_lib:write_target_addr_conf(Dir, ManagerIp, UDP, Vsns).
rewrite_target_addr_conf(Dir, NewPort) ->
snmp_agent_test_lib:rewrite_target_addr_conf(Dir, NewPort).
@@ -7218,10 +7157,6 @@ reset_target_params_conf(Dir) ->
write_notify_conf(Dir) ->
snmp_agent_test_lib:write_notify_conf(Dir).
-%% write_view_conf(Dir) ->
-%% snmp_agent_test_lib:write_view_conf(Dir).
-
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
copy_file(From, To) ->
@@ -7381,9 +7316,6 @@ lists_key1search(Key, List) when is_atom(Key) ->
end.
-%% regs() ->
-%% lists:sort(registered()).
-
%% ------
join(Parts) ->
diff --git a/lib/snmp/test/snmp_app_test.erl b/lib/snmp/test/snmp_app_test.erl
index 9b13e7cf1a..1e68b4e2c8 100644
--- a/lib/snmp/test/snmp_app_test.erl
+++ b/lib/snmp/test/snmp_app_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -32,8 +32,6 @@
modules/1,
exportall/1,
app_depend/1,
- undef_funcs/1,
-
start_and_stop_empty/1,
start_and_stop_with_agent/1,
@@ -59,7 +57,6 @@ all() ->
modules,
exportall,
app_depend,
- undef_funcs,
{group, start_and_stop}
],
Cases.
@@ -131,9 +128,6 @@ end_per_suite(Config) when is_list(Config) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Test server callbacks
-init_per_testcase(undef_funcs, Config) ->
- Config2 = lists:keydelete(watchdog, 1, Config),
- [{watchdog, ?WD_START(?MINS(10))} | Config2];
init_per_testcase(_Case, Config) ->
Config.
@@ -293,88 +287,6 @@ check_apps([App|Apps]) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-undef_funcs(suite) ->
- [];
-undef_funcs(doc) ->
- [];
-undef_funcs(Config) when is_list(Config) ->
- App = snmp,
- AppFile = key1search(app_file, Config),
- Mods = key1search(modules, AppFile),
- Root = code:root_dir(),
- LibDir = code:lib_dir(App),
- EbinDir = filename:join([LibDir,"ebin"]),
- XRefTestName = undef_funcs_make_name(App, xref_test_name),
- {ok, XRef} = xref:start(XRefTestName),
- ok = xref:set_default(XRef,
- [{verbose,false},{warnings,false}]),
- XRefName = undef_funcs_make_name(App, xref_name),
- {ok, XRefName} = xref:add_release(XRef, Root, {name,XRefName}),
- {ok, App} = xref:replace_application(XRef, App, EbinDir),
- {ok, Undefs} = xref:analyze(XRef, undefined_function_calls),
- xref:stop(XRef),
- analyze_undefined_function_calls(Undefs, Mods, []).
-
-valid_undef(crypto = CalledMod) ->
- case (catch CalledMod:version()) of
- Version when is_list(Version) ->
- %% The called module was crypto and the version
- %% function returns a valid value.
- %% This means that the function is
- %% actually undefined...
- true;
- _ ->
- %% The called module was crypto but the version
- %% function does *not* return a valid value.
- %% This means the crypto was not actually not
- %% build, which is an case snmp handles.
- false
- end;
-valid_undef(_) ->
- true.
-
-
-analyze_undefined_function_calls([], _, []) ->
- ok;
-analyze_undefined_function_calls([], _, AppUndefs) ->
- exit({suite_failed, {undefined_function_calls, AppUndefs}});
-analyze_undefined_function_calls([{{Mod, _F, _A}, _C} = AppUndef|Undefs],
- AppModules, AppUndefs) ->
- %% Check that this module is our's
- case lists:member(Mod,AppModules) of
- true ->
- {Calling,Called} = AppUndef,
- {Mod1,Func1,Ar1} = Calling,
- {Mod2,Func2,Ar2} = Called,
- %% If the called module is crypto, then we will *not*
- %% fail if crypto is not built (since crypto is actually
- %% not built for all platforms)
- case valid_undef(Mod2) of
- true ->
- io:format("undefined function call: "
- "~n ~w:~w/~w calls ~w:~w/~w~n",
- [Mod1,Func1,Ar1,Mod2,Func2,Ar2]),
- analyze_undefined_function_calls(
- Undefs, AppModules, [AppUndef|AppUndefs]);
- false ->
- io:format("skipping ~p (calling ~w:~w/~w)~n",
- [Mod, Mod2, Func2, Ar2]),
- analyze_undefined_function_calls(Undefs,
- AppModules, AppUndefs)
- end;
- false ->
- io:format("dropping ~p~n", [Mod]),
- analyze_undefined_function_calls(Undefs, AppModules, AppUndefs)
- end.
-
-%% This function is used simply to avoid cut-and-paste errors later...
-undef_funcs_make_name(App, PostFix) ->
- list_to_atom(atom_to_list(App) ++ "_" ++ atom_to_list(PostFix)).
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
diff --git a/lib/snmp/test/snmp_appup_mgr.erl b/lib/snmp/test/snmp_appup_mgr.erl
index 6648ce9dbe..b07f8b3c72 100644
--- a/lib/snmp/test/snmp_appup_mgr.erl
+++ b/lib/snmp/test/snmp_appup_mgr.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2010. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -140,7 +140,7 @@ handle_req(#agent{host = Host, port = Port}, Reqs) ->
{ok, ReqId} = snmpm:ag(?USER_ID, Host, Port, Oids),
p("issued get-request (~w) for: ~s", [ReqId, oid_descs(Descs)]),
ReqTimer = erlang:send_after(?REQ_TIMEOUT, self(), {req_timeout, ReqId}),
- {ReqId, erlang:now(), ReqTimer}.
+ {ReqId, erlang:monotonic_time(micro_seconds), ReqTimer}.
oid_descs([]) ->
[];
@@ -163,7 +163,7 @@ handle_req_timeout(#state{ids = IDs0} = State, ReqId) ->
handle_snmp(#state{ids = IDs0} = S, {error, ReqId, Reason}) ->
case lists:keysearch(ReqId, 1, IDs0) of
{value, {ReqId, T, Ref}} ->
- Diff = timer:now_diff(erlang:now(), T),
+ Diff = erlang:monotonic_time(micro_seconds) - T,
p("SNMP error regarding outstanding request after ~w microsec:"
"~n ReqId: ~w"
"~n Reason: ~w", [Diff, ReqId, Reason]),
@@ -187,7 +187,7 @@ handle_snmp(State, {agent, Addr, Port, SnmpInfo}) ->
handle_snmp(#state{ids = IDs0} = S, {pdu, Addr, Port, ReqId, SnmpResponse}) ->
case lists:keysearch(ReqId, 1, IDs0) of
{value, {ReqId, T, Ref}} ->
- Diff = timer:now_diff(erlang:now(), T),
+ Diff = erlang:monotonic_time(micro_seconds) - T,
p("SNMP pdu regarding outstanding request after ~w microsec:"
"~n ReqId: ~w"
"~n Addr: ~w"
diff --git a/lib/snmp/test/snmp_conf_test.erl b/lib/snmp/test/snmp_conf_test.erl
index 7f5d11c0e7..dacedf0847 100644
--- a/lib/snmp/test/snmp_conf_test.erl
+++ b/lib/snmp/test/snmp_conf_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -117,7 +117,7 @@ check_mandatory(Config) when is_list(Config) ->
{b, mandatory},
{d, {value, 20202}},
{e, {value, "kalle"}}],
- ?line {ok, L1} = verify_mandatory(A1, B1),
+ ?line {ok, _L1} = verify_mandatory(A1, B1),
?DBG("check_mandatory -> L1: ~p", [L1]),
A2 = [{a, hej}, {c, 10}, {d, 10101}, {f, 10.88}],
B2 = [{a, {value, hejsan}},
diff --git a/lib/snmp/test/snmp_log_test.erl b/lib/snmp/test/snmp_log_test.erl
index fb7285110f..ed71dba23f 100644
--- a/lib/snmp/test/snmp_log_test.erl
+++ b/lib/snmp/test/snmp_log_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -633,11 +633,11 @@ log_to_txt3(Config) when is_list(Config) ->
log_reader_log_to(Reader,
fun() ->
I = disk_log:info(Log),
- T1 = t(),
+ T1 = snmp_misc:now(ms),
R = snmp_log:log_to_txt(Log, LogFile, Dir,
Mibs, TxtFile),
- T2 = t(),
- io:format(user,
+ T2 = snmp_misc:now(ms),
+ io:format(user,
"Time converting file: ~w ms~n",
[T2 - T1]),
{R, I}
@@ -704,10 +704,10 @@ log_writer_start(Name, File, Size, Repair) ->
log_writer_stop(Pid) ->
Pid ! {stop, self()},
- _T1 = t(),
+ _T1 = snmp_misc:now(ms),
receive
{'EXIT', Pid, normal} ->
- _T2 = t(),
+ _T2 = snmp_misc:now(ms),
?DBG("it took ~w ms to stop the writer", [_T2 - _T1]),
ok
after 60000 ->
@@ -721,10 +721,10 @@ log_writer_info(Pid) ->
log_writer_sleep(Pid, Time) ->
Pid ! {sleep, Time, self()},
- _T1 = t(),
+ _T1 = snmp_misc:now(ms),
receive
{sleeping, Pid} ->
- _T2 = t(),
+ _T2 = snmp_misc:now(ms),
?DBG("it took ~w ms to put the writer to sleep", [_T2 - _T1]),
ok;
{'EXIT', Pid, Reason} ->
@@ -793,10 +793,10 @@ lp(F, A) ->
log_reader_start() ->
Pid = spawn_link(?MODULE, log_reader_main, [self()]),
- _T1 = t(),
+ _T1 = snmp_misc:now(ms),
receive
{started, Pid} ->
- _T2 = t(),
+ _T2 = snmp_misc:now(ms),
?DBG("it took ~w ms to start the reader", [_T2 - _T1]),
{ok, Pid};
{'EXIT', Pid, Reason} ->
@@ -807,10 +807,10 @@ log_reader_start() ->
log_reader_stop(Pid) ->
Pid ! {stop, self()},
- _T1 = t(),
+ _T1 = snmp_misc:now(ms),
receive
{'EXIT', Pid, normal} ->
- _T2 = t(),
+ _T2 = snmp_misc:now(ms),
?DBG("it took ~w ms to put the reader to eleep", [_T2 - _T1]),
ok
after 1000 ->
@@ -1124,8 +1124,3 @@ join(D, F) ->
p(Case) ->
io:format(user, "test case: ~w~n", [Case]).
-
-%% Time in milli sec
-t() ->
- {A,B,C} = erlang:now(),
- A*1000000000+B*1000+(C div 1000).
diff --git a/lib/snmp/test/snmp_manager_config_test.erl b/lib/snmp/test/snmp_manager_config_test.erl
index f37e957dae..ba674edce3 100644
--- a/lib/snmp/test/snmp_manager_config_test.erl
+++ b/lib/snmp/test/snmp_manager_config_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -2169,7 +2169,6 @@ register_usm_user_using_function(Conf) when is_list(Conf) ->
%% --
p("done"),
ok.
-%% ?SKIP(not_yet_implemented).
%%
@@ -2259,8 +2258,9 @@ create_and_increment(Conf) when is_list(Conf) ->
?line {ok, _Pid} = snmpm_config:start_link(Opts),
%% Random init
- {A,B,C} = erlang:now(),
- random:seed(A,B,C),
+ random:seed(erlang:phash2([node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
StartVal = random:uniform(2147483647),
IncVal = 42,
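The seeding change above avoids erlang:now/0 by combining three independent integer sources. The same idiom in isolation (seed_and_draw/0 is an illustrative name; it assumes the pre-OTP-19 random module used by these suites):

    %% Illustrative sketch only.
    seed_and_draw() ->
        random:seed(erlang:phash2([node()]),
                    erlang:monotonic_time(),
                    erlang:unique_integer()),
        random:uniform(2147483647).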
diff --git a/lib/snmp/test/snmp_test_lib.erl b/lib/snmp/test/snmp_test_lib.erl
index 5e611340a3..72c7452ec4 100644
--- a/lib/snmp/test/snmp_test_lib.erl
+++ b/lib/snmp/test/snmp_test_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -30,7 +30,7 @@
lookup/2,
replace_config/3, set_config/3, get_config/2, get_config/3]).
-export([fail/3, skip/3]).
--export([millis/0, millis_diff/2, hours/1, minutes/1, seconds/1, sleep/1]).
+-export([hours/1, minutes/1, seconds/1, sleep/1]).
-export([flush_mqueue/0, trap_exit/0, trap_exit/1]).
-export([ping/1, local_nodes/0, nodes_on/1]).
-export([start_node/2]).
@@ -334,14 +334,6 @@ skip(Reason, Module, Line) ->
%% Time related function
%%
-millis() ->
- erlang:now().
-
-millis_diff(A,B) ->
- T1 = (element(1,A)*1000000) + element(2,A) + (element(3,A)/1000000),
- T2 = (element(1,B)*1000000) + element(2,B) + (element(3,B)/1000000),
- T1 - T2.
-
hours(N) -> trunc(N * 1000 * 60 * 60).
minutes(N) -> trunc(N * 1000 * 60).
seconds(N) -> trunc(N * 1000).
@@ -628,4 +620,3 @@ format_timestamp({_N1, _N2, N3} = Now) ->
io_lib:format("~.4w:~.2.0w:~.2.0w ~.2.0w:~.2.0w:~.2.0w ~w",
[YYYY,MM,DD,Hour,Min,Sec,round(N3/1000)]),
lists:flatten(FormatDate).
-
diff --git a/lib/snmp/test/snmp_test_lib.hrl b/lib/snmp/test/snmp_test_lib.hrl
index 9b7609b831..fd584880da 100644
--- a/lib/snmp/test/snmp_test_lib.hrl
+++ b/lib/snmp/test/snmp_test_lib.hrl
@@ -1,8 +1,8 @@
-%%<copyright>
-%% <year>2002-2014</year>
-%% <holder>Ericsson AB, All Rights Reserved</holder>
-%%</copyright>
-%%<legalnotice>
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2002-2015. All Rights Reserved.
+%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
%% compliance with the License. You should have received a copy of the
@@ -15,7 +15,7 @@
%% under the License.
%%
%% The Initial Developer of the Original Code is Ericsson AB.
-%%</legalnotice>
+%% %CopyrightEnd%
%%
%%----------------------------------------------------------------------
%% Purpose: Define common macros for testing
@@ -73,8 +73,6 @@
-endif.
-define(SLEEP(MSEC), snmp_test_lib:sleep(MSEC)).
--define(M(), snmp_test_lib:millis()).
--define(MDIFF(A,B), snmp_test_lib:millis_diff(A,B)).
%% - Process utility macros -
@@ -149,4 +147,3 @@
-define(PRINT(P,F,A),
snmp_test_lib:print(P,?MODULE,?LINE,F,A)).
-
diff --git a/lib/snmp/test/snmp_test_mgr.erl b/lib/snmp/test/snmp_test_mgr.erl
index 8cb6ec588e..1bf7efc695 100644
--- a/lib/snmp/test/snmp_test_mgr.erl
+++ b/lib/snmp/test/snmp_test_mgr.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -188,8 +188,9 @@ receive_trap(Timeout) ->
init({Options, CallerPid}) ->
put(sname, mgr),
put(verbosity, debug),
- {A1,A2,A3} = erlang:now(),
- random:seed(A1,A2,A3),
+ random:seed(erlang:phash2([node()]),
+ erlang:monotonic_time(),
+ erlang:unique_integer()),
case (catch is_options_ok(Options)) of
true ->
put(debug, get_value(debug, Options, false)),
@@ -1135,4 +1136,3 @@ d(_,_F,_A) ->
formated_timestamp() ->
snmp_test_lib:formated_timestamp().
-
diff --git a/lib/snmp/vsn.mk b/lib/snmp/vsn.mk
index 345cc790f2..14da37a225 100644
--- a/lib/snmp/vsn.mk
+++ b/lib/snmp/vsn.mk
@@ -2,7 +2,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1997-2014. All Rights Reserved.
+# Copyright Ericsson AB 1997-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -18,6 +18,6 @@
# %CopyrightEnd%
APPLICATION = snmp
-SNMP_VSN = 5.1.1
+SNMP_VSN = 5.2
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(SNMP_VSN)$(PRE_VSN)"
diff --git a/lib/ssh/doc/src/introduction.xml b/lib/ssh/doc/src/introduction.xml
index b42910cb34..1efbc16016 100644
--- a/lib/ssh/doc/src/introduction.xml
+++ b/lib/ssh/doc/src/introduction.xml
@@ -25,31 +25,181 @@
<title>Introduction</title>
<prepared>OTP team</prepared>
+ <responsible></responsible>
+ <docno></docno>
+ <approved></approved>
+ <checked></checked>
+ <date></date>
+ <rev></rev>
<file>introduction.xml</file>
</header>
-
+ <p>SSH is a protocol for secure remote logon and
+ other secure network services over an insecure network.</p>
<section>
- <title>Purpose</title>
+ <title>Scope and Purpose</title>
- <p>Secure Shell (SSH) is a protocol for secure remote login and
- other secure network services over an insecure network. SSH
- provides a single, full-duplex, byte-oriented connection between
+ <p>SSH provides a single, full-duplex, and byte-oriented connection between
client and server. The protocol also provides privacy, integrity,
- server authentication and man-in-the-middle protection.</p>
-
- <p>The Erlang SSH application is an implementation of the SSH
- protocol in Erlang which offers API functions to write customized
- SSH clients and servers as well as making the Erlang shell
- available via SSH. Also included in the SSH application are an
- SFTP (SSH File Transfer Protocol) client <seealso
- marker="ssh_sftp">ssh_sftp</seealso> and server <seealso
- marker="ssh_sftp">ssh_sftpd</seealso>.</p>
+ server authentication, and man-in-the-middle protection.</p>
+
+ <p>The <c>ssh</c> application is an implementation of the SSH Transport, Connection and Authentication
+ Layer Protocols in Erlang. It provides the following:</p>
+ <list type="bulleted">
+      <item>API functions to write customized SSH client and server applications</item>
+ <item>The Erlang shell available over SSH</item>
+ <item>An SFTP client (<seealso marker="ssh_sftp">ssh_sftp</seealso>)
+ and server (<seealso marker="ssh_sftp">ssh_sftpd</seealso>)</item>
+ </list>
</section>
<section>
<title>Prerequisites</title>
- <p>It is assumed that the reader is familiar with the concepts of <seealso marker="doc/design_principles:des_princ">OTP</seealso>
- and has a basic understanding of <url href="http://en.wikipedia.org/wiki/Public-key_cryptography">public keys</url>.</p>
+ <p>It is assumed that the reader is familiar with the Erlang programming language,
+ concepts of <em>OTP</em>, and has a basic understanding of <em>public keys</em>.</p>
+ </section>
+
+<section>
+ <title>SSH Protocol Overview</title>
+
+ <p>Conceptually, the SSH protocol can be partitioned into four
+ layers:</p>
+
+ <image file="SSH_protocols.png">
+ <icaption>SSH Protocol Architecture</icaption>
+ </image>
+
+ <section>
+ <title>Transport Protocol</title>
+
+ <p>The SSH Transport Protocol is a secure, low-level transport.
+ It provides strong encryption, cryptographic host
+      authentication, and integrity protection. A minimal set of
+      Message Authentication Code (MAC) and encryption
+      algorithms is supported. For details, see the
+ <seealso marker="ssh">ssh(3)</seealso> manual page in <c>ssh</c>.</p>
+ </section>
+
+ <section>
+ <title>Authentication Protocol</title>
+
+ <p>The SSH Authentication Protocol is a general-purpose user
+ authentication protocol run over the SSH Transport Layer
+ Protocol. The <c>ssh</c> application supports user authentication as follows:
+ </p>
+ <list type="bulleted">
+ <item>
+	Using public key technology. RSA and DSA keys are supported;
+	X509 certificates are not supported.
+ </item>
+ <item>
+ Using keyboard-interactive authentication.
+ This is suitable for interactive authentication methods
+ that do not need any special software support on the client side.
+ Instead, all authentication data is entered from the keyboard.
+ </item>
+ <item>
+ Using a pure password-based authentication scheme.
+	Here, the plain text password is encrypted before being sent
+	over the network.
+ </item>
+ </list>
+ <p>Several configuration options for
+ authentication handling are available in
+ <seealso marker="ssh#connect-3">ssh:connect/[3,4]</seealso>
+ and <seealso marker="ssh#daemon-2">ssh:daemon/[2,3]</seealso>.</p>
+ <p>
+ The public key handling can be customized by implementing
+ the following behaviours from <c>ssh</c>:</p>
+ <list type="bulleted">
+ <item>Module
+ <seealso marker="ssh_client_key_api">ssh_client_key_api</seealso>.
+ </item>
+ <item>Module
+ <seealso marker="ssh_server_key_api">ssh_server_key_api</seealso>.
+ </item>
+ </list>
+ </section>
+
+ <section>
+ <title>Connection Protocol</title>
+
+ <p>The SSH Connection Protocol provides application-support
+ services over the transport pipe, for example, channel multiplexing,
+ flow control, remote program execution, signal propagation, and
+ connection forwarding. Functions for handling the SSH
+ Connection Protocol can be found in the module <seealso
+ marker="ssh_connection">ssh_connection</seealso> in <c>ssh</c>.
+ </p>
+ </section>
+
+ <section>
+ <title>Channels</title>
+
+ <p>All terminal sessions, forwarded connections, and so on, are
+ channels. Multiple channels are multiplexed into a single
+ connection. All channels are flow-controlled. This means that no
+ data is sent to a channel peer until a message is received to
+ indicate that window space is available.
+ The <em>initial window size</em> specifies how many bytes of channel
+    data can be sent to the channel peer without adjusting the
+ window. Typically, an SSH client opens a channel, sends data (commands),
+ receives data (control information), and then closes the channel.
+ The <seealso marker="ssh_channel">ssh_channel</seealso> behaviour
+ handles generic parts of SSH channel management. This makes it easy
+    to write your own SSH client/server processes that use flow-control
+    and thus lets you focus on the application logic.
+ </p>
+
+ <p>Channels come in the following three flavors:</p>
+
+ <list type="bulleted">
+ <item><em>Subsystem</em> - Named services that can be run as
+ part of an SSH server, such as SFTP <seealso
+    marker="ssh_sftpd">(ssh_sftpd)</seealso>, which is built into the
+    SSH daemon (server) by default but can be disabled. The Erlang <c>ssh</c>
+    daemon can be configured to run any Erlang-implemented
+    SSH subsystem.
+ </item>
+ <item><em>Shell</em> - Interactive shell. By default the
+ Erlang daemon runs the Erlang shell. The shell can be customized by
+ providing your own read-eval-print loop. You can also provide your
+ own Command-Line Interface (CLI) implementation,
+ but that is much more work.
+ </item>
+ <item><em>Exec</em> - One-time remote execution of commands. See function
+ <seealso marker="ssh_connection#exec-4">ssh_connection:exec/4</seealso>
+ for more information.</item>
+ </list>
+ </section>
+
+
+
</section>
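As a hedged illustration of the connection and channel concepts described above (host, credentials, and the executed command are placeholders; only the documented ssh and ssh_connection calls are assumed):

    %% Illustrative sketch only.
    run_remote() ->
        {ok, Conn} = ssh:connect("host.example", 22,
                                 [{user, "alice"},
                                  {password, "secret"},
                                  {silently_accept_hosts, true}]),
        {ok, Chan} = ssh_connection:session_channel(Conn, 5000),
        success = ssh_connection:exec(Conn, Chan, "uname -a", 5000),
        Output = receive
                     {ssh_cm, Conn, {data, Chan, 0, Data}} -> Data
                 after 5000 -> <<>>
                 end,
        ok = ssh:close(Conn),
        Output.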
+ <section>
+ <title>Where to Find More Information</title>
+ <p>
+ For detailed information about the SSH protocol, refer to the
+    following Requests for Comments (RFCs):
+ </p>
+
+ <list type="bulleted">
+ <item><url href="http://www.ietf.org/rfc/rfc4250.txt">RFC 4250</url> -
+ Protocol Assigned Numbers</item>
+ <item><url href="http://www.ietf.org/rfc/rfc4251.txt">RFC 4251</url> -
+ Protocol Architecture</item>
+ <item><url href="http://www.ietf.org/rfc/rfc4252.txt">RFC 4252</url> -
+ Authentication Protocol</item>
+ <item><url href="http://www.ietf.org/rfc/rfc4253.txt">RFC 4253</url> -
+ Transport Layer Protocol</item>
+ <item><url href="http://www.ietf.org/rfc/rfc4254.txt">RFC 4254</url> -
+ Connection Protocol</item>
+ <item><url href="http://www.ietf.org/rfc/rfc4255.txt">RFC 4255</url> -
+ Key Fingerprints</item>
+ <item><url href="http://www.ietf.org/rfc/rfc4344.txt">RFC 4344</url> -
+ Transport Layer Encryption Modes</item>
+ <item><url href="http://www.ietf.org/rfc/rfc4716.txt">RFC 4716</url> -
+ Public Key File Format</item>
+ </list>
+ </section>
</chapter>
diff --git a/lib/ssh/doc/src/notes.xml b/lib/ssh/doc/src/notes.xml
index 3aa61aa9ec..c77ee1e77a 100644
--- a/lib/ssh/doc/src/notes.xml
+++ b/lib/ssh/doc/src/notes.xml
@@ -29,6 +29,139 @@
<file>notes.xml</file>
</header>
+<section><title>Ssh 3.2.4</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+	    Gracefully terminate if the socket is unexpectedly closed.</p>
+ <p>
+ Own Id: OTP-12782</p>
+ </item>
+ <item>
+ <p>
+ Made Codenomicon Defensics test suite pass: <list>
+ <item>limit number of algorithms in kexinit
+ message</item> <item>check 'e' and 'f' parameters in
+ kexdh</item> <item>implement 'keyboard-interactive' user
+ authentication on server side</item> <item> return plain
+ text message to bad version exchange message</item>
+ </list></p>
+ <p>
+ Own Id: OTP-12784</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Ssh 3.2.3</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ A new option for handling the SSH_MSG_DEBUG message's
+	    printouts. A fun can be given in the options that will
+ be called whenever the SSH_MSG_DEBUG message arrives.
+ This enables the user to format the printout or just
+ discard it.</p>
+ <p>
+ Own Id: OTP-12738 Aux Id: seq12860 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Ssh 3.2.2</title>
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ New option <c>id_string</c> for <c>ssh:daemon</c> and
+ <c>ssh:connect</c> for limiting banner grabbing attempts.</p>
+ <p>
+ The possible values are: <c>{id_string,string()}</c> and
+ <c>{id_string,random}</c>. The latter will make ssh
+	    generate a random nonsense id-string for each new
+ connection.</p>
+ <p>
+ Own Id: OTP-12659</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Ssh 3.2.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Ssh crashed if a message was sent on a channel with
+ packet_size = 0.</p>
+ <p>
+ A new option for ssh:daemon is also introduced:
+ <c>minimal_remote_max_packet_size</c>. This option sets
+ the least max packet size declaration that the daemon
+ will accept from a client. The default value is 0 to
+	    maintain compatibility with OpenSSH and the RFCs.</p>
+ <p>
+ Own Id: OTP-12645 Aux Id: seq12816 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Ssh 3.2</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+	    If a channel is closed by the peer while a function
+	    with call semantics in ssh_connection.erl is in progress, {error,
+	    closed} is now returned. The documentation now states that these
+	    functions can return {error, timeout | closed} and not only ssh_request_status().</p>
+ <p>
+ Own Id: OTP-12004</p>
+ </item>
+ <item>
+ <p>
+	    Fixed a bug that caused ssh:connect to return
+	    <c>{error,int()}</c> instead of <c>{error,timeout}</c>
+	    when the ssh handshake takes too long.</p>
+ <p>
+ Own Id: OTP-12369</p>
+ </item>
+ <item>
+ <p>
+ Documentation corrections. (Thanks to Rabbe Fogelholm)</p>
+ <p>
+ Own Id: OTP-12399</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Example of ssh_connection:exec added.</p>
+ <p>
+ Own Id: OTP-12558</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Ssh 3.1</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/ssh/doc/src/ref_man.xml b/lib/ssh/doc/src/ref_man.xml
index 55339298e8..afe3f2ddf9 100644
--- a/lib/ssh/doc/src/ref_man.xml
+++ b/lib/ssh/doc/src/ref_man.xml
@@ -28,8 +28,8 @@
<file>ref_man.xml</file>
</header>
<description>
- <p>The SSH application is an erlang implementation of the
- secure shell protocol (SSH) as defined by RFC 4250 - 4254</p>
+ <p>The <c>ssh</c> application is an Erlang implementation of the
+ Secure Shell Protocol (SSH) as defined by RFC 4250 - 4254.</p>
</description>
<xi:include href="ssh_app.xml"/>
diff --git a/lib/ssh/doc/src/ssh.xml b/lib/ssh/doc/src/ssh.xml
index 9f5d1c003d..0516945c0e 100644
--- a/lib/ssh/doc/src/ssh.xml
+++ b/lib/ssh/doc/src/ssh.xml
@@ -22,54 +22,85 @@
</legalnotice>
<title>ssh</title>
+ <prepared></prepared>
+ <docno></docno>
<date>2007-10-06</date>
+ <rev></rev>
</header>
<module>ssh</module>
- <modulesummary>Main API of the SSH application</modulesummary>
+ <modulesummary>Main API of the ssh application</modulesummary>
<description>
- <p>Interface module for the SSH application. </p>
+ <p>Interface module for the <c>ssh</c> application.</p>
</description>
<section>
<title>SSH</title>
-
+ <marker id="supported"/>
<list type="bulleted">
- <item>SSH requires the crypto and public_key applications.</item>
- <item>Supported SSH version is 2.0 </item>
- <item>Supported MAC algorithms: hmac-sha2-256 and hmac-sha1</item>
- <item>Supported encryption algorithms: aes128-ctr, aes128-cb and 3des-cbc</item>
- <item>Supports unicode filenames if the emulator and the underlaying OS supports it. See the DESCRIPTION section in <seealso marker="kernel:file">file</seealso> for information about this subject</item>
- <item>Supports unicode in shell and cli</item>
+ <item>For application dependencies see <seealso marker="SSH_app"> ssh(6)</seealso> </item>
+ <item>Supported SSH version is 2.0.</item>
+ <item>Supported public key algorithms: ssh-rsa and ssh-dss.</item>
+ <item>Supported MAC algorithms: hmac-sha2-256 and hmac-sha1.</item>
+    <item>Supported encryption algorithms: aes128-ctr, aes128-cbc and 3des-cbc.</item>
+ <item>Supported key exchange algorithms: diffie-hellman-group1-sha1.</item>
+    <item>Supported compression algorithms: none, zlib and zlib@openssh.com.</item>
+    <item>Supports unicode filenames if the emulator and the underlying OS support it.
+ See section DESCRIPTION in the
+ <seealso marker="kernel:file">file</seealso> manual page in <c>kernel</c>
+ for information about this subject.</item>
+ <item>Supports unicode in shell and CLI.</item>
</list>
</section>
<section>
- <title>DATA TYPES </title>
+ <title>DATA TYPES</title>
<p>Type definitions that are used more than once in
- this module and/or abstractions to indicate the intended use of the data
- type:</p>
- <p><c>boolean() = true | false </c></p>
- <p><c>string() = [byte()]</c></p>
- <p><c>ssh_daemon_ref() - opaque to the user
- returned by ssh:daemon/[1,2,3]</c></p>
- <p><c>ssh_connection_ref() - opaque to the user
- returned by ssh:connect/3</c></p>
- <p><c>ip_address() - inet::ip_address()</c></p>
- <p><c>subsystem_spec() = {subsystem_name(),
- {channel_callback(), channel_init_args()}} </c></p>
- <p><c>subsystem_name() = string() </c></p>
- <p><c>channel_callback() = atom() - Name of the erlang module
- implementing the subsystem using the ssh_channel behavior see</c>
- <seealso marker="ssh_channel">ssh_channel(3)</seealso></p>
- <p><c>channel_init_args() = list()</c></p>
- </section>
+ this module, or abstractions to indicate the intended use of the data
+ type, or both:</p>
+ <taglist>
+ <tag><c>boolean() =</c></tag>
+ <item><p><c>true | false</c></p></item>
+ <tag><c>string() =</c></tag>
+ <item><p><c>[byte()]</c></p></item>
+ <tag><c>ssh_daemon_ref() =</c></tag>
+ <item><p>opaque() -
+ as returned by <c>ssh:daemon/[1,2,3]</c></p></item>
+ <tag><c>ssh_connection_ref() =</c></tag>
+ <item><p>opaque() - as returned by <c>ssh:connect/3</c></p></item>
+ <tag><c>ip_address() =</c></tag>
+ <item><p><c>inet::ip_address</c></p></item>
+ <tag><c>subsystem_spec() =</c></tag>
+ <item><p><c>{subsystem_name(),
+ {channel_callback(), channel_init_args()}}</c></p></item>
+ <tag><c>subsystem_name() =</c></tag>
+ <item><p><c>string()</c></p></item>
+ <tag><c>channel_callback() =</c></tag>
+ <item><p><c>atom()</c> - Name of the Erlang module
+ implementing the subsystem using the <c>ssh_channel</c> behavior, see
+ <seealso marker="ssh_channel">ssh_channel(3)</seealso></p></item>
+ <tag><c>channel_init_args() =</c></tag>
+ <item><p><c>list()</c></p></item>
+
+ <tag><c>algs_list() =</c></tag>
+ <item><p><c>list( alg_entry() )</c></p></item>
+
+ <tag><c>alg_entry() =</c></tag>
+ <item><p><c>{kex, simple_algs()} | {public_key, simple_algs()} | {cipher, double_algs()} | {mac, double_algs()} | {compression, double_algs()}</c></p></item>
+
+ <tag><c>simple_algs() =</c></tag>
+ <item><p><c>list( atom() )</c></p></item>
+
+ <tag><c>double_algs() =</c></tag>
+    <item><p><c>[{client2server,simple_algs()},{server2client,simple_algs()}] | simple_algs()</c></p></item>
+ </taglist>
+</section>
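For example, a subsystem_spec() and an algs_list() as defined above could look as follows (my_subsystem_channel is a hypothetical callback module):

    %% Illustrative sketch only.
    Spec = {"my_subsystem", {my_subsystem_channel, []}},
    Algs = [{kex, ['diffie-hellman-group1-sha1']},
            {cipher, [{client2server, ['aes128-ctr']},
                      {server2client, ['aes128-ctr']}]},
            {compression, [none, zlib]}].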
<funcs>
<func>
<name>close(ConnectionRef) -> ok </name>
- <fsummary>Closes an SSH connection</fsummary>
+ <fsummary>Closes an SSH connection.</fsummary>
<type>
<v>ConnectionRef = ssh_connection_ref()</v>
</type>
@@ -81,135 +112,202 @@
<name>connect(Host, Port, Options) -> </name>
<name>connect(Host, Port, Options, Timeout) -> {ok,
ssh_connection_ref()} | {error, Reason}</name>
- <fsummary>Connect to an ssh server.</fsummary>
+ <fsummary>Connects to an SSH server.</fsummary>
<type>
<v>Host = string()</v>
<v>Port = integer()</v>
- <d>The default is <c><![CDATA[22]]></c>, the assigned well known port
+      <d><c><![CDATA[22]]></c> is the default, the assigned well-known port
number for SSH.</d>
<v>Options = [{Option, Value}]</v>
- <v>Timeout = infinity | integer(milliseconds)</v>
- <d>Negotiation timeout, for connection timeout use the option <c>{connect_timeout, timeout()}</c>.</d>
+ <v>Timeout = infinity | integer()</v>
+ <d>Negotiation time-out in milli-seconds. The default value is <c>infinity</c>.
+ For connection time-out, use option <c>{connect_timeout, timeout()}</c>.</d>
</type>
<desc>
<p>Connects to an SSH server. No channel is started. This is done
by calling
- <seealso marker="ssh_connection#session_channel/2">ssh_connection:session_channel/[2, 4]</seealso>.</p>
- <p>Options are:</p>
+ <seealso marker="ssh_connection#session_channel/2">
+ ssh_connection:session_channel/[2, 4]</seealso>.</p>
+ <p>Options:</p>
<taglist>
<tag><c><![CDATA[{inet, inet | inet6}]]></c></tag>
- <item> IP version to use.</item>
+ <item>
+ <p>IP version to use.</p>
+ </item>
<tag><c><![CDATA[{user_dir, string()}]]></c></tag>
<item>
- <p>Sets the user directory i.e. the directory containing
- ssh configuration files for the user such as
+ <p>Sets the user directory, that is, the directory containing
+ <c>ssh</c> configuration files for the user, such as
<c><![CDATA[known_hosts]]></c>, <c><![CDATA[id_rsa,
- id_dsa]]></c> and
+ id_dsa]]></c>, and
<c><![CDATA[authorized_key]]></c>. Defaults to the
directory normally referred to as
- <c><![CDATA[~/.ssh]]></c> </p>
+ <c><![CDATA[~/.ssh]]></c>.</p>
</item>
<tag><c><![CDATA[{dsa_pass_phrase, string()}]]></c></tag>
<item>
- <p>If the user dsa key is protected by a passphrase it can be
+ <p>If the user DSA key is protected by a passphrase, it can be
supplied with this option.
</p>
</item>
<tag><c><![CDATA[{rsa_pass_phrase, string()}]]></c></tag>
<item>
- <p>If the user rsa key is protected by a passphrase it can be
+ <p>If the user RSA key is protected by a passphrase, it can be
supplied with this option.
</p>
</item>
<tag><c><![CDATA[{silently_accept_hosts, boolean()}]]></c></tag>
<item>
- <p>When true hosts are added to the
+ <p>When <c>true</c>, hosts are added to the
file <c><![CDATA[known_hosts]]></c> without asking the user.
- Defaults to false.
+ Defaults to <c>false</c>.
</p>
</item>
<tag><c><![CDATA[{user_interaction, boolean()}]]></c></tag>
<item>
- <p>If false disables the client to connect to the server
- if any user interaction is needed such as accepting that
- the server will be added to the <c>known_hosts</c> file or
- supplying a password. Defaults to true.
+	  <p>If <c>false</c>, prevents the client from connecting to the server
+ if any user interaction is needed, such as accepting
+ the server to be added to the <c>known_hosts</c> file, or
+ supplying a password. Defaults to <c>true</c>.
Even if user interaction is allowed it can be
- suppressed by other options such as silently_accept_hosts and
- password. Do note that it may not always be desirable to use
- those options from a security point of view.</p>
+ suppressed by other options, such as <c>silently_accept_hosts</c>
+	  and <c>password</c>. However, those options are not always desirable
+ to use from a security point of view.</p>
</item>
+
+ <tag><c><![CDATA[{disconnectfun, fun(Reason:term()) -> _}]]></c></tag>
+ <item>
+ <p>Provides a fun to implement your own logging when a server disconnects the client.</p>
+ </item>
+
<tag><c><![CDATA[{public_key_alg, 'ssh-rsa' | 'ssh-dss'}]]></c></tag>
<item>
+ <note>
+ <p>This option is kept for compatibility. It is ignored if the <c>preferred_algorithms</c>
+	    option is used. The option <c>{public_key_alg,'ssh-dss'}</c> is equivalent to
+ <c>{preferred_algorithms, [{public_key,['ssh-dss','ssh-rsa']}]}</c>.</p>
+ </note>
<p>Sets the preferred public key algorithm to use for user
- authentication. If the the preferred algorithm fails for
- some reason, the other algorithm is tried. The default is
+ authentication. If the preferred algorithm fails,
+ the other algorithm is tried. The default is
to try <c><![CDATA['ssh-rsa']]></c> first.</p>
</item>
+
<tag><c><![CDATA[{pref_public_key_algs, list()}]]></c></tag>
<item>
- <p>List of public key algorithms to try to use, 'ssh-rsa' and 'ssh-dss' available.
- Will override <c><![CDATA[{public_key_alg, 'ssh-rsa' | 'ssh-dss'}]]></c></p>
+ <note>
+ <p>This option is kept for compatibility. It is ignored if the <c>preferred_algorithms</c>
+	    option is used. The option <c>{pref_public_key_algs,['ssh-dss']}</c> is equivalent to
+ <c>{preferred_algorithms, [{public_key,['ssh-dss']}]}</c>.</p>
+ </note>
+ <p>List of public key algorithms to try to use.
+ <c>'ssh-rsa'</c> and <c>'ssh-dss'</c> are available.
+ Overrides <c><![CDATA[{public_key_alg, 'ssh-rsa' | 'ssh-dss'}]]></c></p>
</item>
+
+ <tag><c><![CDATA[{preferred_algorithms, algs_list()}]]></c></tag>
+ <item>
+ <p>List of algorithms to use in the algorithm negotiation. The default <c>algs_list()</c> can
+ be obtained from <seealso marker="#default_algorithms/0">default_algorithms/0</seealso>.
+ </p>
+ <p>Here is an example of this option:</p>
+ <code>
+{preferred_algorithms,
+ [{public_key,['ssh-rsa','ssh-dss']},
+ {cipher,[{client2server,['aes128-ctr']},
+ {server2client,['aes128-cbc','3des-cbc']}]},
+ {mac,['hmac-sha2-256','hmac-sha1']},
+ {compression,[none,zlib]}
+ ]}
+</code>
+ <p>The example specifies different algorithms in the two directions (client2server and server2client) for cipher, but specifies the same
+algorithms for mac and compression in both directions. The kex (key exchange) and public key algorithms are set to their default values;
+kex is implicit but public_key is set explicitly.</p>
+
+ <warning>
+ <p>Changing the values can make a connection less secure. Do not change unless you
+ know exactly what you are doing. If you do not understand the values then you
+ are not supposed to change them.</p>
+ </warning>
+ </item>
+
<tag><c><![CDATA[{connect_timeout, timeout()}]]></c></tag>
<item>
- <p>Sets a timeout on the transport layer
- connection. Defaults to <c>infinity</c>.</p>
+ <p>Sets a time-out on the transport layer
+ connection. For <c>gen_tcp</c> the time is in milli-seconds and the default value is
+ <c>infinity</c>.</p>
</item>
<tag><c><![CDATA[{user, string()}]]></c></tag>
<item>
- <p>Provides a user name. If this option is not given, ssh
+ <p>Provides a username. If this option is not given, <c>ssh</c>
reads from the environment (<c><![CDATA[LOGNAME]]></c> or
- <c><![CDATA[USER]]></c> on unix,
+ <c><![CDATA[USER]]></c> on UNIX,
<c><![CDATA[USERNAME]]></c> on Windows).</p>
</item>
<tag><c><![CDATA[{password, string()}]]></c></tag>
<item>
- <p>Provide a password for password authentication. If
- this option is not given, the user will be asked for a
- password if the password authentication method is
+ <p>Provides a password for password authentication.
+ If this option is not given, the user is asked for a
+ password, if the password authentication method is
attempted.</p>
</item>
<tag><c><![CDATA[{key_cb, atom()}]]></c></tag>
<item>
- <p>Module implementing the behaviour <seealso marker="ssh_client_key_api">ssh_client_key_api</seealso>.
+ <p>Module implementing the behaviour
+ <seealso marker="ssh_client_key_api">ssh_client_key_api</seealso>.
Can be used to customize the handling of public keys.
</p>
</item>
<tag><c><![CDATA[{quiet_mode, atom() = boolean()}]]></c></tag>
<item>
- <p>If true, the client will not print out anything on authorization.</p>
+ <p>If <c>true</c>, the client does not print anything on authorization.</p>
+ </item>
+
+ <tag><c><![CDATA[{id_string, random | string()}]]></c></tag>
+ <item>
+ <p>The string that the client presents to a connected server initially. The default value is "Erlang/VSN" where VSN is the ssh application version number.
+ </p>
+ <p>The value <c>random</c> will cause a random string to be created at each connection attempt. This is to make it a bit more difficult for a malicious peer to find the ssh software brand and version.
+ </p>
</item>
+
<tag><c><![CDATA[{fd, file_descriptor()}]]></c></tag>
<item>
- <p>Allow an existing file descriptor to be used
- (simply passed on to the transport protocol).</p></item>
+ <p>Allows an existing file descriptor to be used
+ (by passing it on to the transport protocol).</p></item>
<tag><c><![CDATA[{rekey_limit, integer()}]]></c></tag>
<item>
- <p>Provide, in bytes, when rekeying should be initiated,
- defaults to one time each GB and one time per hour.</p>
+ <p>Provides, in bytes, when rekeying is to be initiated.
+	  Defaults to once per GB and once per hour.</p>
</item>
<tag><c><![CDATA[{idle_time, integer()}]]></c></tag>
<item>
- <p>Sets a timeout on connection when no channels are active, default is infinity</p></item>
+ <p>Sets a time-out on a connection when no channels are active.
+ Defaults to <c>infinity</c>.</p></item>
+ <tag><c><![CDATA[{ssh_msg_debug_fun, fun(ConnectionRef::ssh_connection_ref(), AlwaysDisplay::boolean(), Msg::binary(), LanguageTag::binary()) -> _}]]></c></tag>
+ <item>
+ <p>Provide a fun to implement your own logging of the SSH message SSH_MSG_DEBUG. The last three parameters are from the message, see RFC4253, section 11.3. The <c>ConnectionRef</c> is the reference to the connection on which the message arrived. The return value from the fun is not checked.</p>
+	  <p>The default behaviour is to ignore the message.
+ To get a printout for each message with <c>AlwaysDisplay = true</c>, use for example <c>{ssh_msg_debug_fun, fun(_,true,M,_)-> io:format("DEBUG: ~p~n", [M]) end}</c></p>
+ </item>
+
</taglist>
</desc>
</func>
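A hedged sketch putting some of the connect options documented above together (all values are placeholders, not recommendations):

    %% Illustrative sketch only.
    connect_example() ->
        Options = [{user, "alice"},
                   {user_dir, "/home/alice/.ssh"},
                   {silently_accept_hosts, true},
                   {connect_timeout, 10000},
                   {preferred_algorithms,
                    [{public_key, ['ssh-rsa', 'ssh-dss']},
                     {mac, ['hmac-sha2-256']}]}],
        ssh:connect("host.example", 22, Options, infinity).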
<func>
<name>connection_info(ConnectionRef, [Option]) ->[{Option,
- Value}] </name>
- <fsummary> Retrieves information about a connection. </fsummary>
+ Value}]</name>
+ <fsummary>Retrieves information about a connection.</fsummary>
<type>
<v>Option = client_version | server_version | user | peer | sockname </v>
<v>Value = [option_value()] </v>
- <v>option_value() = {{Major::integer(), Minor::integer()}, VersionString::string()} | User::string() |
- Peer::{inet:hostname(), {inet::ip_adress(), inet::port_number()}} |
- Sockname::{inet::ip_adress(), inet::port_number()} () </v>
+ <v>option_value() = {{Major::integer(), Minor::integer()}, VersionString::string()} |
+      User::string() | Peer::{inet:hostname(), {inet::ip_address(), inet::port_number()}} |
+      Sockname::{inet::ip_address(), inet::port_number()}</v>
</type>
<desc>
- <p> Retrieves information about a connection.
- </p>
+ <p>Retrieves information about a connection.</p>
</desc>
</func>
@@ -230,155 +328,243 @@
<desc>
<p>Starts a server listening for SSH connections on the given
port.</p>
- <p>Options are:</p>
+ <p>Options:</p>
<taglist>
<tag><c><![CDATA[{inet, inet | inet6}]]></c></tag>
- <item> IP version to use when the host address is specified as <c>any</c>. </item>
- <tag><c><![CDATA[{subsystems, [subsystem_spec()]]]></c></tag>
+ <item><p>IP version to use when the host address is specified as <c>any</c>.</p></item>
+ <tag><c><![CDATA[{subsystems, [subsystem_spec()]}]]></c></tag>
<item>
- Provides specifications for handling of subsystems. The
- "sftp" subsystem spec can be retrieved by calling
- ssh_sftpd:subsystem_spec/1. If the subsystems option in
- not present the value of
- <c>[ssh_sftpd:subsystem_spec([])]</c> will be used. It is
- of course possible to set the option to the empty list if
- you do not want the daemon to run any subsystems at all.
+ <p>Provides specifications for handling of subsystems. The
+ "sftp" subsystem specification is retrieved by calling
+ <c>ssh_sftpd:subsystem_spec/1</c>. If the subsystems option is
+ not present, the value of
+ <c>[ssh_sftpd:subsystem_spec([])]</c> is used.
+ The option can be set to the empty list if
+ you do not want the daemon to run any subsystems.</p>
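+	  <p>For example, a daemon with an SFTP subsystem rooted in a specific
+	  directory could be started as follows (the paths and port are illustrative):</p>
+	  <code>
+ssh:daemon(8022, [{system_dir, "/tmp/ssh_daemon"},
+                  {user_dir, "/tmp/ssh_users"},
+                  {subsystems, [ssh_sftpd:subsystem_spec([{cwd, "/tmp/sftp"}])]}]).
+</code>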
</item>
<tag><c><![CDATA[{shell, {Module, Function, Args} |
fun(string() = User) - > pid() | fun(string() = User,
ip_address() = PeerAddr) -> pid()}]]></c></tag>
<item>
- Defines the read-eval-print loop used when a shell is
- requested by the client. Default is to use the erlang shell:
- <c><![CDATA[{shell, start, []}]]></c>
+ <p>Defines the read-eval-print loop used when a shell is
+ requested by the client. The default is to use the Erlang shell:
+ <c><![CDATA[{shell, start, []}]]></c></p>
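+	  <p>A minimal sketch of a custom shell fun, which only greets the user
+	  and then exits, could look as follows:</p>
+	  <code>
+{shell, fun(User) ->
+            spawn(fun() ->
+                      io:format("Hello ~s, goodbye.~n", [User])
+                  end)
+        end}
+</code>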
</item>
<tag><c><![CDATA[{ssh_cli, {channel_callback(),
channel_init_args()} | no_cli}]]></c></tag>
<item>
- Provides your own CLI implementation, i.e. a channel callback
- module that implements a shell and command execution. Note
- that you may customize the shell read-eval-print loop using the
- option <c>shell</c> which is much less work than implementing
- your own CLI channel. If set to <c>no_cli</c> you will disable
- CLI channels and only subsystem channels will be allowed.
+ <p>Provides your own CLI implementation, that is, a channel callback
+ module that implements a shell and command execution. The shell
+ read-eval-print loop can be customized, using the
+ option <c>shell</c>. This means less work than implementing
+	  your own CLI channel. If set to <c>no_cli</c>, the CLI channels
+ are disabled and only subsystem channels are allowed.</p>
</item>
<tag><c><![CDATA[{user_dir, String}]]></c></tag>
<item>
- <p>Sets the user directory i.e. the directory containing
- ssh configuration files for the user such as
+ <p>Sets the user directory. That is, the directory containing
+ <c>ssh</c> configuration files for the user, such as
<c><![CDATA[known_hosts]]></c>, <c><![CDATA[id_rsa,
- id_dsa]]></c> and
+ id_dsa]]></c>, and
<c><![CDATA[authorized_key]]></c>. Defaults to the
directory normally referred to as
- <c><![CDATA[~/.ssh]]></c> </p>
+ <c><![CDATA[~/.ssh]]></c>.</p>
</item>
<tag><c><![CDATA[{system_dir, string()}]]></c></tag>
<item>
<p>Sets the system directory, containing the host key files
- that identifies the host keys for ssh. The default is
- <c><![CDATA[/etc/ssh]]></c>, note that for security reasons
- this directory is normally only accessible by the root user.</p>
+ that identify the host keys for <c>ssh</c>. Defaults to
+ <c><![CDATA[/etc/ssh]]></c>. For security reasons,
+ this directory is normally accessible only to the root user.</p>
</item>
<tag><c><![CDATA[{auth_methods, string()}]]></c></tag>
<item>
- <p>Comma separated string that determines which
- authentication methodes that the server should support and
- in what order they will be tried. Defaults to
+	  <p>Comma-separated string that determines which
+	  authentication methods the server is to support and
+ in what order they are tried. Defaults to
<c><![CDATA["publickey,keyboard-interactive,password"]]></c></p>
</item>
<tag><c><![CDATA[{user_passwords, [{string() = User,
string() = Password}]}]]></c></tag>
<item>
- <p>Provide passwords for password authentication.They will
- be used when someone tries to connect to the server and
- public key user authentication fails. The option provides
- a list of valid user names and the corresponding password.
+ <p>Provides passwords for password authentication. The passwords
+ are used when someone tries to connect to the server and
+ public key user-authentication fails. The option provides
+ a list of valid usernames and the corresponding passwords.
</p>
</item>
<tag><c><![CDATA[{password, string()}]]></c></tag>
<item>
- <p>Provide a global password that will authenticate any
+ <p>Provides a global password that authenticates any
user. From a security perspective this option makes
the server very vulnerable.</p>
</item>
+
+ <tag><c><![CDATA[{preferred_algorithms, algs_list()}]]></c></tag>
+ <item>
+ <p>List of algorithms to use in the algorithm negotiation. The default <c>algs_list()</c> can
+ be obtained from <seealso marker="#default_algorithms/0">default_algorithms/0</seealso>.
+ </p>
+ <p>Here is an example of this option:</p>
+ <code>
+{preferred_algorithms,
+ [{public_key,['ssh-rsa','ssh-dss']},
+ {cipher,[{client2server,['aes128-ctr']},
+ {server2client,['aes128-cbc','3des-cbc']}]},
+ {mac,['hmac-sha2-256','hmac-sha1']},
+  {compression,[none,zlib]}]
+}
+</code>
+      <p>The example specifies different algorithms in the two directions (client2server and server2client)
+      for cipher, but the same algorithms for mac and compression in both directions. The kex (key exchange)
+      and public key algorithms are set to their default values; kex is implicit, but public_key is set explicitly.</p>
+
+ <warning>
+	<p>Changing the values can make a connection less secure. Do not change them unless you
+	know exactly what you are doing. If you do not understand the values, do not
+	change them.</p>
+ </warning>
+ </item>
+
<tag><c><![CDATA[{pwdfun, fun(User::string(), password::string()) -> boolean()}]]></c></tag>
<item>
- <p>Provide a function for password validation. This is called
- with user and password as strings, and should return
+ <p>Provides a function for password validation. This function is called
+ with user and password as strings, and returns
<c><![CDATA[true]]></c> if the password is valid and
<c><![CDATA[false]]></c> otherwise.</p>
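+	  <p>A sketch of such a function, validating against a hypothetical
+	  user database module <c>my_db</c>, could look as follows:</p>
+	  <code>
+{pwdfun, fun(User, Password) ->
+             %% my_db:lookup_password/1 is a hypothetical lookup function
+             case my_db:lookup_password(User) of
+                 {ok, Password} -> true;
+                 _ -> false
+             end
+         end}
+</code>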
</item>
<tag><c><![CDATA[{negotiation_timeout, integer()}]]></c></tag>
<item>
- <p>Max time in milliseconds for the authentication negotiation. The default value is 2 minutes. If the client fails to login within this time, the connection is closed.
+ <p>Maximum time in milliseconds for the authentication negotiation.
+ Defaults to 120000 (2 minutes). If the client fails to log in within this time,
+ the connection is closed.
</p>
</item>
<tag><c><![CDATA[{max_sessions, pos_integer()}]]></c></tag>
<item>
- <p>The maximum number of simultaneous sessions that are accepted at any time for this daemon. This includes sessions that are being authorized. So if set to <c>N</c>, and <c>N</c> clients have connected but not started the login process, the <c>N+1</c> connection attempt will be aborted. If <c>N</c> connections are authenticated and still logged in, no more loggins will be accepted until one of the existing ones log out.
+ <p>The maximum number of simultaneous sessions that are accepted at any time
+ for this daemon. This includes sessions that are being authorized.
+ Thus, if set to <c>N</c>, and <c>N</c> clients have connected but not started
+ the login process, connection attempt <c>N+1</c> is aborted.
+ If <c>N</c> connections are authenticated and still logged in, no more logins
+	are accepted until one of the existing ones logs out.
</p>
- <p>The counter is per listening port, so if two daemons are started, one with <c>{max_sessions,N}</c> and the other with <c>{max_sessions,M}</c> there will be in total <c>N+M</c> connections accepted for the whole ssh application.
+ <p>The counter is per listening port. Thus, if two daemons are started, one with
+ <c>{max_sessions,N}</c> and the other with <c>{max_sessions,M}</c>, in total
+ <c>N+M</c> connections are accepted for the whole <c>ssh</c> application.
</p>
- <p>Note that if <c>parallel_login</c> is <c>false</c>, only one client at a time may be in the authentication phase.
+ <p>Notice that if <c>parallel_login</c> is <c>false</c>, only one client
+ at a time can be in the authentication phase.
</p>
- <p>As default, the option is not set. This means that the number is not limited.
+ <p>By default, this option is not set. This means that the number is not limited.
</p>
</item>
<tag><c><![CDATA[{parallel_login, boolean()}]]></c></tag>
<item>
- <p>If set to false (the default value), only one login is handled a time. If set to true, an unlimited number of login attempts will be allowed simultanously.
+ <p>If set to false (the default value), only one login is handled at a time.
+ If set to true, an unlimited number of login attempts are allowed simultaneously.
</p>
- <p>If the <c>max_sessions</c> option is set to <c>N</c> and <c>parallel_login</c> is set to <c>true</c>, the max number of simultaneous login attempts at any time is limited to <c>N-K</c> where <c>K</c> is the number of authenticated connections present at this daemon.
+ <p>If the <c>max_sessions</c> option is set to <c>N</c> and <c>parallel_login</c>
+ is set to <c>true</c>, the maximum number of simultaneous login attempts at any time is
+ limited to <c>N-K</c>, where <c>K</c> is the number of authenticated connections present
+ at this daemon.
</p>
<warning>
- <p>Do not enable <c>parallel_logins</c> without protecting the server by other means, for example the <c>max_sessions</c> option or a firewall configuration. If set to <c>true</c>, there is no protection against DOS attacks.</p>
+	  <p>Do not enable <c>parallel_login</c> without protecting the server by other means,
+ for example, by the <c>max_sessions</c> option or a firewall configuration. If set to
+ <c>true</c>, there is no protection against DOS attacks.</p>
</warning>
</item>
+ <tag><c><![CDATA[{minimal_remote_max_packet_size, non_negative_integer()}]]></c></tag>
+ <item>
+	  <p>The least maximum packet size that the daemon accepts in channel open requests from the client. Defaults to 0.
+ </p>
+ </item>
+
+ <tag><c><![CDATA[{id_string, random | string()}]]></c></tag>
+ <item>
+	  <p>The string that the daemon presents to a connecting peer initially. The default value is "Erlang/VSN", where VSN is the <c>ssh</c> application version number.
+	  </p>
+	  <p>The value <c>random</c> causes a random string to be created at each connection attempt. This makes it a bit more difficult for a malicious peer to find the <c>ssh</c> software brand and version.
+ </p>
+ </item>
+
<tag><c><![CDATA[{key_cb, atom()}]]></c></tag>
<item>
- <p>Module implementing the behaviour <seealso marker="ssh_server_key_api">ssh_server_key_api</seealso>.
+ <p>Module implementing the behaviour
+ <seealso marker="ssh_server_key_api">ssh_server_key_api</seealso>.
Can be used to customize the handling of public keys.
</p>
</item>
<tag><c><![CDATA[{fd, file_descriptor()}]]></c></tag>
<item>
- <p>Allow an existing file-descriptor to be used
- (simply passed on to the transport protocol).</p></item>
- <tag><c><![CDATA[{failfun, fun(User::string(), PeerAddress::ip_address(), Reason::term()) -> _}]]></c></tag>
+ <p>Allows an existing file-descriptor to be used
+ (passed on to the transport protocol).</p></item>
+ <tag><c><![CDATA[{failfun, fun(User::string(),
+ PeerAddress::ip_address(), Reason::term()) -> _}]]></c></tag>
<item>
- <p>Provide a fun to implement your own logging when a user fails to authenticate.</p>
+ <p>Provides a fun to implement your own logging when a user fails to authenticate.</p>
</item>
- <tag><c><![CDATA[{connectfun, fun(User::string(), PeerAddress::ip_address(), Method::string()) ->_}]]></c></tag>
+ <tag><c><![CDATA[{connectfun, fun(User::string(), PeerAddress::ip_address(),
+ Method::string()) ->_}]]></c></tag>
<item>
- <p>Provide a fun to implement your own logging when a user authenticates to the server.</p>
+ <p>Provides a fun to implement your own logging when a user authenticates to the server.</p>
</item>
<tag><c><![CDATA[{disconnectfun, fun(Reason:term()) -> _}]]></c></tag>
<item>
- <p>Provide a fun to implement your own logging when a user disconnects from the server.</p>
+ <p>Provides a fun to implement your own logging when a user disconnects from the server.</p>
</item>
- </taglist>
- </desc>
+
+ <tag><c><![CDATA[{ssh_msg_debug_fun, fun(ConnectionRef::ssh_connection_ref(), AlwaysDisplay::boolean(), Msg::binary(), LanguageTag::binary()) -> _}]]></c></tag>
+ <item>
+	  <p>Provides a fun to implement your own logging of the SSH message SSH_MSG_DEBUG. The last three parameters are from the message, see RFC 4253, Section 11.3. The <c>ConnectionRef</c> is the reference to the connection on which the message arrived. The return value from the fun is not checked.</p>
+	  <p>The default behaviour is to ignore the message.
+	  To get a printout for each message with <c>AlwaysDisplay = true</c>, use for example <c>{ssh_msg_debug_fun, fun(_,true,M,_)-> io:format("DEBUG: ~p~n", [M]) end}</c>.</p>
+ </item>
+
+ </taglist>
+ </desc>
</func>
+ <func>
+ <name>default_algorithms() -> algs_list()</name>
+      <fsummary>Gets a list declaring the supported algorithms.</fsummary>
+ <desc>
+ <p>Returns a key-value list, where the keys are the different types of algorithms and the values are the
+ algorithms themselves. An example:</p>
+ <code>
+20> ssh:default_algorithms().
+[{kex,['diffie-hellman-group1-sha1']},
+ {public_key,['ssh-rsa','ssh-dss']},
+ {cipher,[{client2server,['aes128-ctr','aes128-cbc','3des-cbc']},
+ {server2client,['aes128-ctr','aes128-cbc','3des-cbc']}]},
+ {mac,[{client2server,['hmac-sha2-256','hmac-sha1']},
+ {server2client,['hmac-sha2-256','hmac-sha1']}]},
+ {compression,[{client2server,[none,zlib]},
+ {server2client,[none,zlib]}]}]
+21>
+</code>
+ </desc>
+ </func>
<func>
<name>shell(Host) -> </name>
<name>shell(Host, Option) -> </name>
<name>shell(Host, Port, Option) -> _</name>
- <fsummary> </fsummary>
+ <fsummary>Starts an interactive shell over an SSH server.</fsummary>
<type>
- <v> Host = string()</v>
- <v> Port = integer()</v>
- <v> Options - see ssh:connect/3</v>
+ <v>Host = string()</v>
+ <v>Port = integer()</v>
+ <v>Options - see ssh:connect/3</v>
</type>
<desc>
- <p>Starts an interactive shell via an SSH server on the
+ <p>Starts an interactive shell over an SSH server on the
given <c>Host</c>. The function waits for user input,
- and will not return until the remote shell is ended (i.e.
+ and does not return until the remote shell is ended (that is,
exit from the shell).
</p>
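+      <p>For example, to connect to a server listening on a non-standard port
+      with an explicit user name (the host, port, and user are illustrative):</p>
+      <code>
+ssh:shell("host.example", 8022, [{user, "alice"}]).
+</code>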
</desc>
@@ -387,28 +573,29 @@
<func>
<name>start() -> </name>
<name>start(Type) -> ok | {error, Reason}</name>
- <fsummary>Starts the SSH application. </fsummary>
+ <fsummary>Starts the SSH application.</fsummary>
<type>
<v>Type = permanent | transient | temporary</v>
<v>Reason = term() </v>
</type>
<desc>
- <p>Utility function that starts crypto, public_key and the SSH
- application. Defult type is temporary.
- See also <seealso marker="kernel:application">application(3)</seealso>
- </p>
+ <p>Utility function that starts the applications <c>crypto</c>, <c>public_key</c>,
+ and <c>ssh</c>. Default type is <c>temporary</c>.
+ For more information, see the <seealso marker="kernel:application">application(3)</seealso>
+ manual page in <c>kernel</c>.</p>
</desc>
</func>
<func>
<name>stop() -> ok | {error, Reason}</name>
- <fsummary>Stops the SSH application.</fsummary>
+ <fsummary>Stops the <c>ssh</c> application.</fsummary>
<type>
<v>Reason = term()</v>
</type>
<desc>
- <p>Stops the SSH application. See also
- <seealso marker="kernel:application">application(3)</seealso></p>
+ <p>Stops the <c>ssh</c> application.
+ For more information, see the <seealso marker="kernel:application">application(3)</seealso>
+ manual page in <c>kernel</c>.</p>
</desc>
</func>
@@ -432,7 +619,7 @@
<name>stop_listener(DaemonRef) -> </name>
<name>stop_listener(Address, Port) -> ok </name>
<fsummary>Stops the listener, but leaves existing connections started
- by the listener up and running.</fsummary>
+ by the listener operational.</fsummary>
<type>
<v>DaemonRef = ssh_daemon_ref()</v>
<v>Address = ip_address()</v>
@@ -440,7 +627,7 @@
</type>
<desc>
<p>Stops the listener, but leaves existing connections started
- by the listener up and running.</p>
+ by the listener operational.</p>
</desc>
</func>
diff --git a/lib/ssh/doc/src/ssh_app.xml b/lib/ssh/doc/src/ssh_app.xml
index a1d2402790..1dfe68b17d 100644
--- a/lib/ssh/doc/src/ssh_app.xml
+++ b/lib/ssh/doc/src/ssh_app.xml
@@ -18,83 +18,103 @@
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
-
</legalnotice>
<title>SSH</title>
+ <prepared></prepared>
+ <docno></docno>
+ <checked></checked>
+ <date></date>
+ <rev></rev>
<file>ssh_app.xml</file>
</header>
<app>SSH</app>
- <appsummary>The ssh application implements the SSH (Secure Shell) protocol and
- provides an SFTP (SSH File Transfer Protocol) client and server. </appsummary>
+ <appsummary>The ssh application implements the Secure Shell (SSH) protocol and
+ provides an SSH File Transfer Protocol (SFTP) client and server.</appsummary>
+ <description>
+ <p>The <c>ssh</c> application is an implementation of the SSH protocol in Erlang.
+ <c>ssh</c> offers API functions to write customized SSH clients and servers as well as
+ making the Erlang shell available over SSH. An SFTP client, <c>ssh_sftp</c>, and server,
+ <c>ssh_sftpd</c>, are also included.</p>
+ </description>
- <section>
+ <section>
<title>DEPENDENCIES</title>
- <p>The ssh application uses the Erlang applications public_key and
- crypto to handle public keys and encryption, hence these
- applications needs to be loaded for the ssh application to work. In
- an embedded environment that means they need to be started with
- application:start/[1,2] before the ssh application is started.
+ <p>The <c>ssh</c> application uses the applications <c>public_key</c> and
+ <c>crypto</c> to handle public keys and encryption. Hence, these
+ applications must be loaded for the <c>ssh</c> application to work. In
+ an embedded environment this means that they must be started with
+ <c>application:start/[1,2]</c> before the <c>ssh</c> application is started.
</p>
</section>
<section>
<title>CONFIGURATION</title>
- <p>The ssh application does not currently have an application
- specific configuration file as described in application(3),
- however it will by default use the following configuration files
- from openssh: known_hosts, authorized_keys, authorized_keys2,
- id_dsa and id_rsa, ssh_host_dsa_key and ssh_host_rsa_key. By
- default Erlang SSH will look for id_dsa, id_rsa, known_hosts
- and authorized_keys in ~/.ssh, and the host key files in /etc/ssh
- . These locations may be changed by the options user_dir and
- system_dir. Public key handling may also be customized by
- providing a callback module implementing the behaviors
- <seealso marker="ssh_client_key_api">ssh_client_key_api</seealso> and
- <seealso marker="ssh_server_key_api">ssh_server_key_api</seealso>.
- </p>
+    <p>The <c>ssh</c> application does not have an application-specific
+    configuration file, as described in <seealso marker="kernel:application">application(3)</seealso>.
+    However, by default it uses the following configuration files
+ from OpenSSH:</p>
+ <list type="bulleted">
+ <item><c>known_hosts</c></item>
+ <item><c>authorized_keys</c></item>
+ <item><c>authorized_keys2</c></item>
+ <item><c>id_dsa</c></item>
+ <item><c>id_rsa</c></item>
+ <item><c>ssh_host_dsa_key</c></item>
+ <item><c>ssh_host_rsa_key</c></item>
+ </list>
+ <p>By default, <c>ssh</c> looks for <c>id_dsa</c>, <c>id_rsa</c>,
+    <c>known_hosts</c>, and <c>authorized_keys</c> in <c>~/.ssh</c>,
+ and for the host key files in <c>/etc/ssh</c>. These locations can be changed
+ by the options <c>user_dir</c> and <c>system_dir</c>.
+ </p>
+ <p>Public key handling can also be customized through a callback module that
+ implements the behaviors
+ <seealso marker="ssh_client_key_api">ssh_client_key_api</seealso> and
+ <seealso marker="ssh_server_key_api">ssh_server_key_api</seealso>.
+ </p>
- <section>
- <title>PUBLIC KEYS</title>
- <p>
- id_dsa and id_rsa are the users private key files, note that
- the public key is part of the private key so the ssh
- application will not use the id_&lt;*>.pub files. These are
- for the users convenience when he/she needs to convey their
+ </section>
+ <section>
+ <title>Public Keys</title>
+    <p><c>id_dsa</c> and <c>id_rsa</c> are the user's private key files.
+    Notice that the public key is part of the private key, so the <c>ssh</c>
+    application does not use the <c>id_&lt;*>.pub</c> files. These are
+    for the user's convenience when the user needs to convey the
public key.
</p>
- </section>
-
- <section>
- <title>KNOW HOSTS</title>
- <p>The known_hosts file contains a list of approved servers and
- their public keys. Once a server is listed, it can be verified
+ </section>
+ <section>
+ <title>Known Hosts</title>
+ <p>The <c>known_hosts</c> file contains a list of approved servers and
+ their public keys. Once a server is listed, it can be verified
without user interaction.
</p>
- </section>
-
- <section>
- <title>AUTHORIZED KEYS</title>
- <p>The authorized key file keeps track of the user's authorized
+ </section>
+ <section>
+ <title>Authorized Keys</title>
+ <p>The <c>authorized_key</c> file keeps track of the user's authorized
public keys. The most common use of this file is to let users
- log in without entering their password which is supported by the
- Erlang SSH daemon.
+ log in without entering their password, which is supported by the
+ Erlang <c>ssh</c> daemon.
</p>
- </section>
-
- <section>
- <title>HOST KEYS</title>
- <p>Currently rsa and dsa host keys are supported and are
- expected to be found in files named ssh_host_rsa_key and
- ssh_host_dsa_key.
+ </section>
+ <section>
+ <title>Host Keys</title>
+ <p>RSA and DSA host keys are supported and are
+ expected to be found in files named <c>ssh_host_rsa_key</c> and
+ <c>ssh_host_dsa_key</c>.
</p>
- </section>
+ </section>
+ <section>
+ <title>ERROR LOGGER AND EVENT HANDLERS</title>
+ <p>The <c>ssh</c> application uses the default <seealso marker="kernel:error_logger">OTP error logger</seealso> to log unexpected errors or print information about special events.</p>
</section>
<section>
<title>SEE ALSO</title>
- <p>application(3)</p>
+ <p><seealso marker="kernel:application">application(3)</seealso></p>
</section>
</appref>
diff --git a/lib/ssh/doc/src/ssh_channel.xml b/lib/ssh/doc/src/ssh_channel.xml
index a52a6a115e..2fdecf9072 100644
--- a/lib/ssh/doc/src/ssh_channel.xml
+++ b/lib/ssh/doc/src/ssh_channel.xml
@@ -23,69 +23,84 @@
The Initial Developer of the Original Code is Ericsson AB.
</legalnotice>
<title>ssh_channel</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
</header>
<module>ssh_channel</module>
<modulesummary>-behaviour(ssh_channel).
</modulesummary>
<description>
<p>SSH services (clients and servers) are implemented as channels
- that are multiplexed over an SSH connection and communicates via
+ that are multiplexed over an SSH connection and communicates over
the <url href="http://www.ietf.org/rfc/rfc4254.txt"> SSH
Connection Protocol</url>. This module provides a callback API
- that takes care of generic channel aspects such as flow control
- and close messages and lets the callback functions take care of
+ that takes care of generic channel aspects, such as flow control
+ and close messages. It lets the callback functions take care of
the service (application) specific parts. This behavior also ensures
that the channel process honors the principal of an OTP-process so
that it can be part of a supervisor tree. This is a requirement of
channel processes implementing a subsystem that will be added to
- the SSH applications supervisor tree.
+ the <c>ssh</c> applications supervisor tree.
</p>
- <note> <p>When implementing a SSH subsystem use the
- <c>-behaviour(ssh_daemon_channel).</c> instead of <c>-behaviour(ssh_channel).</c>
- as the only relevant callback functions for subsystems are
- init/1, handle_ssh_msg/2, handle_msg/2 and terminate/2, so the ssh_daemon_channel
- behaviour is limited version of the ssh_channel behaviour.
- </p> </note>
+ <note><p>When implementing an <c>ssh</c> subsystem, use
+ <c>-behaviour(ssh_daemon_channel)</c> instead of <c>-behaviour(ssh_channel)</c>.
+ The reason is that the only relevant callback functions for subsystems are
+ <c>init/1</c>, <c>handle_ssh_msg/2</c>, <c>handle_msg/2</c>, and <c>terminate/2</c>.
+ So, the <c>ssh_daemon_channel</c> behaviour is a limited version of the
+ <c>ssh_channel</c> behaviour.
+ </p></note>
</description>
<section>
- <title>DATA TYPES </title>
+ <title>DATA TYPES</title>
- <p>Type definitions that are used more than once in this module
- and/or abstractions to indicate the intended use of the data
- type:</p>
+ <p>Type definitions that are used more than once in this module,
+ or abstractions to indicate the intended use of the data
+ type, or both:</p>
- <p><c>boolean() = true | false </c></p>
- <p><c>string() = list of ASCII characters</c></p>
- <p><c>timeout() = infinity | integer() - in milliseconds.</c></p>
- <p><c>ssh_connection_ref() - opaque to the user returned by
- ssh:connect/3 or sent to an SSH channel process</c></p>
- <p><c>ssh_channel_id() = integer() </c></p>
- <p><c>ssh_data_type_code() = 1 ("stderr") | 0 ("normal") are
- currently valid values see <url href="http://www.ietf.org/rfc/rfc4254.txt">RFC 4254 </url> section 5.2.</c></p>
+ <taglist>
+ <tag><c>boolean() =</c></tag>
+ <item><p><c>true | false</c></p></item>
+ <tag><c>string() =</c></tag>
+ <item><p>list of ASCII characters</p></item>
+ <tag><c>timeout() =</c></tag>
+ <item><p><c>infinity | integer()</c> in milliseconds</p></item>
+ <tag><c>ssh_connection_ref() =</c></tag>
+    <item><p>opaque(), as returned by
+ <c>ssh:connect/3</c> or sent to an SSH channel process</p></item>
+ <tag><c>ssh_channel_id() =</c></tag>
+ <item><p><c>integer()</c></p></item>
+ <tag><c>ssh_data_type_code() =</c></tag>
+ <item><p><c>1</c> ("stderr") | <c>0</c> ("normal") are
+ the valid values,
+ see <url href="http://www.ietf.org/rfc/rfc4254.txt">RFC 4254</url>
+ Section 5.2</p></item>
+ </taglist>
</section>
<funcs>
<func>
<name>call(ChannelRef, Msg) -></name>
<name>call(ChannelRef, Msg, Timeout) -> Reply | {error, Reason}</name>
- <fsummary> Makes a synchronous call to a channel.</fsummary>
+ <fsummary>Makes a synchronous call to a channel.</fsummary>
<type>
<v>ChannelRef = pid() </v>
- <d>As returned by start_link/4 </d>
- <v>Msg = term() </v>
- <v>Timeout = timeout() </v>
- <v>Reply = term() </v>
- <v>Reason = closed | timeout </v>
+ <d>As returned by <seealso marker = "#start_link-4">ssh_channel:start_link/4</seealso></d>
+ <v>Msg = term()</v>
+ <v>Timeout = timeout()</v>
+ <v>Reply = term()</v>
+ <v>Reason = closed | timeout</v>
</type>
<desc>
<p>Makes a synchronous call to the channel process by sending
- a message and waiting until a reply arrives or a timeout
- occurs. The channel will call <seealso marker =
+ a message and waiting until a reply arrives, or a time-out
+ occurs. The channel calls <seealso marker =
"#Module:handle_call-3">Module:handle_call/3</seealso>
- to handle the message. If the channel process does not exist
+ to handle the message. If the channel process does not exist,
<c>{error, closed}</c> is returned.
</p>
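+      <p>For example, assuming that the callback module handles a
+      <c>get_status</c> message in its <c>handle_call/3</c> (the message and
+      time-out are illustrative):</p>
+      <code>
+Status = ssh_channel:call(ChannelRef, get_status, 5000).
+</code>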
</desc>
@@ -96,14 +111,14 @@
<fsummary>Sends an asynchronous message to the channel
ChannelRef and returns ok.</fsummary>
<type>
- <v>ChannelRef = pid() </v>
- <d>As returned by start_link/4 </d>
- <v>Msg = term() </v>
+ <v>ChannelRef = pid()</v>
+ <d>As returned by <seealso marker = "#start_link-4">ssh_channel:start_link/4</seealso></d>
+ <v>Msg = term()</v>
</type>
<desc>
<p>Sends an asynchronous message to the channel process and
returns ok immediately, ignoring if the destination node or
- channel process does not exist. The channel will call
+ channel process does not exist. The channel calls
<seealso marker = "#Module:handle_cast-2">Module:handle_cast/2</seealso>
to handle the message.
</p>
@@ -112,31 +127,32 @@
<func>
<name>enter_loop(State) -> _ </name>
- <fsummary> Makes an existing process an ssh_channel process. </fsummary>
+ <fsummary>Makes an existing process an ssh_channel process.</fsummary>
<type>
- <v> State = term() - as returned by <seealso marker = "#init-1">ssh_channel:init/1</seealso></v>
+ <v>State = term()</v>
+ <d>as returned by <seealso marker = "#init-1">ssh_channel:init/1</seealso></d>
</type>
<desc>
- <p> Makes an existing process an <c>ssh_channel</c>
- process. Does not return, instead the calling process will
- enter the <c>ssh_channel</c> process receive loop and become an
- <c>ssh_channel process.</c> The process must have been started using
- one of the start functions in proc_lib, see <seealso
- marker="stdlib:proc_lib">proc_lib(3)</seealso>. The
- user is responsible for any initialization of the process
- and needs to call <seealso marker = "#init-1">ssh_channel:init/1</seealso>
+ <p>Makes an existing process an <c>ssh_channel</c>
+      process. Does not return; instead, the calling process
+      enters the <c>ssh_channel</c> process receive loop and becomes an
+      <c>ssh_channel</c> process. The process must have been started using
+ one of the start functions in <c>proc_lib</c>, see the <seealso
+ marker="stdlib:proc_lib">proc_lib(3)</seealso> manual page in <c>stdlib</c>.
+ The user is responsible for any initialization of the process
+ and must call <seealso marker = "#init-1">ssh_channel:init/1</seealso>.
</p>
</desc>
</func>
<func>
<name>init(Options) -> {ok, State} | {ok, State, Timeout} | {stop, Reason} </name>
- <fsummary> Initiates a ssh_channel process.</fsummary>
+ <fsummary>Initiates an <c>ssh_channel</c> process.</fsummary>
<type>
<v>Options = [{Option, Value}]</v>
<v>State = term()</v>
- <v>Timeout = timeout() </v>
- <v>Reason = term() </v>
+ <v>Timeout = timeout()</v>
+ <v>Reason = term()</v>
</type>
<desc>
<p>
@@ -144,48 +160,47 @@
</p>
<taglist>
<tag><c><![CDATA[{channel_cb, atom()}]]></c></tag>
- <item>The module that implements the channel behaviour.</item>
+ <item><p>The module that implements the channel behaviour.</p></item>
<tag><c><![CDATA[{init_args(), list()}]]></c></tag>
- <item> The list of arguments to the callback module's
- init function.</item>
+ <item><p>The list of arguments to the <c>init</c> function of the callback module.</p></item>
<tag><c><![CDATA[{cm, connection_ref()}]]></c></tag>
- <item> Reference to the ssh connection as returned by <seealso
- marker="ssh#connect-3">ssh:connect/3</seealso></item>
+ <item><p>Reference to the <c>ssh</c> connection as returned by <seealso
+ marker="ssh#connect-3">ssh:connect/3</seealso></p></item>
<tag><c><![CDATA[{channel_id, channel_id()}]]></c></tag>
- <item> Id of the SSH channel.</item>
+ <item><p>Id of the <c>ssh</c> channel.</p></item>
</taglist>
<note><p>This function is normally not called by the
- user. The user only needs to call if for some reason the
+      user. The user only needs to call it if the
channel process needs to be started with help of
<c>proc_lib</c> instead of calling
<c>ssh_channel:start/4</c> or
- <c>ssh_channel:start_link/4</c> </p>
+ <c>ssh_channel:start_link/4</c>.</p>
</note>
</desc>
</func>
<func>
<name>reply(Client, Reply) -> _</name>
- <fsummary>Send a reply to a client.</fsummary>
+ <fsummary>Sends a reply to a client.</fsummary>
<type>
- <v>Client - opaque to the user, see explanation below</v>
+ <v>Client = opaque()</v>
<v>Reply = term()</v>
</type>
<desc>
- <p>This function can be used by a channel to explicitly send a
+ <p>This function can be used by a channel to send a
reply to a client that called <c>call/[2,3]</c> when the reply
cannot be defined in the return value of
<seealso marker ="#Module:handle_call-3">Module:handle_call/3</seealso>.</p>
<p><c>Client</c> must be the <c>From</c> argument provided to
the callback function <c>handle_call/3</c>.
<c>Reply</c> is an arbitrary term,
- which will be given back to the client as the return value of
- <seealso marker="#call-2">ssh_channel:call/[2,3].</seealso>></p>
+ which is given back to the client as the return value of
+ <seealso marker="#call-2">ssh_channel:call/[2,3].</seealso></p>
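+      <p>A sketch of a deferred reply, where the answer to a call is sent only
+      when data later arrives on the channel; the <c>pending</c> field of the
+      state record is an assumption of this example:</p>
+      <code>
+handle_call(get_data, From, State) ->
+    %% Defer the reply; it is sent from handle_ssh_msg/2 below.
+    {noreply, State#state{pending = From}};
+handle_call(_Other, _From, State) ->
+    {reply, {error, unknown_request}, State}.
+
+handle_ssh_msg({ssh_cm, _ConnectionRef, {data, _ChannelId, 0, Data}},
+               #state{pending = From} = State) when From =/= undefined ->
+    ssh_channel:reply(From, {ok, Data}),
+    {ok, State#state{pending = undefined}};
+handle_ssh_msg(_Msg, State) ->
+    {ok, State}.
+</code>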
</desc>
</func>
@@ -193,24 +208,25 @@
<name>start(SshConnection, ChannelId, ChannelCb, CbInitArgs) -> </name>
<name>start_link(SshConnection, ChannelId, ChannelCb, CbInitArgs) ->
{ok, ChannelRef} | {error, Reason}</name>
- <fsummary> Starts a processes that handles a SSH channel. </fsummary>
+ <fsummary>Starts a process that handles an SSH channel.</fsummary>
<type>
<v>SshConnection = ssh_connection_ref()</v>
- <v>ChannelId = ssh_channel_id() </v>
- <d> As returned by cannot be defined in the return value of
- <seealso marker ="ssh_connection#session_channel/2">ssh_connection:session_channel/[2,4]</seealso></d>
+ <v>ChannelId = ssh_channel_id()</v>
+ <d>As returned by
+ <seealso marker ="ssh_connection#session_channel/2">
+ ssh_connection:session_channel/[2,4]</seealso>.</d>
<v>ChannelCb = atom()</v>
- <d> The name of the module implementing the service specific parts
+ <d>Name of the module implementing the service-specific parts
of the channel.</d>
<v>CbInitArgs = [term()]</v>
- <d>Argument list for the init function in the callback module. </d>
+ <d>Argument list for the <c>init</c> function in the callback module.</d>
<v>ChannelRef = pid()</v>
</type>
<desc>
- <p>Starts a processes that handles an SSH channel. It will be
- called internally by the SSH daemon or explicitly by the SSH
- client implementations. The behavior will set the
- <c>trap_exit</c> flag to true.
+      <p>Starts a process that handles an SSH channel. It is
+      called internally by the <c>ssh</c> daemon, or explicitly by the <c>ssh</c>
+ client implementations. The behavior sets the
+ <c>trap_exit</c> flag to <c>true</c>.
</p>
</desc>
</func>
@@ -219,19 +235,19 @@
<section>
<marker id="cb_timeouts"></marker>
- <title> CALLBACK TIMEOUTS</title>
+ <title>CALLBACK TIME-OUTS</title>
- <p>The timeout values that may be returned by the callback functions
- has the same semantics as in a <seealso marker="stdlib:gen_server">gen_server</seealso>
- If the timeout occurs <seealso marker="#Module:handle_msg-2">handle_msg/2</seealso>
- will be called as <c>handle_msg(timeout, State). </c></p>
+ <p>The time-out values that can be returned by the callback functions
+ have the same semantics as in a <seealso marker="stdlib:gen_server">gen_server</seealso>.
+ If the time-out occurs, <seealso marker="#Module:handle_msg-2">handle_msg/2</seealso>
+ is called as <c>handle_msg(timeout, State)</c>.</p>
</section>
<funcs>
<func>
<name>Module:code_change(OldVsn, State, Extra) -> {ok,
NewState}</name>
- <fsummary> Converts process state when code is changed.</fsummary>
+ <fsummary>Converts process state when code is changed.</fsummary>
<type>
<v>OldVsn = term()</v>
<d>In the case of an upgrade, <c>OldVsn</c> is <c>Vsn</c>, and
@@ -241,31 +257,31 @@
<c>Module</c>. If no such attribute is defined, the version is
the checksum of the BEAM file.</d>
<v>State = term()</v>
- <d>The internal state of the channel.</d>
+ <d>Internal state of the channel.</d>
<v>Extra = term()</v>
- <d>Passed as-is from the <c>{advanced,Extra}</c>
+ <d>Passed "as-is" from the <c>{advanced,Extra}</c>
part of the update instruction.</d>
</type>
<desc>
- <p> Converts process state when code is changed.</p>
+ <p>Converts process state when code is changed.</p>
- <p>This function is called by a client side channel when it
- should update its internal state during a release
- upgrade/downgrade, i.e. when the instruction
- <c>{update,Module,Change,...}</c> where
- <c>Change={advanced,Extra}</c> is given in the <c>appup</c>
- file. See <seealso marker="doc/design_principles:release_handling#instr">OTP
- Design Principles</seealso> for more information.
+ <p>This function is called by a client-side channel when it
+ is to update its internal state during a release
+ upgrade or downgrade, that is, when the instruction
+ <c>{update,Module,Change,...}</c>, where
+ <c>Change={advanced,Extra}</c>, is given in the <c>appup</c>
+ file. For more information, refer to Section 9.11.6
+ Release Handling Instructions in the
+ <seealso marker="doc/design_principles:release_handling#instr">System Documentation</seealso>.
</p>
<note><p>Soft upgrade according to the OTP release concept
is not straight forward for the server side, as subsystem
- channel processes are spawned by the SSH application and
- hence added to its supervisor tree. It could be possible to
- upgrade the subsystem channels, when upgrading the user
- application, if the callback functions can handle two
- versions of the state, but this function can not be used in
- the normal way.</p>
+ channel processes are spawned by the <c>ssh</c> application and
+ hence added to its supervisor tree. The subsystem channels can
+ be upgraded when upgrading the user application, if the callback
+ functions can handle two versions of the state, but this function
+ cannot be used in the normal way.</p>
</note>
</desc>
@@ -274,36 +290,38 @@
<func>
<name>Module:init(Args) -> {ok, State} | {ok, State, timeout()} |
{stop, Reason}</name>
- <fsummary> Makes necessary initializations and returns the
+ <fsummary>Makes necessary initializations and returns the
initial channel state if the initializations succeed.</fsummary>
<type>
- <v> Args = term() </v>
- <d> Last argument to ssh_channel:start_link/4.</d>
- <v> State = term() </v>
- <v> Reason = term() </v>
+ <v>Args = term()</v>
+ <d>Last argument to <c>ssh_channel:start_link/4</c>.</d>
+ <v>State = term()</v>
+ <v>Reason = term()</v>
</type>
<desc>
- <p> Makes necessary initializations and returns the initial channel
+ <p>Makes necessary initializations and returns the initial channel
state if the initializations succeed.
</p>
- <p>For more detailed information on timeouts see the section
- <seealso marker="#cb_timeouts">CALLBACK TIMEOUTS</seealso>. </p>
+ <p>For more detailed information on time-outs, see Section
+ <seealso marker="#cb_timeouts">CALLBACK TIME-OUTS</seealso>. </p>
</desc>
</func>
<func>
<name>Module:handle_call(Msg, From, State) -> Result</name>
- <fsummary> Handles messages sent by calling
- <c>ssh_channel:call/[2,3]</c></fsummary>
+ <fsummary>Handles messages sent by calling
+ <c>ssh_channel:call/[2,3]</c>.</fsummary>
<type>
<v>Msg = term()</v>
- <v>From = opaque to the user should be used as argument to
- ssh_channel:reply/2</v>
+ <v>From = opaque()</v>
+      <d>Is to be used as an argument to
+ <seealso marker="#reply-2">ssh_channel:reply/2</seealso></d>
<v>State = term()</v>
<v>Result = {reply, Reply, NewState} | {reply, Reply, NewState, timeout()}
| {noreply, NewState} | {noreply , NewState, timeout()}
| {stop, Reason, Reply, NewState} | {stop, Reason, NewState} </v>
- <v>Reply = term() - will be the return value of ssh_channel:call/[2,3]</v>
+ <v>Reply = term()</v>
+ <d>Will be the return value of <seealso marker="#call-2">ssh_channel:call/[2,3]</seealso></d>
<v>NewState = term()</v>
<v>Reason = term()</v>
</type>
@@ -311,15 +329,15 @@
<p>Handles messages sent by calling
<seealso marker="#call-2">ssh_channel:call/[2,3]</seealso>
</p>
- <p>For more detailed information on timeouts see the section
- <seealso marker="#cb_timeouts">CALLBACK TIMEOUTS</seealso>. </p>
+      <p>For more detailed information on time-outs, see Section
+ <seealso marker="#cb_timeouts">CALLBACK TIME-OUTS</seealso>.</p>
</desc>
</func>
<func>
<name>Module:handle_cast(Msg, State) -> Result</name>
- <fsummary> Handles messages sent by calling
- <c>ssh_channel:cact/2</c></fsummary>
+ <fsummary>Handles messages sent by calling
+      <c>ssh_channel:cast/2</c>.</fsummary>
<type>
<v>Msg = term()</v>
<v>State = term()</v>
@@ -329,11 +347,11 @@
<v>Reason = term()</v>
</type>
<desc>
- <p> Handles messages sent by calling
- <c>ssh_channel:cast/2</c>
+ <p>Handles messages sent by calling
+ <c>ssh_channel:cast/2</c>.
</p>
- <p>For more detailed information on timeouts see the section
- <seealso marker="#cb_timeouts">CALLBACK TIMEOUTS</seealso>. </p>
+ <p>For more detailed information on time-outs, see Section
+ <seealso marker="#cb_timeouts">CALLBACK TIME-OUTS</seealso>.</p>
</desc>
</func>
@@ -341,33 +359,33 @@
<name>Module:handle_msg(Msg, State) -> {ok, State} |
{stop, ChannelId, State}</name>
- <fsummary> Handle other messages than SSH connection protocol,
- call or cast messages sent to the channel.</fsummary>
+ <fsummary>Handles other messages than SSH connection protocol,
+ call, or cast messages sent to the channel.</fsummary>
<type>
<v>Msg = timeout | term()</v>
<v>ChannelId = ssh_channel_id()</v>
<v>State = term() </v>
</type>
<desc>
- <p>Handle other messages than ssh connection protocol, call or
+ <p>Handles other messages than SSH Connection Protocol, call, or
cast messages sent to the channel.
</p>
- <p> Possible erlang 'EXIT'-messages should be handled by this
- function and all channels should handle the following message.</p>
+      <p>Possible Erlang 'EXIT' messages are to be handled by this
+      function. All channels are also to handle the following message.</p>
<taglist>
<tag><c><![CDATA[{ssh_channel_up, ssh_channel_id(),
ssh_connection_ref()}]]></c></tag>
- <item>This is the first messages that will be received by
- the channel, it is sent just before the <seealso
+ <item><p>This is the first message that the channel receives.
+ It is sent just before the <seealso
marker="#init-1">ssh_channel:init/1</seealso> function
- returns successfully. This is especially useful if the
+ returns successfully. This is especially useful if the
server wants to send a message to the client without first
receiving a message from it. If the message is not
- useful for your particular scenario just ignore it by
- immediately returning {ok, State}.
- </item>
+ useful for your particular scenario, ignore it by
+ immediately returning <c>{ok, State}</c>.
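+      A minimal handling clause for this message is sketched after this list.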
+ </p></item>
</taglist>
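+      <p>As an illustration, a channel that wants to greet the client as soon
+      as the channel is up could handle the <c>ssh_channel_up</c> message as
+      follows (the payload sent is only an example):</p>
+      <code><![CDATA[
+handle_msg({ssh_channel_up, ChannelId, ConnectionRef}, State) ->
+    %% The channel is established; push a greeting to the peer.
+    ssh_connection:send(ConnectionRef, ChannelId, <<"ready\r\n">>),
+    {ok, State};
+handle_msg(_Other, State) ->
+    {ok, State}.
+]]></code>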
</desc>
</func>
@@ -375,42 +393,44 @@
<func>
<name>Module:handle_ssh_msg(Msg, State) -> {ok, State} | {stop,
ChannelId, State}</name>
- <fsummary> Handles ssh connection protocol messages. </fsummary>
+ <fsummary>Handles <c>ssh</c> connection protocol messages.</fsummary>
<type>
- <v>Msg = <seealso marker="ssh_connection"> ssh_connection:event() </seealso> </v>
+ <v>Msg = ssh_connection:event()</v>
<v>ChannelId = ssh_channel_id()</v>
<v>State = term()</v>
</type>
<desc>
- <p> Handles SSH connection protocol messages that may need
- service specific attention.
+ <p>Handles SSH Connection Protocol messages that may need
+ service-specific attention. For details,
+ see <seealso marker="ssh_connection"> ssh_connection:event()</seealso>.
</p>
- <p> The following message is completely taken care of by the
- SSH channel behavior</p>
+ <p>The following message is taken care of by the
+ <c>ssh_channel</c> behavior.</p>
<taglist>
<tag><c><![CDATA[{closed, ssh_channel_id()}]]></c></tag>
- <item> The channel behavior will send a close message to the
- other side if such a message has not already been sent and
- then terminate the channel with reason normal.</item>
+ <item><p>The channel behavior sends a close message to the
+ other side, if such a message has not already been sent.
+ Then it terminates the channel with reason <c>normal</c>.</p></item>
</taglist>
</desc>
</func>
<func>
<name>Module:terminate(Reason, State) -> _</name>
- <fsummary> </fsummary>
+ <fsummary>Does cleaning up before channel process termination.
+ </fsummary>
<type>
<v>Reason = term()</v>
<v>State = term()</v>
</type>
<desc>
<p>This function is called by a channel process when it is
- about to terminate. Before this function is called <seealso
+ about to terminate. Before this function is called, <seealso
marker="ssh_connection#close-2"> ssh_connection:close/2
- </seealso> will be called if it has not been called earlier.
- This function should do any necessary cleaning
+ </seealso> is called, if it has not been called earlier.
+ This function does any necessary cleaning
up. When it returns, the channel process terminates with
reason <c>Reason</c>. The return value is ignored.
</p>
diff --git a/lib/ssh/doc/src/ssh_client_key_api.xml b/lib/ssh/doc/src/ssh_client_key_api.xml
index f3d05a8980..9a892d71fd 100644
--- a/lib/ssh/doc/src/ssh_client_key_api.xml
+++ b/lib/ssh/doc/src/ssh_client_key_api.xml
@@ -23,102 +23,112 @@
The Initial Developer of the Original Code is Ericsson AB.
</legalnotice>
<title>ssh_client_key_api</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
</header>
<module>ssh_client_key_api</module>
<modulesummary>
-behaviour(ssh_client_key_api).
</modulesummary>
<description>
- <p> Behavior describing the API for an SSH client's public key handling.
- By implementing the callbacks defined.
- in this behavior it is possible to customize the SSH client's public key
- handling. By default the SSH application implements this behavior
- with help of the standard openssh files, see <seealso marker="SSH_app"> ssh(6)</seealso>. </p>
+ <p>Behavior describing the API for public key handling of an SSH client. By implementing
+ the callbacks defined in this behavior, the public key handling of an SSH client can
+ be customized. By default the <c>ssh</c> application implements this behavior
+ with help of the standard OpenSSH files,
+ see the <seealso marker="SSH_app"> ssh(6)</seealso> application manual.</p>
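+    <p>A minimal sketch of such a callback module is shown below. It trusts
+    every host key, which is acceptable only in controlled test setups, and it
+    reads the user key from a fixed PEM file; the module name and file path are
+    assumptions of this example.</p>
+    <code>
+-module(my_client_key_cb).
+-behaviour(ssh_client_key_api).
+-export([add_host_key/3, is_host_key/4, user_key/2]).
+
+%% Accept the host key without storing it (a real implementation persists it).
+add_host_key(_HostNames, _Key, _ConnectOptions) ->
+    ok.
+
+%% Trust every host key; do not use this outside of test environments.
+is_host_key(_Key, _Host, _Algorithm, _ConnectOptions) ->
+    true.
+
+%% Decode the user's RSA key from a fixed, illustrative file path.
+user_key('ssh-rsa', _ConnectOptions) ->
+    {ok, Pem} = file:read_file("/tmp/test_keys/id_rsa"),
+    [Entry] = public_key:pem_decode(Pem),
+    {ok, public_key:pem_entry_decode(Entry)};
+user_key(_Algorithm, _ConnectOptions) ->
+    {error, no_such_key}.
+</code>
+    <p>Such a module can then be referenced from the <c>key_cb</c> option when
+    connecting.</p>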
</description>
<section>
- <title>DATA TYPES </title>
+ <title>DATA TYPES</title>
- <p>Type definitions that are used more than once in this module
- and/or abstractions to indicate the intended use of the data
- type. For more details on public key data types
- see the <seealso marker="public_key:public_key_records"> public_key user's guide.</seealso>
+ <p>Type definitions that are used more than once in this module,
+ or abstractions to indicate the intended use of the data
+ type, or both. For more details on public key data types,
+ refer to Section 2 Public Key Records in the
+ <seealso marker="public_key:public_key_records"> public_key user's guide:</seealso>
</p>
-
- <p> boolean() = true | false</p>
- <p> string() = [byte()] </p>
- <p> public_key() = #'RSAPublicKey'{}| {integer(), #'Dss-Parms'{}}| term()</p>
- <p> private_key() = #'RSAPrivateKey'{} | #'DSAPrivateKey'{} | term()</p>
- <p> public_key_algorithm() = 'ssh-rsa'| 'ssh-dss' | atom()</p>
-
+ <taglist>
+ <tag><c>boolean() =</c></tag>
+ <item><p><c>true | false</c></p></item>
+ <tag><c>string() =</c></tag>
+ <item><p><c>[byte()]</c></p></item>
+ <tag><c>public_key() =</c></tag>
+ <item><p><c>#'RSAPublicKey'{}| {integer(), #'Dss-Parms'{}}| term()</c></p></item>
+ <tag><c>private_key() =</c></tag>
+ <item><p><c>#'RSAPrivateKey'{} | #'DSAPrivateKey'{} | term()</c></p></item>
+ <tag><c>public_key_algorithm() =</c></tag>
+ <item><p><c>'ssh-rsa'| 'ssh-dss' | atom()</c></p></item>
+ </taglist>
</section>
<funcs>
<func>
<name>Module:add_host_key(HostNames, Key, ConnectOptions) -> ok | {error, Reason}</name>
- <fsummary>Adds a host key to the set of trusted host keys</fsummary>
+ <fsummary>Adds a host key to the set of trusted host keys.</fsummary>
<type>
<v>HostNames = string()</v>
- <d>Description of the host that owns the <c>PublicKey</c></d>
+ <d>Description of the host that owns the <c>PublicKey</c>.</d>
- <v>Key = public_key() </v>
- <d> Normally an RSA or DSA public key but handling of other public keys can be added</d>
+ <v>Key = public_key()</v>
+ <d>Normally an RSA or DSA public key, but handling of other public keys can be added.</d>
- <v>ConnectOptions = proplists:proplist() </v>
- <d>Options provided to <seealso marker="ssh#connect-3">ssh:connect/[3,4]</seealso></d>
- <v>Reason = term() </v>
+ <v>ConnectOptions = proplists:proplist()</v>
+ <d>Options provided to <seealso marker="ssh#connect-3">ssh:connect/[3,4]</seealso></d>
+      <v>Reason = term()</v>
</type>
<desc>
- <p> Adds a host key to the set of trusted host keys</p>
+ <p>Adds a host key to the set of trusted host keys.</p>
</desc>
</func>
<func>
<name>Module:is_host_key(Key, Host, Algorithm, ConnectOptions) -> Result</name>
- <fsummary>Checks if a host key is trusted</fsummary>
+ <fsummary>Checks if a host key is trusted.</fsummary>
<type>
<v>Key = public_key() </v>
- <d> Normally an RSA or DSA public key but handling of other public keys can be added</d>
+ <d>Normally an RSA or DSA public key, but handling of other public keys can be added.</d>
<v>Host = string()</v>
- <d>Description of the host</d>
+ <d>Description of the host.</d>
<v>Algorithm = public_key_algorithm()</v>
- <d> Host key algorithm. Should support 'ssh-rsa'| 'ssh-dss' but additional algorithms
+ <d>Host key algorithm. Is to support <c>'ssh-rsa'| 'ssh-dss'</c>, but more algorithms
can be handled.</d>
- <v> ConnectOptions = proplists:proplist() </v>
- <d>Options provided to <seealso marker="ssh#connect-3">ssh:connect/[3,4]</seealso></d>
+ <v>ConnectOptions = proplists:proplist() </v>
+ <d>Options provided to <seealso marker="ssh#connect-3">ssh:connect/[3,4]</seealso>.</d>
- <v> Result = boolean()</v>
+ <v>Result = boolean()</v>
</type>
<desc>
- <p>Checks if a host key is trusted</p>
+ <p>Checks if a host key is trusted.</p>
</desc>
</func>
<func>
<name>Module:user_key(Algorithm, ConnectOptions) ->
{ok, PrivateKey} | {error, Reason}</name>
- <fsummary>Fetches the users "public key" matching the <c>Algorithm</c>.</fsummary>
+      <fsummary>Fetches the user's <em>public key</em> matching the <c>Algorithm</c>.</fsummary>
<type>
<v>Algorithm = public_key_algorithm()</v>
- <d> Host key algorithm. Should support 'ssh-rsa'| 'ssh-dss' but additional algorithms
+      <d>Host key algorithm. Is to support <c>'ssh-rsa'| 'ssh-dss'</c>, but more algorithms
can be handled.</d>
- <v> ConnectOptions = proplists:proplist() </v>
- <d>Options provided to <seealso marker="ssh#connect-3">ssh:connect/[3,4]</seealso></d>
+ <v>ConnectOptions = proplists:proplist()</v>
+ <d>Options provided to <seealso marker="ssh#connect-3">ssh:connect/[3,4]</seealso></d>
- <v> PrivateKey = private_key()</v>
- <d> The private key of the user matching the <c>Algorithm</c></d>
+ <v>PrivateKey = private_key()</v>
+ <d>Private key of the user matching the <c>Algorithm</c>.</d>
- <v>Reason = term() </v>
+ <v>Reason = term()</v>
</type>
<desc>
- <p>Fetches the users "public key" matching the <c>Algorithm</c>.
- <note><p>The private key contains the public key</p></note>
- </p>
+      <p>Fetches the user's <em>public key</em> matching the <c>Algorithm</c>.</p>
+ <note><p>The private key contains the public key.</p></note>
+
</desc>
</func>
diff --git a/lib/ssh/doc/src/ssh_connection.xml b/lib/ssh/doc/src/ssh_connection.xml
index ff72cf7ee0..5422633dc3 100644
--- a/lib/ssh/doc/src/ssh_connection.xml
+++ b/lib/ssh/doc/src/ssh_connection.xml
@@ -24,155 +24,174 @@
</legalnotice>
<title>ssh_connection</title>
+ <prepared></prepared>
+ <docno></docno>
<date></date>
+ <rev></rev>
</header>
<module>ssh_connection</module>
- <modulesummary>This module provides API functions to send <url href="http://www.ietf.org/rfc/rfc4254.txt"> SSH Connection Protocol </url>
+ <modulesummary>This module provides API functions to send
+ <url href="http://www.ietf.org/rfc/rfc4254.txt"> SSH Connection Protocol </url>
events to the other side of an SSH channel.
</modulesummary>
<description>
- <p>The SSH Connection Protocol is used by clients and servers
- (i.e. SSH channels) to communicate over the SSH connection. The
- API functions in this module sends SSH Connection Protocol events
- that are received as messages by the remote channel.
- In the case that the receiving channel is an Erlang process the
- message will be on the following format
- <c><![CDATA[{ssh_cm, ssh_connection_ref(), ssh_event_msg()}]]></c>. If the <seealso
- marker="ssh_channel">ssh_channel</seealso> behavior is used to
- implement the channel process these will be handled by
- <seealso
- marker="ssh_channel#Module:handle_ssh_msg-2">handle_ssh_msg/2 </seealso>.</p>
+ <p>The SSH Connection Protocol is used by clients and servers,
+ that is, SSH channels, to communicate over the SSH connection. The
+ API functions in this module send SSH Connection Protocol events,
+ which are received as messages by the remote channel.
+ If the receiving channel is an Erlang process, the
+ messages have the format
+ <c><![CDATA[{ssh_cm, ssh_connection_ref(), ssh_event_msg()}]]></c>.
+ If the <seealso marker="ssh_channel">ssh_channel</seealso> behavior is used to
+ implement the channel process, these messages are handled by
+ <seealso marker="ssh_channel#Module:handle_ssh_msg-2">handle_ssh_msg/2</seealso>.</p>
</description>
<section>
- <title>DATA TYPES </title>
-
- <p>Type definitions that are used more than once in this module and/or
- abstractions to indicate the intended use of the data type:</p>
-
- <p><c>boolean() = true | false </c></p>
- <p><c>string() = list of ASCII characters</c></p>
- <p><c>timeout() = infinity | integer() - in milliseconds.</c></p>
- <p><c>ssh_connection_ref() - opaque to the user returned by
- ssh:connect/3 or sent to an SSH channel processes</c></p>
- <p><c>ssh_channel_id() = integer() </c></p>
- <p><c>ssh_data_type_code() = 1 ("stderr") | 0 ("normal") are
- currently valid values see</c> <url href="http://www.ietf.org/rfc/rfc4254.txt">RFC 4254 </url> section 5.2.</p>
- <p><c>ssh_request_status() = success | failure</c></p>
- <p><c>event() = {ssh_cm, ssh_connection_ref(), ssh_event_msg()} </c></p>
- <p><c>ssh_event_msg() = data_events() | status_events() | terminal_events() </c></p>
+ <title>DATA TYPES</title>
+
+ <p>Type definitions that are used more than once in this module,
+ or abstractions to indicate the intended use of the data
+ type, or both:</p>
+
+ <taglist>
+ <tag><c>boolean() =</c></tag>
+ <item><p><c>true | false </c></p></item>
+ <tag><c>string() =</c></tag>
+ <item><p>list of ASCII characters</p></item>
+ <tag><c>timeout() =</c></tag>
+ <item><p><c>infinity | integer()</c> in milliseconds</p></item>
+ <tag><c>ssh_connection_ref() =</c></tag>
+    <item><p>opaque(), as returned by
+    <c>ssh:connect/3</c> or sent to an SSH channel process</p></item>
+ <tag><c>ssh_channel_id() =</c></tag>
+ <item><p><c>integer()</c></p></item>
+ <tag><c>ssh_data_type_code() =</c></tag>
+ <item><p><c>1</c> ("stderr") | <c>0</c> ("normal") are
+ valid values, see
+ <url href="http://www.ietf.org/rfc/rfc4254.txt">RFC 4254</url> Section 5.2.</p></item>
+ <tag><c>ssh_request_status() =</c></tag>
+ <item><p> <c>success | failure</c></p></item>
+ <tag><c>event() =</c></tag>
+ <item><p><c>{ssh_cm, ssh_connection_ref(), ssh_event_msg()}</c></p></item>
+ <tag><c>ssh_event_msg() =</c></tag>
+ <item><p><c>data_events() | status_events() | terminal_events()</c></p></item>
+ <tag><c>reason() =</c></tag>
+ <item><p><c>timeout | closed</c></p></item>
+ </taglist>
<taglist>
- <tag><b>data_events()</b></tag>
+ <tag><em>data_events()</em></tag>
<item>
<taglist>
<tag><c><![CDATA[{data, ssh_channel_id(), ssh_data_type_code(), binary() = Data}]]></c></tag>
- <item> Data has arrived on the channel. This event is sent as
- result of calling <seealso marker="ssh_connection#send-3"> ssh_connection:send/[3,4,5] </seealso></item>
+ <item><p>Data has arrived on the channel. This event is sent as a
+ result of calling <seealso marker="ssh_connection#send-3">
+ ssh_connection:send/[3,4,5]</seealso>.</p></item>
<tag><c><![CDATA[{eof, ssh_channel_id()}]]></c></tag>
- <item>Indicates that the other side will not send any more
- data. This event is sent as result of calling <seealso
- marker="ssh_connection#send_eof-2"> ssh_connection:send_eof/2</seealso>
- </item>
+ <item><p>Indicates that the other side sends no more data.
+ This event is sent as a result of calling <seealso
+ marker="ssh_connection#send_eof-2"> ssh_connection:send_eof/2</seealso>.
+ </p></item>
</taglist>
</item>
- <tag><b>status_events()</b></tag>
+ <tag><em>status_events()</em></tag>
<item>
<taglist>
<tag><c><![CDATA[{signal, ssh_channel_id(), ssh_signal()}]]></c></tag>
- <item>A signal can be delivered to the remote process/service
- using the following message. Some systems will not support
- signals, in which case they should ignore this message. There is
- currently no funtion to generate this event as the signals
- refered to are on OS-level and not something generated by an
- Erlang program.</item>
+ <item><p>A signal can be delivered to the remote process/service
+ using the following message. Some systems do not support
+ signals, in which case they are to ignore this message. There is
+ currently no function to generate this event as the signals
+ referred to are on OS-level and not something generated by an
+ Erlang program.</p></item>
<tag><c><![CDATA[{exit_signal, ssh_channel_id(), string() = ExitSignal, string() = ErrorMsg,
string() = LanguageString}]]></c></tag>
- <item>A remote execution may terminate violently due to a signal
- then this message may be received. For details on valid string
- values see <url href="http://www.ietf.org/rfc/rfc4254.txt">RFC 4254</url> section 6.10. Special case of the signals
- mentioned above.</item>
+      <item><p>A remote execution can terminate violently because of a signal.
+      Then this message can be received. For details on valid string
+      values, see <url href="http://www.ietf.org/rfc/rfc4254.txt">RFC 4254</url>,
+      Section 6.10. This is a special case of the signals mentioned above.</p></item>
<tag><c><![CDATA[{exit_status, ssh_channel_id(), integer() = ExitStatus}]]></c></tag>
- <item> When the command running at the other end terminates, the
+ <item><p>When the command running at the other end terminates, the
following message can be sent to return the exit status of the
- command. A zero 'exit_status' usually means that the command
- terminated successfully. This event is sent as result of calling
+ command. A zero <c>exit_status</c> usually means that the command
+ terminated successfully. This event is sent as a result of calling
<seealso marker="ssh_connection#exit_status-3">
- ssh_connection:exit_status/3</seealso></item>
+ ssh_connection:exit_status/3</seealso>.</p></item>
<tag><c><![CDATA[{closed, ssh_channel_id()}]]></c></tag>
- <item> This event is sent as result of calling
- <seealso marker="ssh_connection#close-2">ssh_connection:close/2</seealso> Both the handling of this
- event and sending of it will be taken care of by the
- <seealso marker="ssh_channel">ssh_channel</seealso> behavior.</item>
+ <item><p>This event is sent as a result of calling
+ <seealso marker="ssh_connection#close-2">ssh_connection:close/2</seealso>.
+ Both the handling of this event and sending it are taken care of by the
+ <seealso marker="ssh_channel">ssh_channel</seealso> behavior.</p></item>
</taglist>
</item>
- <tag><b>terminal_events()</b></tag>
+ <tag><em>terminal_events()</em></tag>
<item>
- <p> Channels implementing a shell and command execution on the
- server side should handle the following messages that may be sent by client channel processes. </p>
+      <p>Channels implementing a shell and command execution on the
+      server side are to handle the following messages that can be sent by
+      client-channel processes.</p>
- <note> <p>Events that includes a <c> WantReply</c> expects the event handling
- process to call <seealso marker="ssh_connection#reply_request-4">ssh_connection:reply_request/4</seealso>
- with the boolean value of <c> WantReply</c> as the second
- argument. </p></note>
+ <p>Events that include a <c>WantReply</c> expect the event handling
+ process to call <seealso marker="ssh_connection#reply_request-4">
+ ssh_connection:reply_request/4</seealso>
+ with the boolean value of <c>WantReply</c> as the second argument.</p>
<taglist>
<tag><c><![CDATA[{env, ssh_channel_id(), boolean() = WantReply,
string() = Var, string() = Value}]]></c></tag>
- <item> Environment variables may be passed to the shell/command
- to be started later. This event is sent as result of calling <seealso
- marker="ssh_connection#setenv-5"> ssh_connection:setenv/5</seealso>
- </item>
+ <item><p>Environment variables can be passed to the shell/command
+ to be started later. This event is sent as a result of calling <seealso
+ marker="ssh_connection#setenv-5"> ssh_connection:setenv/5</seealso>.
+ </p></item>
<tag><c><![CDATA[{pty, ssh_channel_id(),
boolean() = WantReply, {string() = Terminal, integer() = CharWidth,
integer() = RowHeight, integer() = PixelWidth, integer() = PixelHeight,
[{atom() | integer() = Opcode,
integer() = Value}] = TerminalModes}}]]></c></tag>
- <item>A pseudo-terminal has been requested for the
- session. Terminal is the value of the TERM environment
- variable value (e.g., vt100). Zero dimension parameters must
- be ignored. The character/row dimensions override the pixel
- dimensions (when nonzero). Pixel dimensions refer to the
- drawable area of the window. The <c>Opcode</c> in the
+ <item><p>A pseudo-terminal has been requested for the
+      session. <c>Terminal</c> is the value of the TERM environment
+      variable, for example, <c>vt100</c>. Zero dimension parameters must
+ be ignored. The character/row dimensions override the pixel
+ dimensions (when non-zero). Pixel dimensions refer to the
+ drawable area of the window. <c>Opcode</c> in the
<c>TerminalModes</c> list is the mnemonic name, represented
- as an lowercase erlang atom, defined in
- <url href="http://www.ietf.org/rfc/rfc4254.txt">RFC 4254 </url> section 8.
- It may also be an opcode if the mnemonic name is not listed in the
- RFC. Example <c>OP code: 53, mnemonic name ECHO erlang atom:
- echo</c>.This event is sent as result of calling <seealso
- marker="ssh_connection#ptty_alloc/4">ssh_connection:ptty_alloc/4</seealso></item>
+ as a lowercase Erlang atom, defined in
+ <url href="http://www.ietf.org/rfc/rfc4254.txt">RFC 4254</url>, Section 8.
+ It can also be an <c>Opcode</c> if the mnemonic name is not listed in the
+      RFC. Example: <c>OP code: 53, mnemonic name: ECHO, Erlang atom:
+      echo</c>. This event is sent as a result of calling <seealso
+ marker="ssh_connection#ptty_alloc/4">ssh_connection:ptty_alloc/4</seealso>.</p></item>
<tag><c><![CDATA[{shell, boolean() = WantReply}]]></c></tag>
- <item> This message will request that the user's default shell
- be started at the other end. This event is sent as result of calling <seealso
- marker="ssh_connection#shell-2"> ssh_connection:shell/2</seealso>
- </item>
+ <item><p>This message requests that the user default shell
+ is started at the other end. This event is sent as a result of calling
+ <seealso marker="ssh_connection#shell-2"> ssh_connection:shell/2</seealso>.
+ </p></item>
<tag><c><![CDATA[{window_change, ssh_channel_id(), integer() = CharWidth,
integer() = RowHeight, integer() = PixWidth, integer() = PixHeight}]]></c></tag>
- <item> When the window (terminal) size changes on the client
- side, it MAY send a message to the server side to inform it of
- the new dimensions. There is currently no API function to generate this
- event.</item>
+ <item><p>When the window (terminal) size changes on the client
+ side, it <em>can</em> send a message to the server side to inform it of
+ the new dimensions. No API function generates this event.</p></item>
<tag><c><![CDATA[{exec, ssh_channel_id(),
boolean() = WantReply, string() = Cmd}]]></c></tag>
- <item> This message will request that the server starts
- execution of the given command. This event is sent as result of calling <seealso
- marker="ssh_connection#exec-4">ssh_connection:exec/4 </seealso>
- </item>
+ <item><p>This message requests that the server starts
+ execution of the given command. This event is sent as a result of calling <seealso
+ marker="ssh_connection#exec-4">ssh_connection:exec/4 </seealso>.
+ </p></item>
</taglist>
</item>
</taglist>
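+    <p>For illustration only, a minimal sketch (not part of the protocol
+    description) of how a channel callback module based on the
+    <seealso marker="ssh_channel">ssh_channel</seealso> behavior can match
+    some of these events in <c>Module:handle_ssh_msg/2</c>; the variable
+    <c>State</c> is the callback state of that module:</p>
+    <code type="none">
+handle_ssh_msg({ssh_cm, _ConnectionRef,
+                {data, _ChannelId, 0, Data}}, State) ->
+    %% Data of type code 0 ("normal") arrived on the channel
+    io:format("~s", [Data]),
+    {ok, State};
+handle_ssh_msg({ssh_cm, _ConnectionRef, {eof, _ChannelId}}, State) ->
+    %% The peer sends no more data
+    {ok, State};
+handle_ssh_msg({ssh_cm, _ConnectionRef,
+                {exit_status, ChannelId, _ExitStatus}}, State) ->
+    %% The remote command has terminated; stop the channel
+    {stop, ChannelId, State}.
+    </code>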
@@ -182,80 +201,83 @@
<func>
<name>adjust_window(ConnectionRef, ChannelId, NumOfBytes) -> ok</name>
- <fsummary>Adjusts the SSH flowcontrol window. </fsummary>
+ <fsummary>Adjusts the SSH flow control window.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref() </v>
- <v> ChannelId = ssh_channel_id() </v>
- <v> NumOfBytes = integer()</v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>ChannelId = ssh_channel_id()</v>
+ <v>NumOfBytes = integer()</v>
</type>
<desc>
- <p>Adjusts the SSH flowcontrol window. This shall be done by both client and server side channel processes.</p>
+ <p>Adjusts the SSH flow control window. This is to be done by both the
+ client- and server-side channel processes.</p>
- <note><p>Channels implemented with the <seealso marker="ssh_channel"> ssh_channel
- behavior</seealso> will normaly not need to call this function as flow control
- will be handled by the behavior. The behavior will adjust the window every time
+ <note><p>Channels implemented with the <seealso marker="ssh_channel"> ssh_channel</seealso>
+ behavior do not normally need to call this function as flow control
+ is handled by the behavior. The behavior adjusts the window every time
the callback <seealso marker="ssh_channel#Module:handle_ssh_msg-2">
- handle_ssh_msg/2 </seealso> has returned after processing channel data</p> </note>
+ handle_ssh_msg/2</seealso> returns after processing channel data.</p></note>
</desc>
</func>
<func>
<name>close(ConnectionRef, ChannelId) -> ok</name>
- <fsummary>Sends a close message on the channel <c>ChannelId</c>. </fsummary>
+ <fsummary>Sends a close message on the channel <c>ChannelId</c>.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref() </v>
- <v> ChannelId = ssh_channel_id()</v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>ChannelId = ssh_channel_id()</v>
</type>
<desc>
- <p>A server or client channel process can choose to close their session by sending a close event.
+ <p>A server- or client-channel process can choose to close their session by
+ sending a close event.
</p>
- <note><p>This function will be called by the ssh_channel
- behavior when the channel is terminated see <seealso
- marker="ssh_channel"> ssh_channel(3) </seealso> so channels implemented with the
- behavior should not call this function explicitly.</p></note>
+ <note><p>This function is called by the <c>ssh_channel</c>
+ behavior when the channel is terminated, see <seealso
+ marker="ssh_channel"> ssh_channel(3)</seealso>. Thus, channels implemented
+ with the behavior are not to call this function explicitly.</p></note>
</desc>
</func>
<func>
- <name>exec(ConnectionRef, ChannelId, Command, TimeOut) -> ssh_request_status() </name>
- <fsummary>Request that the server start the execution of the given command. </fsummary>
+ <name>exec(ConnectionRef, ChannelId, Command, TimeOut) -> ssh_request_status() |
+ {error, reason()}</name>
+ <fsummary>Requests that the server starts the execution of the given command.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref() </v>
- <v> ChannelId = ssh_channel_id()</v>
- <v> Command = string()</v>
- <v>Timeout = timeout() </v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>ChannelId = ssh_channel_id()</v>
+ <v>Command = string()</v>
+ <v>Timeout = timeout()</v>
</type>
<desc>
- <p>Should be called by a client channel process to request that the server starts execution of the
- given command, the result will be several messages according to the following pattern. Note
- that the last message will be a channel close message, as the exec request is a one time
- execution that closes the channel when it is done.</p>
+ <p>Is to be called by a client-channel process to request that the server starts
+ executing the given command. The result is several messages according to the
+ following pattern. The last message is a channel close message, as the <c>exec</c>
+ request is a one-time execution that closes the channel when it is done.</p>
<taglist>
- <tag><c> N x {ssh_cm, ssh_connection_ref(),
- {data, ssh_channel_id(), ssh_data_type_code(), binary() = Data}} </c></tag>
- <item>The result of executing the command may be only one line
- or thousands of lines depending on the command.</item>
+ <tag><c>N x {ssh_cm, ssh_connection_ref(),
+ {data, ssh_channel_id(), ssh_data_type_code(), binary() = Data}}</c></tag>
+ <item><p>The result of executing the command can be only one line
+ or thousands of lines depending on the command.</p></item>
<tag><c>0 or 1 x {ssh_cm, ssh_connection_ref(), {eof, ssh_channel_id()}}</c></tag>
- <item>Indicates that no more data will be sent.</item>
+ <item><p>Indicates that no more data is to be sent.</p></item>
<tag><c>0 or 1 x {ssh_cm,
ssh_connection_ref(), {exit_signal,
ssh_channel_id(), string() = ExitSignal, string() = ErrorMsg, string() = LanguageString}}</c></tag>
- <item>Not all systems send signals. For details on valid string
- values see RFC 4254 section 6.10 </item>
+ <item><p>Not all systems send signals. For details on valid string
+      values, see RFC 4254, Section 6.10.</p></item>
<tag><c>0 or 1 x {ssh_cm, ssh_connection_ref(), {exit_status,
ssh_channel_id(), integer() = ExitStatus}}</c></tag>
- <item>It is recommended by the <c>ssh connection protocol</c> that this
- message shall be sent, but that may not always be the case.</item>
+ <item><p>It is recommended by the SSH Connection Protocol to send this
+ message, but that is not always the case.</p></item>
- <tag><c> 1 x {ssh_cm, ssh_connection_ref(),
+ <tag><c>1 x {ssh_cm, ssh_connection_ref(),
{closed, ssh_channel_id()}}</c></tag>
- <item>Indicates that the ssh channel started for the
- execution of the command has now been shutdown.</item>
+ <item><p>Indicates that the <c>ssh_channel</c> started for the
+ execution of the command has now been shut down.</p></item>
</taglist>
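+      <p>For illustration only, a minimal sketch of running a command and
+      receiving the first data message; <c>ConnectionRef</c> and
+      <c>ChannelId</c> are assumed to come from <c>ssh:connect/3</c> and
+      <c>session_channel/2</c>, and the command is an arbitrary example:</p>
+      <code type="none">
+success = ssh_connection:exec(ConnectionRef, ChannelId, "echo hello", 5000),
+receive
+    {ssh_cm, ConnectionRef, {data, ChannelId, 0, Data}} ->
+        io:format("~s", [Data])
+after 5000 ->
+    timeout
+end.
+      </code>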
</desc>
</func>
@@ -264,77 +286,72 @@
<name>exit_status(ConnectionRef, ChannelId, Status) -> ok</name>
<fsummary>Sends the exit status of a command to the client.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref() </v>
- <v> ChannelId = ssh_channel_id()</v>
- <v> Status = integer()</v>
+      <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>ChannelId = ssh_channel_id()</v>
+ <v>Status = integer()</v>
</type>
<desc>
- <p>Should be called by a server channel process to sends the exit status of a command to the client.</p>
+ <p>Is to be called by a server-channel process to send the exit status of a command
+ to the client.</p>
</desc>
</func>
<func>
- <name>ptty_alloc(ConnectionRef, ChannelId, Options, Timeout) -> success | failure</name>
- <fsummary>Send status replies to requests that want such replies. </fsummary>
+ <name>ptty_alloc(ConnectionRef, ChannelId, Options) -></name>
+      <name>ptty_alloc(ConnectionRef, ChannelId, Options, Timeout) -> ssh_request_status() |
+      {error, reason()}</name>
+ <fsummary>Sends an SSH Connection Protocol <c>pty_req</c>,
+ to allocate a pseudo-terminal.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref() </v>
- <v> ChannelId = ssh_channel_id()</v>
- <v> Options = proplists:proplist()</v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>ChannelId = ssh_channel_id()</v>
+      <v>Options = proplists:proplist()</v>
+      <v>Timeout = timeout()</v>
</type>
<desc>
- <p> Sends a SSH Connection Protocol pty_req, to allocate a pseudo tty.
- Should be called by a SSH client process.
- Options are:
- </p>
+ <p>Sends an SSH Connection Protocol <c>pty_req</c>, to allocate a pseudo-terminal.
+ Is to be called by an SSH client process.</p>
+ <p>Options:</p>
<taglist>
<tag>{term, string()}</tag>
- <item>
- Defaults to os:getenv("TERM") or "vt100" if it is undefined.
- </item>
+        <item><p>Defaults to <c>os:getenv("TERM")</c> or <c>vt100</c>
+        if it is undefined.</p></item>
+
<tag>{width, integer()}</tag>
- <item>
- Defaults to 80 if pixel_width is not defined.
- </item>
+ <item><p>Defaults to 80 if <c>pixel_width</c> is not defined.</p></item>
+
<tag>{height, integer()}</tag>
- <item>
- Defaults to 24 if pixel_height is not defined.
- </item>
+ <item><p>Defaults to 24 if <c>pixel_height</c> is not defined.</p></item>
+
<tag>{pixel_width, integer()}</tag>
- <item>
- Is disregarded if width is defined.
- </item>
+ <item><p>Is disregarded if <c>width</c> is defined.</p></item>
+
<tag>{pixel_height, integer()}</tag>
- <item>
- Is disregarded if height is defined.
- </item>
+ <item><p>Is disregarded if <c>height</c> is defined.</p></item>
+
<tag>{pty_opts, [{posix_atom(), integer()}]}</tag>
- <item>
- Option may be an empty list, otherwise
- see possible POSIX names in section 8 in <url href="http://www.ietf.org/rfc/rfc4254.txt"> RFC 4254</url>.
+ <item><p>Option can be an empty list. Otherwise, see possible <em>POSIX</em> names
+ in Section 8 in <url href="http://www.ietf.org/rfc/rfc4254.txt"> RFC 4254</url>.</p>
</item>
</taglist>
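+      <p>For illustration only, a minimal sketch of allocating a pseudo-terminal
+      with explicit dimensions; <c>ConnectionRef</c> and <c>ChannelId</c> are
+      assumed to come from an already established session:</p>
+      <code type="none">
+success = ssh_connection:ptty_alloc(ConnectionRef, ChannelId,
+                                    [{term, "vt100"}, {width, 132}, {height, 50}]).
+      </code>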
-
</desc>
</func>
- <func>
+ <func>
<name>reply_request(ConnectionRef, WantReply, Status, ChannelId) -> ok</name>
- <fsummary>Send status replies to requests that want such replies. </fsummary>
+ <fsummary>Sends status replies to requests that want such replies.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref() </v>
- <v> WantReply = boolean()</v>
- <v> Status = ssh_request_status() </v>
- <v> ChannelId = ssh_channel_id()</v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>WantReply = boolean()</v>
+ <v>Status = ssh_request_status()</v>
+ <v>ChannelId = ssh_channel_id()</v>
</type>
<desc>
<p>Sends status replies to requests where the requester has
- stated that they want a status report e.i .<c> WantReply = true</c>,
- if <c> WantReply</c> is false calling this function will be a
- "noop". Should be called while handling an ssh connection
- protocol message containing a <c>WantReply</c> boolean
- value.
- </p>
+ stated that it wants a status report, that is, <c>WantReply = true</c>.
+ If <c>WantReply</c> is <c>false</c>, calling this function becomes a
+ "noop". Is to be called while handling an SSH Connection
+ Protocol message containing a <c>WantReply</c> boolean value.</p>
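+      <p>For illustration only, a minimal sketch of a server-side
+      <c>Module:handle_ssh_msg/2</c> clause that acknowledges an <c>exec</c>
+      request and then reports a zero exit status; <c>State</c> is the
+      callback state and the reply text is an arbitrary example:</p>
+      <code type="none">
+handle_ssh_msg({ssh_cm, ConnectionRef,
+                {exec, ChannelId, WantReply, _Cmd}}, State) ->
+    %% Acknowledge the request before producing any output
+    ok = ssh_connection:reply_request(ConnectionRef, WantReply, success, ChannelId),
+    ssh_connection:send(ConnectionRef, ChannelId, &lt;&lt;"done\n">>),
+    ssh_connection:exit_status(ConnectionRef, ChannelId, 0),
+    ssh_connection:send_eof(ConnectionRef, ChannelId),
+    {stop, ChannelId, State}.
+      </code>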
</desc>
</func>
@@ -344,98 +361,97 @@
<name>send(ConnectionRef, ChannelId, Type, Data) -></name>
<name>send(ConnectionRef, ChannelId, Type, Data, TimeOut) ->
ok | {error, timeout} | {error, closed}</name>
- <fsummary>Sends channel data </fsummary>
+ <fsummary>Sends channel data.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref() </v>
- <v> ChannelId = ssh_channel_id()</v>
- <v> Data = binary()</v>
- <v> Type = ssh_data_type_code()</v>
- <v> Timeout = timeout()</v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>ChannelId = ssh_channel_id()</v>
+ <v>Data = binary()</v>
+ <v>Type = ssh_data_type_code()</v>
+ <v>Timeout = timeout()</v>
</type>
<desc>
- <p>Should be called by client- and server channel processes to send data to each other.
+ <p>Is to be called by client- and server-channel processes to send data to each other.
</p>
</desc>
</func>
<func>
<name>send_eof(ConnectionRef, ChannelId) -> ok | {error, closed}</name>
- <fsummary>Sends eof on the channel <c>ChannelId</c>. </fsummary>
+ <fsummary>Sends EOF on channel <c>ChannelId</c>.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref() </v>
- <v> ChannelId = ssh_channel_id()</v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>ChannelId = ssh_channel_id()</v>
</type>
<desc>
- <p>Sends eof on the channel <c>ChannelId</c>.
- </p>
+ <p>Sends EOF on channel <c>ChannelId</c>.</p>
</desc>
</func>
<func>
- <name>session_channel(ConnectionRef, Timeout) -> </name>
+ <name>session_channel(ConnectionRef, Timeout) -></name>
<name>session_channel(ConnectionRef, InitialWindowSize,
- MaxPacketSize, Timeout) -> {ok, ssh_channel_id()} | {error, Reason}</name>
- <fsummary>Opens a channel for a ssh session. </fsummary>
+ MaxPacketSize, Timeout) -> {ok, ssh_channel_id()} | {error, reason()}</name>
+ <fsummary>Opens a channel for an SSH session.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref()</v>
- <v> InitialWindowSize = integer() </v>
- <v> MaxPacketSize = integer() </v>
- <v> Timeout = timeout()</v>
- <v> Reason = term() </v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>InitialWindowSize = integer()</v>
+ <v>MaxPacketSize = integer()</v>
+ <v>Timeout = timeout()</v>
+ <v>Reason = term()</v>
</type>
<desc>
<p>Opens a channel for an SSH session. The channel id returned from this function
- is the id used as input to the other funtions in this module.
- </p>
+ is the id used as input to the other functions in this module.</p>
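+      <p>For illustration only, a minimal sketch; the host name, port, and
+      user option are arbitrary examples:</p>
+      <code type="none">
+{ok, ConnectionRef} = ssh:connect("host.example.com", 22, [{user, "foo"}]),
+{ok, ChannelId} = ssh_connection:session_channel(ConnectionRef, 5000),
+%% ChannelId is now used as input to the other functions in this module:
+success = ssh_connection:exec(ConnectionRef, ChannelId, "date", 5000).
+      </code>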
</desc>
</func>
<func>
- <name>setenv(ConnectionRef, ChannelId, Var, Value, TimeOut) -> ssh_request_status()</name>
- <fsummary> Environment variables may be passed to the
+ <name>setenv(ConnectionRef, ChannelId, Var, Value, TimeOut) -> ssh_request_status() |
+ {error, reason()}</name>
+ <fsummary>Environment variables can be passed to the
shell/command to be started later.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref() </v>
- <v> ChannelId = ssh_channel_id()</v>
- <v> Var = string()</v>
- <v> Value = string()</v>
- <v> Timeout = timeout()</v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>ChannelId = ssh_channel_id()</v>
+ <v>Var = string()</v>
+ <v>Value = string()</v>
+ <v>Timeout = timeout()</v>
</type>
<desc>
- <p> Environment variables may be passed before starting the
- shell/command. Should be called by a client channel processes.
- </p>
+ <p>Environment variables can be passed before starting the
+      shell/command. Is to be called by a client channel process.</p>
</desc>
</func>
<func>
- <name>shell(ConnectionRef, ChannelId) -> ssh_request_status()
+ <name>shell(ConnectionRef, ChannelId) -> ssh_request_status() | {error, closed}
</name>
- <fsummary> Requests that the user's default shell (typically
- defined in /etc/passwd in UNIX systems) shall be executed at the server
- end. </fsummary>
+ <fsummary>Requests that the user default shell (typically defined in
+ /etc/passwd in Unix systems) is to be executed at the server end.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref() </v>
- <v> ChannelId = ssh_channel_id()</v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>ChannelId = ssh_channel_id()</v>
</type>
<desc>
- <p> Should be called by a client channel process to request that the user's default shell (typically
- defined in /etc/passwd in UNIX systems) shall be executed at the server end.
- </p>
+ <p>Is to be called by a client channel process to request that the user default
+ shell (typically defined in /etc/passwd in Unix systems) is executed
+ at the server end.</p>
</desc>
</func>
<func>
- <name>subsystem(ConnectionRef, ChannelId, Subsystem, Timeout) -> ssh_request_status()</name>
- <fsummary> </fsummary>
+ <name>subsystem(ConnectionRef, ChannelId, Subsystem, Timeout) -> ssh_request_status() |
+ {error, reason()}</name>
+ <fsummary>Requests to execute a predefined subsystem on the server.</fsummary>
<type>
- <v> ConnectionRef = ssh_connection_ref() </v>
- <v> ChannelId = ssh_channel_id()</v>
- <v> Subsystem = string()</v>
- <v> Timeout = timeout()</v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>ChannelId = ssh_channel_id()</v>
+ <v>Subsystem = string()</v>
+ <v>Timeout = timeout()</v>
</type>
<desc>
- <p> Should be called by a client channel process for requesting to execute a predefined subsystem on the server.
+      <p>Is to be called by a client-channel process to request execution of a
+      predefined subsystem on the server.
</p>
</desc>
</func>
diff --git a/lib/ssh/doc/src/ssh_server_key_api.xml b/lib/ssh/doc/src/ssh_server_key_api.xml
index f7133e4ba5..73dd90c962 100644
--- a/lib/ssh/doc/src/ssh_server_key_api.xml
+++ b/lib/ssh/doc/src/ssh_server_key_api.xml
@@ -23,68 +23,81 @@
The Initial Developer of the Original Code is Ericsson AB.
</legalnotice>
<title>ssh_server_key_api</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
</header>
<module>ssh_server_key_api</module>
<modulesummary>
-behaviour(ssh_server_key_api).
</modulesummary>
<description>
- <p> Behaviour describing the API for an SSH server's public key handling. By implementing the callbacks defined
- in this behavior it is possible to customize the SSH server's public key
- handling. By default the SSH application implements this behavior
- with help of the standard openssh files, see <seealso marker="SSH_app"> ssh(6)</seealso>.</p>
+ <p>Behaviour describing the API for public key handling of an SSH server. By implementing
+ the callbacks defined in this behavior, the public key handling of an SSH server can
+ be customized. By default the SSH application implements this behavior
+ with help of the standard OpenSSH files,
+ see the <seealso marker="SSH_app"> ssh(6)</seealso> application manual.</p>
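+    <p>For illustration only, a minimal sketch of a callback module; it assumes
+    that the default <c>ssh_file</c> implementation is available to fall back
+    on, and the module name and the special-cased user are arbitrary examples:</p>
+    <code type="none">
+-module(my_server_key_api).
+-behaviour(ssh_server_key_api).
+-export([host_key/2, is_auth_key/3]).
+
+%% Use the host key files handled by the default implementation.
+host_key(Algorithm, DaemonOptions) ->
+    ssh_file:host_key(Algorithm, DaemonOptions).
+
+%% Reject public key login for "root", otherwise fall back to the
+%% normal authorized_keys handling.
+is_auth_key(_Key, "root", _DaemonOptions) ->
+    false;
+is_auth_key(Key, User, DaemonOptions) ->
+    ssh_file:is_auth_key(Key, User, DaemonOptions).
+    </code>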
</description>
<section>
- <title>DATA TYPES </title>
+ <title>DATA TYPES</title>
- <p>Type definitions that are used more than once in this module
- and/or abstractions to indicate the intended use of the data
- type. For more details on public key data types
- see the <seealso marker="public_key:public_key_records"> public_key user's guide.</seealso>
+ <p>Type definitions that are used more than once in this module,
+ or abstractions to indicate the intended use of the data
+ type, or both. For more details on public key data types,
+ refer to Section 2 Public Key Records in the
+ <seealso marker="public_key:public_key_records"> public_key user's guide</seealso>.
</p>
- <p> boolean() = true | false</p>
- <p> string() = [byte()]</p>
- <p> public_key() = #'RSAPublicKey'{} | {integer(), #'Dss-Parms'{}} | term()</p>
- <p> private_key() = #'RSAPrivateKey'{} | #'DSAPrivateKey'{} | term()</p>
- <p> public_key_algorithm() = 'ssh-rsa' | 'ssh-dss' | atom()</p>
+ <taglist>
+ <tag><c>boolean() =</c></tag>
+ <item><p><c>true | false</c></p></item>
+ <tag><c>string() =</c></tag>
+ <item><p><c>[byte()]</c></p></item>
+ <tag><c>public_key() =</c></tag>
+ <item><p><c>#'RSAPublicKey'{}| {integer(), #'Dss-Parms'{}}| term()</c></p></item>
+ <tag><c>private_key() =</c></tag>
+ <item><p><c>#'RSAPrivateKey'{} | #'DSAPrivateKey'{} | term()</c></p></item>
+ <tag><c>public_key_algorithm() =</c></tag>
+ <item><p><c>'ssh-rsa'| 'ssh-dss' | atom()</c></p></item>
+ </taglist>
</section>
-
+
<funcs>
<func>
<name>Module:host_key(Algorithm, DaemonOptions) ->
{ok, Key} | {error, Reason}</name>
- <fsummary>Fetches the hosts private key </fsummary>
+      <fsummary>Fetches the private key of the host.</fsummary>
<type>
<v>Algorithm = public_key_algorithm()</v>
- <d> Host key algorithm. Should support 'ssh-rsa' | 'ssh-dss' but additional algorithms
+ <d>Host key algorithm. Is to support <c>'ssh-rsa' | 'ssh-dss'</c>, but more algorithms
can be handled.</d>
- <v> DaemonOptions = proplists:proplist() </v>
- <d>Options provided to <seealso marker="ssh#daemon-2">ssh:daemon/[2,3]</seealso></d>
- <v> Key = private_key()</v>
- <d> The private key of the host matching the <c>Algorithm</c></d>
- <v>Reason = term() </v>
+ <v>DaemonOptions = proplists:proplist()</v>
+ <d>Options provided to <seealso marker="ssh#daemon-2">ssh:daemon/[2,3]</seealso>.</d>
+ <v>Key = private_key()</v>
+ <d>Private key of the host matching the <c>Algorithm</c>.</d>
+ <v>Reason = term()</v>
</type>
<desc>
- <p>Fetches the hosts private key</p>
+ <p>Fetches the private key of the host.</p>
</desc>
</func>
<func>
<name>Module:is_auth_key(Key, User, DaemonOptions) -> Result</name>
- <fsummary> Checks if the user key is authorized</fsummary>
+ <fsummary>Checks if the user key is authorized.</fsummary>
<type>
- <v> Key = public_key() </v>
- <d> Normally an RSA or DSA public key but handling of other public keys can be added</d>
- <v> User = string()</v>
- <d> The user owning the public key</d>
- <v> DaemonOptions = proplists:proplist() </v>
- <d> Options provided to <seealso marker="ssh#daemon-2">ssh:daemon/[2,3]</seealso></d>
- <v> Result = boolean()</v>
+ <v>Key = public_key()</v>
+      <d>Normally an RSA or DSA public key, but handling of other public keys can be added.</d>
+ <v>User = string()</v>
+ <d>User owning the public key.</d>
+ <v>DaemonOptions = proplists:proplist()</v>
+ <d>Options provided to <seealso marker="ssh#daemon-2">ssh:daemon/[2,3]</seealso>.</d>
+ <v>Result = boolean()</v>
</type>
<desc>
- <p> Checks if the user key is authorized </p>
+ <p>Checks if the user key is authorized.</p>
</desc>
</func>
diff --git a/lib/ssh/doc/src/ssh_sftp.xml b/lib/ssh/doc/src/ssh_sftp.xml
index ab111562f9..fc418bc934 100644
--- a/lib/ssh/doc/src/ssh_sftp.xml
+++ b/lib/ssh/doc/src/ssh_sftp.xml
@@ -23,131 +23,173 @@
<title>ssh_sftp</title>
<prepared>OTP</prepared>
+ <docno></docno>
<date>2005-09-22</date>
+ <rev></rev>
<file>ssh_sftp.sgml</file>
</header>
<module>ssh_sftp</module>
<modulesummary>SFTP client.</modulesummary>
<description>
- <p>This module implements an SFTP (SSH FTP) client. SFTP is a
+ <p>This module implements an SSH FTP (SFTP) client. SFTP is a
secure, encrypted file transfer service available for
SSH.</p>
</description>
<section>
- <title>DATA TYPES </title>
- <p>Type definitions that are used more than once in this module
- and/or abstractions to indicate the intended use of the data type:
+ <title>DATA TYPES</title>
+ <p>Type definitions that are used more than once in this module,
+ or abstractions to indicate the intended use of the data type, or both:
</p>
- <p><c>ssh_connection_ref() - opaque to the user
- returned by ssh:connect/3</c></p>
- <p><c>timeout() = infinity | integer() - in milliseconds.</c></p>
+
+ <taglist>
+ <tag><c>ssh_connection_ref() =</c></tag>
+ <item><p>opaque() - as returned by <c>ssh:connect/3</c></p></item>
+      <tag><c>timeout() =</c></tag>
+      <item><p><c>infinity | integer()</c> in milliseconds. Default is <c>infinity</c>.</p></item>
+ </taglist>
</section>
<section>
- <title>TIMEOUTS </title>
- <p>If the request functions for the SFTP channel return {error, timeout}
- it does not guarantee that the request did not reach the server and was
- not performed, it only means that we did not receive an answer from the
- server within the time that was expected.</p>
+ <title>Time-outs</title>
+ <p>If the request functions for the SFTP channel return <c>{error, timeout}</c>,
+ it does not guarantee that the request never reached the server and was
+ not performed. It only means that no answer was received from the
+ server within the expected time.</p>
</section>
<funcs>
+ <func>
+      <name>apread(ChannelPid, Handle, Position, Len) -> {async, N} | {error, Error}</name>
+      <fsummary>Reads asynchronously from a specified position in an open file.</fsummary>
+      <type>
+        <v>ChannelPid = pid()</v>
+        <v>Handle = term()</v>
+        <v>Position = integer()</v>
+        <v>Len = integer()</v>
+        <v>N = term()</v>
+        <v>Reason = term()</v>
+      </type>
+      <desc><p>The <c><![CDATA[apread]]></c> function reads from a specified position,
+      combining the <c><![CDATA[position]]></c> and <c><![CDATA[aread]]></c> functions.</p>
+      <p><seealso marker="#aread-3">ssh_sftp:aread/3</seealso></p></desc>
+ </func>
+
+ <func>
+ <name>apwrite(ChannelPid, Handle, Position, Data) -> ok | {error, Reason}</name>
+ <fsummary>Writes asynchronously to an open file.</fsummary>
+ <type>
+ <v>ChannelPid = pid()</v>
+ <v>Handle = term()</v>
+ <v>Position = integer()</v>
+ <v>Len = integer()</v>
+ <v>Data = binary()</v>
+ <v>Timeout = timeout()</v>
+ <v>Reason = term()</v>
+ </type>
+ <desc>
+ <p><c><![CDATA[apwrite]]></c> writes on a specified position, combining
+ the <c><![CDATA[position]]></c> and <c><![CDATA[awrite]]></c> operations.</p>
+ <p><seealso marker="#awrite-3">ssh_sftp:awrite/3</seealso> </p></desc>
+ </func>
+
+ <func>
+ <name>aread(ChannelPid, Handle, Len) -> {async, N} | {error, Error}</name>
+ <fsummary>Reads asynchronously from an open file.</fsummary>
+ <type>
+ <v>ChannelPid = pid()</v>
+ <v>Handle = term()</v>
+ <v>Position = integer()</v>
+ <v>Len = integer()</v>
+ <v>N = term()</v>
+ <v>Reason = term()</v>
+ </type>
+ <desc>
+ <p>Reads from an open file, without waiting for the result. If the
+ handle is valid, the function returns <c><![CDATA[{async, N}]]></c>, where <c>N</c>
+ is a term guaranteed to be unique between calls of <c><![CDATA[aread]]></c>.
+ The actual data is sent as a message to the calling process. This
+ message has the form <c><![CDATA[{async_reply, N, Result}]]></c>, where
+ <c><![CDATA[Result]]></c> is the result from the read, either <c><![CDATA[{ok, Data}]]></c>,
+ <c><![CDATA[eof]]></c>, or <c><![CDATA[{error, Error}]]></c>.</p>
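+      <p>For illustration only, a minimal sketch of collecting the asynchronous
+      reply; <c>ChannelPid</c> and <c>Handle</c> are assumed to come from
+      <c>start_channel</c> and <c>open</c>:</p>
+      <code type="none">
+{async, N} = ssh_sftp:aread(ChannelPid, Handle, 1024),
+receive
+    {async_reply, N, {ok, Data}}     -> {ok, Data};
+    {async_reply, N, eof}            -> eof;
+    {async_reply, N, {error, Error}} -> {error, Error}
+end.
+      </code>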
+ </desc>
+ </func>
+
+
+
<func>
- <name>start_channel(ConnectionRef) -> </name>
- <name>start_channel(ConnectionRef, Options) -> </name>
- <name>start_channel(Host, Options) -></name>
- <name>start_channel(Host, Port, Options) -> {ok, Pid} | {ok, Pid, ConnectionRef} |
- {error, Reason}</name>
- <fsummary>Starts a SFTP client</fsummary>
+ <name>awrite(ChannelPid, Handle, Data) -> ok | {error, Reason}</name>
+ <fsummary>Writes asynchronously to an open file.</fsummary>
<type>
- <v>Host = string()</v>
- <v>ConnectionRef = ssh_connection_ref()</v>
- <v>Port = integer()</v>
- <v>Options = [{Option, Value}]</v>
+ <v>ChannelPid = pid()</v>
+ <v>Handle = term()</v>
+ <v>Position = integer()</v>
+ <v>Len = integer()</v>
+ <v>Data = binary()</v>
+ <v>Timeout = timeout()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>If no connection reference is provided, a connection is set
- up and the new connection is returned. An SSH channel process
- is started to handle the communication with the SFTP server.
- The returned pid for this process should be used as input to
- all other API functions in this module.</p>
-
- <p>Options are:</p>
- <taglist>
- <tag><c><![CDATA[{timeout, timeout()}]]></c></tag>
- <item>
- <p>The timeout is passed to the ssh_channel start function,
- and defaults to infinity.</p>
- </item>
- <tag>
- <p><c><![CDATA[{sftp_vsn, integer()}]]></c></p>
- </tag>
- <item>
- <p>
- Desired SFTP protocol version.
- The actual version will be the minimum of
- the desired version and the maximum supported
- versions by the SFTP server.
- </p>
- </item>
- </taglist>
- <p>All other options are directly passed to
- <seealso marker="ssh">ssh:connect/3</seealso> or ignored if a
- connection is already provided. </p>
+ <p>Writes to an open file, without waiting for the result. If the
+ handle is valid, the function returns <c><![CDATA[{async, N}]]></c>, where <c>N</c>
+ is a term guaranteed to be unique between calls of
+ <c><![CDATA[awrite]]></c>. The result of the <c><![CDATA[write]]></c> operation is sent
+ as a message to the calling process. This message has the form
+ <c><![CDATA[{async_reply, N, Result}]]></c>, where <c><![CDATA[Result]]></c> is the result
+ from the write, either <c><![CDATA[ok]]></c>, or <c><![CDATA[{error, Error}]]></c>.</p>
</desc>
</func>
<func>
- <name>stop_channel(ChannelPid) -> ok</name>
- <fsummary>Stops the SFTP client channel.</fsummary>
+ <name>close(ChannelPid, Handle) -></name>
+ <name>close(ChannelPid, Handle, Timeout) -> ok | {error, Reason}</name>
+ <fsummary>Closes an open handle.</fsummary>
<type>
<v>ChannelPid = pid()</v>
+ <v>Handle = term()</v>
+ <v>Timeout = timeout()</v>
+ <v>Reason = term()</v>
</type>
<desc>
- <p>Stops an SFTP channel. Does not close the SSH connetion.
- Use <seealso marker="ssh">ssh:close/1</seealso> to close it.</p>
+ <p>Closes a handle to an open file or directory on the server.</p>
</desc>
</func>
-
+
<func>
- <name>read_file(ChannelPid, File) -> </name>
- <name>read_file(ChannelPid, File, Timeout) -> {ok, Data} | {error, Reason}</name>
- <fsummary>Read a file</fsummary>
+ <name>delete(ChannelPid, Name) -></name>
+ <name>delete(ChannelPid, Name, Timeout) -> ok | {error, Reason}</name>
+ <fsummary>Deletes a file.</fsummary>
<type>
- <v>ChannelPid = pid()</v>
- <v>File = string()</v>
- <v>Data = binary()</v>
+ <v>ChannelPid = pid()</v>
+ <v>Name = string()</v>
<v>Timeout = timeout()</v>
- <v>Reason = term()</v>
+ <v>Reason = term()</v>
</type>
<desc>
- <p>Reads a file from the server, and returns the data in a binary,
- like <c><![CDATA[file:read_file/1]]></c>.</p>
+ <p>Deletes the file specified by <c><![CDATA[Name]]></c>, like
+        <seealso marker="kernel:file#delete-1">file:delete/1</seealso>.</p>
</desc>
</func>
+
<func>
- <name>write_file(ChannelPid, File, Iolist) -> </name>
- <name>write_file(ChannelPid, File, Iolist, Timeout) -> ok | {error, Reason}</name>
- <fsummary>Write a file</fsummary>
+ <name>del_dir(ChannelPid, Name) -></name>
+ <name>del_dir(ChannelPid, Name, Timeout) -> ok | {error, Reason}</name>
+ <fsummary>Deletes an empty directory.</fsummary>
<type>
<v>ChannelPid = pid()</v>
- <v>File = string()</v>
- <v>Iolist = iolist()</v>
+ <v>Name = string()</v>
<v>Timeout = timeout()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>Writes a file to the server, like
- <c><![CDATA[file:write_file/2]]></c>. The file is created if
- it does not exist or is owerwritten if it does.</p>
+ <p>Deletes a directory specified by <c><![CDATA[Name]]></c>.
+ The directory must be empty before it can be successfully deleted.
+ </p>
</desc>
</func>
- <func>
- <name>list_dir(ChannelPid, Path) -> </name>
+
+ <func>
+ <name>list_dir(ChannelPid, Path) -></name>
<name>list_dir(ChannelPid, Path, Timeout) -> {ok, Filenames} | {error, Reason}</name>
- <fsummary>List directory</fsummary>
+ <fsummary>Lists the directory.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>Path = string()</v>
@@ -161,10 +203,45 @@
filenames as a list of strings.</p>
</desc>
</func>
+
+ <func>
+ <name>make_dir(ChannelPid, Name) -></name>
+ <name>make_dir(ChannelPid, Name, Timeout) -> ok | {error, Reason}</name>
+ <fsummary>Creates a directory.</fsummary>
+ <type>
+ <v>ChannelPid = pid()</v>
+ <v>Name = string()</v>
+ <v>Timeout = timeout()</v>
+ <v>Reason = term()</v>
+ </type>
+ <desc>
+ <p>Creates a directory specified by <c><![CDATA[Name]]></c>. <c><![CDATA[Name]]></c>
+ must be a full path to a new directory. The directory can only be
+ created in an existing directory.</p>
+ </desc>
+ </func>
+
<func>
- <name>open(ChannelPid, File, Mode) -> </name>
+ <name>make_symlink(ChannelPid, Name, Target) -></name>
+ <name>make_symlink(ChannelPid, Name, Target, Timeout) -> ok | {error, Reason}</name>
+ <fsummary>Creates a symbolic link.</fsummary>
+ <type>
+ <v>ChannelPid = pid()</v>
+ <v>Name = string()</v>
+ <v>Target = string()</v>
+ <v>Reason = term()</v>
+ </type>
+ <desc>
+ <p>Creates a symbolic link pointing to <c><![CDATA[Target]]></c> with the
+ name <c><![CDATA[Name]]></c>, like
+        <seealso marker="kernel:file#make_symlink-2">file:make_symlink/2</seealso>.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name>open(ChannelPid, File, Mode) -></name>
<name>open(ChannelPid, File, Mode, Timeout) -> {ok, Handle} | {error, Reason}</name>
- <fsummary>Open a file and return a handle</fsummary>
+ <fsummary>Opens a file and returns a handle.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>File = string()</v>
@@ -175,14 +252,14 @@
<v>Reason = term()</v>
</type>
<desc>
- <p>Opens a file on the server, and returns a handle that
+ <p>Opens a file on the server and returns a handle, which
can be used for reading or writing.</p>
</desc>
</func>
<func>
- <name>opendir(ChannelPid, Path) -> </name>
+ <name>opendir(ChannelPid, Path) -></name>
<name>opendir(ChannelPid, Path, Timeout) -> {ok, Handle} | {error, Reason}</name>
- <fsummary>Open a directory and return a handle</fsummary>
+ <fsummary>Opens a directory and returns a handle.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>Path = string()</v>
@@ -190,7 +267,7 @@
<v>Reason = term()</v>
</type>
<desc>
- <p>Opens a handle to a directory on the server, the handle
+ <p>Opens a handle to a directory on the server. The handle
can be used for reading directory contents.</p>
</desc>
</func>
@@ -198,14 +275,15 @@
<func>
<name>open_tar(ChannelPid, Path, Mode) -></name>
<name>open_tar(ChannelPid, Path, Mode, Timeout) -> {ok, Handle} | {error, Reason}</name>
- <fsummary>Opens a tar file on the server to which <v>ChannelPid</v> is connected and returns a handle</fsummary>
+ <fsummary>Opens a tar file on the server to which <c>ChannelPid</c>
+ is connected and returns a handle.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>Path = string()</v>
- <v>Mode = [read] | [write] | [read,EncryptOpt] | [write,DecryptOpt] </v>
+ <v>Mode = [read] | [write] | [read,EncryptOpt] | [write,DecryptOpt]</v>
<v>EncryptOpt = {crypto,{InitFun,EncryptFun,CloseFun}}</v>
<v>DecryptOpt = {crypto,{InitFun,DecryptFun}}</v>
- <v>InitFun = (fun() -> {ok,CryptoState}) | (fun() -> {ok,CryptoState,ChunkSize}) </v>
+ <v>InitFun = (fun() -> {ok,CryptoState}) | (fun() -> {ok,CryptoState,ChunkSize})</v>
<v>CryptoState = any()</v>
<v>ChunkSize = undefined | pos_integer()</v>
<v>EncryptFun = (fun(PlainBin,CryptoState) -> EncryptResult)</v>
@@ -219,113 +297,86 @@
<v>Reason = term()</v>
</type>
<desc>
- <p>Opens a handle to a tar file on the server associated with <c>ChannelPid</c>. The handle
- can be used for remote tar creation and extraction as defined by the
- <seealso marker="stdlib:erl_tar#init/3">erl_tar:init/3</seealso> function.
+ <p>Opens a handle to a tar file on the server, associated with <c>ChannelPid</c>.
+ The handle can be used for remote tar creation and extraction, as defined by the
+ <seealso marker="stdlib:erl_tar#init-3">erl_tar:init/3</seealso> function.
</p>
- <p>An example of writing and then reading a tar file:</p>
- <code type="none">
- {ok,HandleWrite} = ssh_sftp:open_tar(ChannelPid, ?tar_file_name, [write]),
- ok = erl_tar:add(HandleWrite, .... ),
- ok = erl_tar:add(HandleWrite, .... ),
- ...
- ok = erl_tar:add(HandleWrite, .... ),
- ok = erl_tar:close(HandleWrite),
-
- %% And for reading
- {ok,HandleRead} = ssh_sftp:open_tar(ChannelPid, ?tar_file_name, [read]),
- {ok,NameValueList} = erl_tar:extract(HandleRead,[memory]),
- ok = erl_tar:close(HandleRead),
- </code>
-
- <p>The <c>crypto</c> mode option is applied to the generated stream of bytes just prior to sending
- them to the sftp server. This is intended for encryption but could of course be used for other
+
+      <p>For a code example, see Section
+      <seealso marker="using_ssh">SFTP Client with TAR Compression and Encryption</seealso> in
+      the ssh User's Guide.</p>
+
+ <p>The <c>crypto</c> mode option is applied to the generated stream of bytes prior to sending
+ them to the SFTP server. This is intended for encryption but can be used for other
purposes.
</p>
<p>The <c>InitFun</c> is applied once
- prior to any other crypto operation. The returned <c>CryptoState</c> is then folded into
- repeated applications of the <c>EncryptFun</c> or <c>DecryptFun</c>. The binary returned
- from those Funs are sent further to the remote sftp server. Finally - if doing encryption
- - the <c>CloseFun</c> is applied to the last piece of data. The <c>CloseFun</c> is
+ prior to any other <c>crypto</c> operation. The returned <c>CryptoState</c> is then folded into
+ repeated applications of the <c>EncryptFun</c> or <c>DecryptFun</c>. The binary returned
+      repeated applications of the <c>EncryptFun</c> or <c>DecryptFun</c>. The binaries returned
+      from these funs are sent further to the remote SFTP server. Finally, if doing encryption,
responsible for padding (if needed) and encryption of that last piece.
</p>
<p>The <c>ChunkSize</c> defines the size of the <c>PlainBin</c>s that <c>EncodeFun</c> is applied
- to. If the <c>ChunkSize</c> is <c>undefined</c> the size of the <c>PlainBin</c>s varies because
- this is inteded for stream crypto while a fixed <c>ChunkSize</c> is intended for block crypto. It
- is possible to change the <c>ChunkSize</c>s in the return from the <c>EncryptFun</c> or
- <c>DecryptFun</c>. It is in fact possible to change the value between <c>pos_integer()</c> and
- <c>undefined</c>.
+ to. If the <c>ChunkSize</c> is <c>undefined</c>, the size of the <c>PlainBin</c>s varies,
+ because this is intended for stream crypto, whereas a fixed <c>ChunkSize</c> is intended for block crypto.
+ <c>ChunkSize</c>s can be changed in the return from the <c>EncryptFun</c> or
+ <c>DecryptFun</c>. The value can be changed between <c>pos_integer()</c> and <c>undefined</c>.
</p>
- <p>The write and read example above can be extended with encryption and decryption:</p>
- <code type="none">
- %% First three parameters depending on which crypto type we select:
- Key = &lt;&lt;"This is a 256 bit key. abcdefghi">>,
- Ivec0 = crypto:rand_bytes(16),
- DataSize = 1024, % DataSize rem 16 = 0 for aes_cbc
-
- %% Initialization of the CryptoState, in this case it is the Ivector.
- InitFun = fun() -> {ok, Ivec0, DataSize} end,
-
- %% How to encrypt:
- EncryptFun =
- fun(PlainBin,Ivec) ->
- EncryptedBin = crypto:block_encrypt(aes_cbc256, Key, Ivec, PlainBin),
- {ok, EncryptedBin, crypto:next_iv(aes_cbc,EncryptedBin)}
- end,
-
- %% What to do with the very last block:
- CloseFun =
- fun(PlainBin, Ivec) ->
- EncryptedBin = crypto:block_encrypt(aes_cbc256, Key, Ivec,
- pad(16,PlainBin) %% Last chunk
- ),
- {ok, EncryptedBin}
- end,
-
- Cw = {InitFun,EncryptFun,CloseFun},
- {ok,HandleWrite} = ssh_sftp:open_tar(ChannelPid, ?tar_file_name, [write,{crypto,Cw}]),
- ok = erl_tar:add(HandleWrite, .... ),
- ok = erl_tar:add(HandleWrite, .... ),
- ...
- ok = erl_tar:add(HandleWrite, .... ),
- ok = erl_tar:close(HandleWrite),
-
- %% And for decryption (in this crypto example we could use the same InitFun
- %% as for encryption):
- DecryptFun =
- fun(EncryptedBin,Ivec) ->
- PlainBin = crypto:block_decrypt(aes_cbc256, Key, Ivec, EncryptedBin),
- {ok, PlainBin, crypto:next_iv(aes_cbc,EncryptedBin)}
- end,
-
- Cr = {InitFun,DecryptFun},
- {ok,HandleRead} = ssh_sftp:open_tar(ChannelPid, ?tar_file_name, [read,{crypto,Cw}]),
- {ok,NameValueList} = erl_tar:extract(HandleRead,[memory]),
- ok = erl_tar:close(HandleRead),
- </code>
+
</desc>
</func>
<func>
- <name>close(ChannelPid, Handle) -> </name>
- <name>close(ChannelPid, Handle, Timeout) -> ok | {error, Reason}</name>
- <fsummary>Close an open handle</fsummary>
+ <name>position(ChannelPid, Handle, Location) -></name>
+      <name>position(ChannelPid, Handle, Location, Timeout) -> {ok, NewPosition} | {error, Error}</name>
+ <fsummary>Sets the file position of a file.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>Handle = term()</v>
+ <v>Location = Offset
+ | {bof, Offset} | {cur, Offset} | {eof, Offset} | bof | cur | eof</v>
+ <v>Offset = integer()</v>
<v>Timeout = timeout()</v>
+ <v>NewPosition = integer()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>Closes a handle to an open file or directory on the server.</p>
+ <p>Sets the file position of the file referenced by <c><![CDATA[Handle]]></c>.
+ Returns <c><![CDATA[{ok, NewPosition}]]></c> (as an absolute offset) if
+ successful, otherwise <c><![CDATA[{error, Reason}]]></c>. <c><![CDATA[Location]]></c> is
+ one of the following:</p>
+ <taglist>
+ <tag><c><![CDATA[Offset]]></c></tag>
+ <item>
+ <p>The same as <c><![CDATA[{bof, Offset}]]></c>.</p>
+ </item>
+ <tag><c><![CDATA[{bof, Offset}]]></c></tag>
+ <item>
+ <p>Absolute offset.</p>
+ </item>
+ <tag><c><![CDATA[{cur, Offset}]]></c></tag>
+ <item>
+ <p>Offset from the current position.</p>
+ </item>
+ <tag><c><![CDATA[{eof, Offset}]]></c></tag>
+ <item>
+ <p>Offset from the end of file.</p>
+ </item>
+ <tag><c><![CDATA[bof | cur | eof]]></c></tag>
+ <item>
+          <p>The same as earlier with <c><![CDATA[Offset]]></c> 0,
+ that is, <c><![CDATA[{bof, 0} | {cur, 0} | {eof, 0}]]></c>.
+ </p>
+ </item>
+ </taglist>
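+      <p>For illustration only, a minimal sketch; <c>ChannelPid</c> and
+      <c>Handle</c> are assumed to come from <c>start_channel</c> and
+      <c>open</c>:</p>
+      <code type="none">
+%% Seek to 16 bytes before the end of the file and read the tail
+{ok, _NewPosition} = ssh_sftp:position(ChannelPid, Handle, {eof, -16}),
+{ok, Tail} = ssh_sftp:read(ChannelPid, Handle, 16).
+      </code>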
</desc>
</func>
+
<func>
- <name>read(ChannelPid, Handle, Len) -> </name>
- <name>read(ChannelPid, Handle, Len, Timeout) -> {ok, Data} | eof | {error, Error}</name>
- <name>pread(ChannelPid, Handle, Position, Len) -> </name>
+ <name>pread(ChannelPid, Handle, Position, Len) -></name>
<name>pread(ChannelPid, Handle, Position, Len, Timeout) -> {ok, Data} | eof | {error, Error}</name>
- <fsummary>Read from an open file</fsummary>
+ <fsummary>Reads from an open file.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>Handle = term()</v>
@@ -336,47 +387,16 @@
<v>Reason = term()</v>
</type>
<desc>
- <p>Reads <c><![CDATA[Len]]></c> bytes from the file referenced by
- <c><![CDATA[Handle]]></c>. Returns <c><![CDATA[{ok, Data}]]></c>, <c><![CDATA[eof]]></c>, or
- <c><![CDATA[{error, Reason}]]></c>. If the file is opened with <c><![CDATA[binary]]></c>,
- <c><![CDATA[Data]]></c> is a binary, otherwise it is a string.</p>
- <p>If the file is read past eof, only the remaining bytes
- will be read and returned. If no bytes are read, <c><![CDATA[eof]]></c>
- is returned.</p>
- <p>The <c><![CDATA[pread]]></c> function reads from a specified position,
- combining the <c><![CDATA[position]]></c> and <c><![CDATA[read]]></c> functions.</p>
- </desc>
- </func>
- <func>
- <name>aread(ChannelPid, Handle, Len) -> {async, N} | {error, Error}</name>
- <name>apread(ChannelPid, Handle, Position, Len) -> {async, N} | {error, Error}</name>
- <fsummary>Read asynchronously from an open file</fsummary>
- <type>
- <v>ChannelPid = pid()</v>
- <v>Handle = term()</v>
- <v>Position = integer()</v>
- <v>Len = integer()</v>
- <v>N = term()</v>
- <v>Reason = term()</v>
- </type>
- <desc>
- <p>Reads from an open file, without waiting for the result. If the
- handle is valid, the function returns <c><![CDATA[{async, N}]]></c>, where N
- is a term guaranteed to be unique between calls of <c><![CDATA[aread]]></c>.
- The actual data is sent as a message to the calling process. This
- message has the form <c><![CDATA[{async_reply, N, Result}]]></c>, where
- <c><![CDATA[Result]]></c> is the result from the read, either <c><![CDATA[{ok, Data}]]></c>,
- or <c><![CDATA[eof]]></c>, or <c><![CDATA[{error, Error}]]></c>.</p>
- <p>The <c><![CDATA[apread]]></c> function reads from a specified position,
- combining the <c><![CDATA[position]]></c> and <c><![CDATA[aread]]></c> functions.</p>
+ <p>The <c><![CDATA[pread]]></c> function reads from a specified position,
+ combining the <c><![CDATA[position]]></c> and <c><![CDATA[read]]></c> functions.</p>
+ <p><seealso marker="#read-4">ssh_sftp:read/4</seealso></p>
</desc>
</func>
+
<func>
- <name>write(ChannelPid, Handle, Data) -></name>
- <name>write(ChannelPid, Handle, Data, Timeout) -> ok | {error, Error}</name>
- <name>pwrite(ChannelPid, Handle, Position, Data) -> ok </name>
+ <name>pwrite(ChannelPid, Handle, Position, Data) -> ok</name>
<name>pwrite(ChannelPid, Handle, Position, Data, Timeout) -> ok | {error, Error}</name>
- <fsummary>Write to an open file</fsummary>
+ <fsummary>Writes to an open file.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>Handle = term()</v>
@@ -386,94 +406,59 @@
<v>Reason = term()</v>
</type>
<desc>
- <p>Writes<c><![CDATA[data]]></c> to the file referenced by <c><![CDATA[Handle]]></c>.
- The file should be opened with <c><![CDATA[write]]></c> or <c><![CDATA[append]]></c>
- flag. Returns <c><![CDATA[ok]]></c> if successful or S<c><![CDATA[{error, Reason}]]></c>
- otherwise.</p>
- <p>Typical error reasons are:</p>
- <taglist>
- <tag><c><![CDATA[ebadf]]></c></tag>
- <item>
- <p>The file is not opened for writing.</p>
- </item>
- <tag><c><![CDATA[enospc]]></c></tag>
- <item>
- <p>There is a no space left on the device.</p>
- </item>
- </taglist>
+        <p>The <c><![CDATA[pwrite]]></c> function writes to a specified position,
+        combining the <c><![CDATA[position]]></c> and <c><![CDATA[write]]></c> functions.</p>
+        <p><seealso marker="#write-3">ssh_sftp:write/3</seealso></p>
</desc>
</func>
- <func>
- <name>awrite(ChannelPid, Handle, Data) -> ok | {error, Reason} </name>
- <name>apwrite(ChannelPid, Handle, Position, Data) -> ok | {error, Reason}</name>
- <fsummary>Write asynchronously to an open file</fsummary>
+
+
+ <func>
+ <name>read(ChannelPid, Handle, Len) -></name>
+ <name>read(ChannelPid, Handle, Len, Timeout) -> {ok, Data} | eof | {error, Error}</name>
+ <fsummary>Reads from an open file.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>Handle = term()</v>
<v>Position = integer()</v>
<v>Len = integer()</v>
- <v>Data = binary()</v>
<v>Timeout = timeout()</v>
+ <v>Data = string() | binary()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>Writes to an open file, without waiting for the result. If the
- handle is valid, the function returns <c><![CDATA[{async, N}]]></c>, where N
- is a term guaranteed to be unique between calls of
- <c><![CDATA[awrite]]></c>. The result of the <c><![CDATA[write]]></c> operation is sent
- as a message to the calling process. This message has the form
- <c><![CDATA[{async_reply, N, Result}]]></c>, where <c><![CDATA[Result]]></c> is the result
- from the write, either <c><![CDATA[ok]]></c>, or <c><![CDATA[{error, Error}]]></c>.</p>
- <p>The <c><![CDATA[apwrite]]></c> writes on a specified position, combining
- the <c><![CDATA[position]]></c> and <c><![CDATA[awrite]]></c> operations.</p>
+ <p>Reads <c><![CDATA[Len]]></c> bytes from the file referenced by
+ <c><![CDATA[Handle]]></c>. Returns <c><![CDATA[{ok, Data}]]></c>, <c><![CDATA[eof]]></c>, or
+ <c><![CDATA[{error, Reason}]]></c>. If the file is opened with <c><![CDATA[binary]]></c>,
+ <c><![CDATA[Data]]></c> is a binary, otherwise it is a string.</p>
+ <p>If the file is read past <c>eof</c>, only the remaining bytes
+ are read and returned. If no bytes are read, <c><![CDATA[eof]]></c>
+ is returned.</p>
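+      <p>For illustration only, a minimal sketch; <c>ChannelPid</c> is assumed
+      to come from <c>start_channel</c> and the file name is an arbitrary
+      example:</p>
+      <code type="none">
+{ok, Handle} = ssh_sftp:open(ChannelPid, "/tmp/abc.txt", [read, binary]),
+{ok, Data} = ssh_sftp:read(ChannelPid, Handle, 100),
+ok = ssh_sftp:close(ChannelPid, Handle).
+      </code>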
</desc>
</func>
- <func>
- <name>position(ChannelPid, Handle, Location) -> </name>
- <name>position(ChannelPid, Handle, Location, Timeout) -> {ok, NewPosition | {error, Error}</name>
- <fsummary>Seek position in open file</fsummary>
+
+ <func>
+ <name>read_file(ChannelPid, File) -></name>
+ <name>read_file(ChannelPid, File, Timeout) -> {ok, Data} | {error, Reason}</name>
+ <fsummary>Reads a file.</fsummary>
<type>
- <v>ChannelPid = pid()</v>
- <v>Handle = term()</v>
- <v>Location = Offset | {bof, Offset} | {cur, Offset} | {eof, Offset} | bof | cur | eof</v>
- <v>Offset = integer()</v>
+ <v>ChannelPid = pid()</v>
+ <v>File = string()</v>
+ <v>Data = binary()</v>
<v>Timeout = timeout()</v>
- <v>NewPosition = integer()</v>
- <v>Reason = term()</v>
+ <v>Reason = term()</v>
</type>
<desc>
- <p>Sets the file position of the file referenced by <c><![CDATA[Handle]]></c>.
- Returns <c><![CDATA[{ok, NewPosition}]]></c> (as an absolute offset) if
- successful, otherwise <c><![CDATA[{error, Reason}]]></c>. <c><![CDATA[Location]]></c> is
- one of the following:</p>
- <taglist>
- <tag><c><![CDATA[Offset]]></c></tag>
- <item>
- <p>The same as <c><![CDATA[{bof, Offset}]]></c>.</p>
- </item>
- <tag><c><![CDATA[{bof, Offset}]]></c></tag>
- <item>
- <p>Absolute offset.</p>
- </item>
- <tag><c><![CDATA[{cur, Offset}]]></c></tag>
- <item>
- <p>Offset from the current position.</p>
- </item>
- <tag><c><![CDATA[{eof, Offset}]]></c></tag>
- <item>
- <p>Offset from the end of file.</p>
- </item>
- <tag><c><![CDATA[bof | cur | eof]]></c></tag>
- <item>
- <p>The same as above with <c><![CDATA[Offset]]></c> 0.</p>
- </item>
- </taglist>
+ <p>Reads a file from the server, and returns the data in a binary,
+ like
+      <seealso marker="kernel:file#read_file-1">file:read_file/1</seealso>.</p>
</desc>
</func>
- <func>
- <name>read_file_info(ChannelPid, Name) -> </name>
+
+ <func>
+ <name>read_file_info(ChannelPid, Name) -></name>
<name>read_file_info(ChannelPid, Name, Timeout) -> {ok, FileInfo} | {error, Reason}</name>
- <fsummary>Get information about a file</fsummary>
+ <fsummary>Gets information about a file.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>Name = string()</v>
@@ -484,138 +469,191 @@
</type>
<desc>
<p>Returns a <c><![CDATA[file_info]]></c> record from the file specified by
- <c><![CDATA[Name]]></c> or <c><![CDATA[Handle]]></c>, like <c><![CDATA[file:read_file_info/2]]></c>.</p>
+ <c><![CDATA[Name]]></c> or <c><![CDATA[Handle]]></c>,
+      like <seealso marker="kernel:file#read_file_info-2">file:read_file_info/2</seealso>.</p>
</desc>
</func>
- <func>
- <name>read_link_info(ChannelPid, Name) -> {ok, FileInfo} | {error, Reason}</name>
- <name>read_link_info(ChannelPid, Name, Timeout) -> {ok, FileInfo} | {error, Reason}</name>
- <fsummary>Get information about a symbolic link</fsummary>
+
+ <func>
+ <name>read_link(ChannelPid, Name) -></name>
+ <name>read_link(ChannelPid, Name, Timeout) -> {ok, Target} | {error, Reason}</name>
+ <fsummary>Reads symbolic link.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>Name = string()</v>
- <v>Handle = term()</v>
- <v>Timeout = timeout()</v>
- <v>FileInfo = record()</v>
+ <v>Target = string()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>Returns a <c><![CDATA[file_info]]></c> record from the symbolic
- link specified by <c><![CDATA[Name]]></c> or <c><![CDATA[Handle]]></c>, like
- <c><![CDATA[file:read_link_info/2]]></c>.</p>
+ <p>Reads the link target from the symbolic link specified
+      by <c><![CDATA[Name]]></c>, like
+      <seealso marker="kernel:file#read_link-1">file:read_link/1</seealso>.</p>
</desc>
</func>
- <func>
- <name>write_file_info(ChannelPid, Name, Info) -> </name>
- <name>write_file_info(ChannelPid, Name, Info, Timeout) -> ok | {error, Reason}</name>
- <fsummary>Write information for a file</fsummary>
+
+ <func>
+ <name>read_link_info(ChannelPid, Name) -> {ok, FileInfo} | {error, Reason}</name>
+ <name>read_link_info(ChannelPid, Name, Timeout) -> {ok, FileInfo} | {error, Reason}</name>
+ <fsummary>Gets information about a symbolic link.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>Name = string()</v>
- <v>Info = record()</v>
+ <v>Handle = term()</v>
<v>Timeout = timeout()</v>
+ <v>FileInfo = record()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>Writes file information from a <c><![CDATA[file_info]]></c> record to the
- file specified by <c><![CDATA[Name]]></c>, like <c><![CDATA[file:write_file_info]]></c>.</p>
+ <p>Returns a <c><![CDATA[file_info]]></c> record from the symbolic
+ link specified by <c><![CDATA[Name]]></c> or <c><![CDATA[Handle]]></c>, like
+      <seealso marker="kernel:file#read_link_info-2">file:read_link_info/2</seealso>.</p>
</desc>
</func>
+
<func>
- <name>read_link(ChannelPid, Name) -> </name>
- <name>read_link(ChannelPid, Name, Timeout) -> {ok, Target} | {error, Reason}</name>
- <fsummary>Read symbolic link</fsummary>
+      <name>rename(ChannelPid, OldName, NewName) -></name>
+ <name>rename(ChannelPid, OldName, NewName, Timeout) -> ok | {error, Reason}</name>
+ <fsummary>Renames a file.</fsummary>
<type>
<v>ChannelPid = pid()</v>
- <v>Name = string()</v>
- <v>Target = string()</v>
+ <v>OldName = string()</v>
+ <v>NewName = string()</v>
+ <v>Timeout = timeout()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>Reads the link target from the symbolic link specified
- by <c><![CDATA[name]]></c>, like <c><![CDATA[file:read_link/1]]></c>.</p>
+ <p>Renames a file named <c><![CDATA[OldName]]></c> and gives it the name
+ <c><![CDATA[NewName]]></c>, like
+	  <seealso marker="kernel:file#rename-2">file:rename/2</seealso>.</p>
</desc>
</func>
+
<func>
- <name>make_symlink(ChannelPid, Name, Target) -> </name>
- <name>make_symlink(ChannelPid, Name, Target, Timeout) -> ok | {error, Reason}</name>
- <fsummary>Create symbolic link</fsummary>
+ <name>start_channel(ConnectionRef) -></name>
+ <name>start_channel(ConnectionRef, Options) -></name>
+ <name>start_channel(Host, Options) -></name>
+ <name>start_channel(Host, Port, Options) -> {ok, Pid} | {ok, Pid, ConnectionRef} |
+ {error, Reason}</name>
+ <fsummary>Starts an SFTP client.</fsummary>
<type>
- <v>ChannelPid = pid()</v>
- <v>Name = string()</v>
- <v>Target = string()</v>
+ <v>Host = string()</v>
+ <v>ConnectionRef = ssh_connection_ref()</v>
+ <v>Port = integer()</v>
+ <v>Options = [{Option, Value}]</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>Creates a symbolic link pointing to <c><![CDATA[Target]]></c> with the
- name <c><![CDATA[Name]]></c>, like <c><![CDATA[file:make_symlink/2]]></c>.</p>
+ <p>If no connection reference is provided, a connection is set
+ up, and the new connection is returned. An SSH channel process
+ is started to handle the communication with the SFTP server.
+ The returned <c>pid</c> for this process is to be used as input to
+ all other API functions in this module.</p>
+
+ <p>Options:</p>
+ <taglist>
+ <tag><c><![CDATA[{timeout, timeout()}]]></c></tag>
+ <item>
+ <p>The time-out is passed to the <c>ssh_channel</c> start function,
+ and defaults to <c>infinity</c>.</p>
+ </item>
+ <tag>
+ <c><![CDATA[{sftp_vsn, integer()}]]></c>
+ </tag>
+ <item>
+ <p>
+	      Desired SFTP protocol version.
+	      The version actually used is the minimum of
+	      the desired version and the maximum version
+	      supported by the SFTP server.
+ </p>
+ </item>
+ </taglist>
+ <p>All other options are directly passed to
+ <seealso marker="ssh">ssh:connect/3</seealso> or ignored if a
+ connection is already provided.</p>
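+      <p>A minimal start sketch; the host name and the options shown are
+      examples only, not defaults:</p>
+      <code type="erl">
+ ok = ssh:start(),
+ {ok, ChannelPid, ConnectionRef} =
+     ssh_sftp:start_channel("tarlop", [{user, "otptest"}, {timeout, 10000}]).
+      </code>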
</desc>
</func>
- <func>
- <name>rename(ChannelPid, OldName, NewName) -> </name>
- <name>rename(ChannelPid, OldName, NewName, Timeout) -> ok | {error, Reason}</name>
- <fsummary>Rename a file</fsummary>
+
+ <func>
+ <name>stop_channel(ChannelPid) -> ok</name>
+ <fsummary>Stops the SFTP client channel.</fsummary>
<type>
<v>ChannelPid = pid()</v>
- <v>OldName = string()</v>
- <v>NewName = string()</v>
- <v>Timeout = timeout()</v>
- <v>Reason = term()</v>
</type>
<desc>
- <p>Renames a file named <c><![CDATA[OldName]]></c>, and gives it the name
- <c><![CDATA[NewName]]></c>, like <c><![CDATA[file:rename/2]]></c></p>
+ <p>Stops an SFTP channel. Does not close the SSH connection.
+ Use <seealso marker="ssh#close-1">ssh:close/1</seealso> to close it.</p>
</desc>
</func>
+
<func>
- <name>delete(ChannelPid, Name) -> </name>
- <name>delete(ChannelPid, Name, Timeout) -> ok | {error, Reason}</name>
- <fsummary>Delete a file</fsummary>
+ <name>write(ChannelPid, Handle, Data) -></name>
+      <name>write(ChannelPid, Handle, Data, Timeout) -> ok | {error, Reason}</name>
+ <fsummary>Writes to an open file.</fsummary>
<type>
<v>ChannelPid = pid()</v>
- <v>Name = string()</v>
+ <v>Handle = term()</v>
+ <v>Data = iolist()</v>
<v>Timeout = timeout()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>Deletes the file specified by <c><![CDATA[Name]]></c>, like
- <c><![CDATA[file:delete/1]]></c></p>
+	  <p>Writes <c><![CDATA[Data]]></c> to the file referenced by <c><![CDATA[Handle]]></c>.
+	  The file is to be opened with the <c><![CDATA[write]]></c> or <c><![CDATA[append]]></c>
+	  flag. Returns <c><![CDATA[ok]]></c> if successful, or <c><![CDATA[{error, Reason}]]></c>
+	  otherwise.</p>
+ <p>Typical error reasons:</p>
+ <taglist>
+ <tag><c><![CDATA[ebadf]]></c></tag>
+ <item>
+ <p>File is not opened for writing.</p>
+ </item>
+ <tag><c><![CDATA[enospc]]></c></tag>
+ <item>
+ <p>No space is left on the device.</p>
+ </item>
+ </taglist>
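+	  <p>For example, opening a file, writing to it, and closing it
+	  (a sketch, the file name is an example only):</p>
+	  <code type="erl">
+ {ok, Handle} = ssh_sftp:open(ChannelPid, "test.txt", [write]),
+ ok = ssh_sftp:write(ChannelPid, Handle, "Some data..."),
+ ok = ssh_sftp:close(ChannelPid, Handle).
+	  </code>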
</desc>
</func>
+
<func>
- <name>make_dir(ChannelPid, Name) -> </name>
- <name>make_dir(ChannelPid, Name, Timeout) -> ok | {error, Reason}</name>
- <fsummary>Create a directory</fsummary>
+ <name>write_file(ChannelPid, File, Iolist) -></name>
+ <name>write_file(ChannelPid, File, Iolist, Timeout) -> ok | {error, Reason}</name>
+ <fsummary>Writes a file.</fsummary>
<type>
<v>ChannelPid = pid()</v>
- <v>Name = string()</v>
+ <v>File = string()</v>
+ <v>Iolist = iolist()</v>
<v>Timeout = timeout()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>Creates a directory specified by <c><![CDATA[Name]]></c>. <c><![CDATA[Name]]></c> should
- be a full path to a new directory. The directory can only be
- created in an existing directory.</p>
+	  <p>Writes a file to the server, like <seealso
+	  marker="kernel:file#write_file-2">file:write_file/2</seealso>.
+	  The file is created if it does not exist,
+	  or overwritten if it does.</p>
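+	  <p>For example (a sketch, the file name and contents are examples only):</p>
+	  <code type="erl">
+ ok = ssh_sftp:write_file(ChannelPid, "test.txt", ["some ", "data"]).
+	  </code>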
</desc>
</func>
+
<func>
- <name>del_dir(ChannelPid, Name) -> </name>
- <name>del_dir(ChannelPid, Name, Timeout) -> ok | {error, Reason}</name>
- <fsummary>Delete an empty directory</fsummary>
+ <name>write_file_info(ChannelPid, Name, Info) -></name>
+ <name>write_file_info(ChannelPid, Name, Info, Timeout) -> ok | {error, Reason}</name>
+ <fsummary>Writes information for a file.</fsummary>
<type>
<v>ChannelPid = pid()</v>
<v>Name = string()</v>
+ <v>Info = record()</v>
<v>Timeout = timeout()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p>Deletes a directory specified by <c><![CDATA[Name]]></c>.
- Note that the directory must be empty before it can be successfully deleted
- </p>
+ <p>Writes file information from a <c><![CDATA[file_info]]></c> record to the
+ file specified by <c><![CDATA[Name]]></c>, like
+	  <seealso marker="kernel:file#write_file_info-2">file:write_file_info/[2,3]</seealso>.</p>
</desc>
</func>
-
</funcs>
-
+
</erlref>
diff --git a/lib/ssh/doc/src/ssh_sftpd.xml b/lib/ssh/doc/src/ssh_sftpd.xml
index 81c2acc575..8b2497e6a3 100644
--- a/lib/ssh/doc/src/ssh_sftpd.xml
+++ b/lib/ssh/doc/src/ssh_sftpd.xml
@@ -22,67 +22,73 @@
</legalnotice>
<title>ssh_sftpd</title>
+ <prepared></prepared>
+ <docno></docno>
<date>2005-09-22</date>
+ <rev></rev>
<file>ssh_sftpd.sgml</file>
</header>
<module>ssh_sftpd</module>
- <modulesummary>Specifies the channel process to handle an sftp subsystem.</modulesummary>
+ <modulesummary>Specifies the channel process to handle an SFTP subsystem.</modulesummary>
<description>
- <p>Specifies a channel process to handle a sftp subsystem.</p>
+ <p>Specifies a channel process to handle an SFTP subsystem.</p>
</description>
<section>
- <title>DATA TYPES </title>
- <p><c>subsystem_spec() = {subsystem_name(), {channel_callback(), channel_init_args()}} </c></p>
- <p><c>subsystem_name() = "sftp"</c></p>
- <p><c>channel_callback() = atom()</c> - Name of the erlang module implementing the
- subsystem using the ssh_channel behavior see
- <seealso marker="ssh_channel">ssh_channel(3)</seealso></p>
- <p><c> channel_init_args() = list() - The one given as argument to function
- subsystem_spec/1.</c></p>
+ <title>DATA TYPES</title>
+ <taglist>
+ <tag><c>subsystem_spec() =</c></tag>
+ <item><p><c>{subsystem_name(), {channel_callback(), channel_init_args()}}</c></p></item>
+ <tag><c>subsystem_name() =</c></tag>
+ <item><p><c>"sftp"</c></p></item>
+ <tag><c>channel_callback() =</c></tag>
+ <item><p><c>atom()</c> - Name of the Erlang module implementing the subsystem using the
+ <c>ssh_channel</c> behavior, see the
+ <seealso marker="ssh_channel">ssh_channel(3)</seealso> manual page.</p></item>
+ <tag><c>channel_init_args() =</c></tag>
+      <item><p><c>list()</c> - The list of options given as argument to function <c>subsystem_spec/1</c>.</p></item>
+ </taglist>
</section>
<funcs>
<func>
<name>subsystem_spec(Options) -> subsystem_spec()</name>
- <fsummary>Returns the subsystem specification that allows an ssh daemon to handle the subsystem "sftp".</fsummary>
+ <fsummary>Returns the subsystem specification that allows an SSH daemon to handle the subsystem "sftp".</fsummary>
<type>
<v>Options = [{Option, Value}]</v>
</type>
<desc>
- <p>Should be used together with ssh:daemon/[1,2,3]</p>
- <p>Options are:</p>
+	  <p>Is to be used together with <c>ssh:daemon/[1,2,3]</c>.</p>
+ <p>Options:</p>
<taglist>
<tag><c><![CDATA[{cwd, String}]]></c></tag>
<item>
- <p>Sets the initial current working directory for the
- server.</p>
+ <p>Sets the initial current working directory for the server.</p>
</item>
<tag><c><![CDATA[{file_handler, CallbackModule}]]></c></tag>
<item>
<p>Determines which module to call for accessing
- the file server. The default value is <c>ssh_sftpd_file</c> that uses the
- <seealso marker="kernel:file">file</seealso> and <seealso marker="stdlib:filelib">filelib</seealso> API:s to access the standard OTP file
- server. This option may be used to plug in
+ the file server. The default value is <c>ssh_sftpd_file</c>, which uses the
+ <seealso marker="kernel:file">file</seealso> and <seealso marker="stdlib:filelib">filelib</seealso>
+ APIs to access the standard OTP file server. This option can be used to plug in
other file servers.</p>
</item>
<tag><c><![CDATA[{max_files, Integer}]]></c></tag>
<item>
<p>The default value is <c>0</c>, which means that there is no upper limit.
- If supplied, the number of filenames returned to the sftp client per <c>READDIR</c>
+ If supplied, the number of filenames returned to the SFTP client per <c>READDIR</c>
request is limited to at most the given value.</p>
</item>
<tag><c><![CDATA[{root, String}]]></c></tag>
<item>
- <p>Sets the sftp root directory. The user will then not be
- able to see any files above this root. If for instance
- the root is set to <c>/tmp</c> the user will see this
- directory as <c>/</c> and if the user does cd <c>/etc</c>
- the user will end up in <c>/tmp/etc</c>.
+ <p>Sets the SFTP root directory. Then the user cannot see any files
+ above this root. If, for example, the root directory is set to <c>/tmp</c>,
+ then the user sees this directory as <c>/</c>. If the user then writes
+ <c>cd /etc</c>, the user moves to <c>/tmp/etc</c>.
</p>
</item>
<tag><c><![CDATA[{sftpd_vsn, integer()}]]></c></tag>
<item>
- <p>Sets the sftp version to use, defaults to 5. Version 6 is under
+ <p>Sets the SFTP version to use. Defaults to 5. Version 6 is under
development and limited.</p>
</item>
</taglist>
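+      <p>For example, a subsystem specification can be given to
+      <c>ssh:daemon</c> as follows (a sketch, the directories are examples only):</p>
+      <code type="erl">
+ Spec = ssh_sftpd:subsystem_spec([{cwd, "/tmp/sftp"}, {root, "/tmp"}]),
+ ssh:daemon(8989, [{system_dir, "/tmp/ssh_daemon"},
+                   {user_dir, "/tmp/otptest_user/.ssh"},
+                   {subsystems, [Spec]}]).
+      </code>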
diff --git a/lib/ssh/doc/src/usersguide.xml b/lib/ssh/doc/src/usersguide.xml
index 8ab14c2945..a9ed5fe21e 100644
--- a/lib/ssh/doc/src/usersguide.xml
+++ b/lib/ssh/doc/src/usersguide.xml
@@ -23,15 +23,16 @@
<title>SSH User's Guide</title>
<prepared>OTP Team</prepared>
+ <docno></docno>
<date>2012-10-11</date>
+ <rev></rev>
<file>usersguide.xml</file>
</header>
<description>
- <p>The <em>SSH</em> application implements the SSH (Secure Shell) protocol and
- provides an SFTP (Secret File Transfer Protocol) client and server.
+ <p>The Erlang Secure Shell (SSH) application, <c>ssh</c>, implements the SSH Transport Layer Protocol and
+ provides SSH File Transfer Protocol (SFTP) clients and servers.
</p>
</description>
<xi:include href="introduction.xml"/>
- <xi:include href="ssh_protocol.xml"/>
<xi:include href="using_ssh.xml"/>
</part>
diff --git a/lib/ssh/doc/src/using_ssh.xml b/lib/ssh/doc/src/using_ssh.xml
index 9ab71260d3..cd7b64ac43 100644
--- a/lib/ssh/doc/src/using_ssh.xml
+++ b/lib/ssh/doc/src/using_ssh.xml
@@ -22,64 +22,70 @@
</legalnotice>
- <title>Getting started</title>
+ <title>Getting Started</title>
+ <prepared></prepared>
+ <docno></docno>
+ <approved></approved>
+ <date></date>
+ <rev></rev>
<file>using_ssh.xml</file>
</header>
<section>
- <title> General information</title>
- <p>The examples in the following sections use the utility function
- <seealso marker="ssh#start-0"> ssh:start/0 </seealso> that starts
- all needed applications (crypto, public_key and ssh). All examples
- are run in an Erlang shell, or in a bash shell using openssh to
- illustrate how the erlang ssh application can be used. The
- examples are run as the user otptest on a local network where the
- user is authorized to login in over ssh to the host "tarlop". If
- nothing else is stated it is persumed that the otptest user has an
- entry in tarlop's authorized_keys file (may log in via ssh without
- entering a password). Also tarlop is a known host in the user
- otptest's known_hosts file so that host verification can be done
- without user interaction.
+ <title>General Information</title>
+ <p>The following examples use the utility function
+ <seealso marker="ssh#start-0"> ssh:start/0</seealso> to start
+ all needed applications (<c>crypto</c>, <c>public_key</c>, and <c>ssh</c>).
+ All examples are run in an Erlang shell, or in a bash shell, using <em>openssh</em>
+ to illustrate how the <c>ssh</c> application can be used. The
+ examples are run as the user <c>otptest</c> on a local network where the
+ user is authorized to log in over <c>ssh</c> to the host <em>tarlop</em>.
+ </p>
+ <p>If nothing else is stated, it is presumed that the <c>otptest</c> user
+ has an entry in the <em>authorized_keys</em> file of <em>tarlop</em>
+ (allowed to log in over <c>ssh</c> without entering a password).
+ Also, <em>tarlop</em> is a known host in the <c>known_hosts</c>
+    file of the user <c>otptest</c>. This means that host verification
+    can be done without user interaction.
</p>
</section>
<section>
- <title>Using the Erlang SSH Terminal Client</title>
+ <title>Using the Erlang ssh Terminal Client</title>
- <p>The user otptest, that has bash as default shell, uses the
- ssh:shell/1 client to connect to the openssh daemon running on a
- host called tarlop. Note that currently this client is very simple
- and you should not be expected to be as fancy as the openssh
- client.</p>
+ <p>The user <c>otptest</c>, which has bash as default shell, uses the
+ <c>ssh:shell/1</c> client to connect to the <em>openssh</em> daemon running on a
+ host called <em>tarlop</em>:</p>
<code type="erl" >
1> ssh:start().
ok
2> {ok, S} = ssh:shell("tarlop").
- >pwd
+ otptest@tarlop:> pwd
/home/otptest
- >exit
+ otptest@tarlop:> exit
logout
3>
</code>
</section>
<section>
- <title>Running an Erlang SSH Daemon </title>
+ <marker id="Running an Erlang ssh Daemon"></marker>
+ <title>Running an Erlang ssh Daemon</title>
- <p> The option system_dir must be a directory containing a host
- key file and it defaults to /etc/ssh. For details see section
+    <p>The <c>system_dir</c> option must be a directory containing a host
+    key file, and it defaults to <c>/etc/ssh</c>. For details, see Section
Configuration Files in <seealso
marker="SSH_app">ssh(6)</seealso>.
</p>
- <note><p>Normally the /etc/ssh directory is only readable by root. </p>
+ <note><p>Normally, the <c>/etc/ssh</c> directory is only readable by root.</p>
</note>
- <p> The option user_dir defaults to the users ~/.ssh directory</p>
+    <p>The option <c>user_dir</c> defaults to the <c>~/.ssh</c> directory of the user.</p>
- <p>In the following example we generate new keys and host keys as
- to be able to run the example without having root privilages</p>
+ <p><em>Step 1.</em> To run the example without root privileges,
+ generate new keys and host keys:</p>
<code>
$bash> ssh-keygen -t rsa -f /tmp/ssh_daemon/ssh_host_rsa_key
@@ -88,19 +94,22 @@
[...]
</code>
- <p>Create the file /tmp/otptest_user/.ssh/authorized_keys and add the content
- of /tmp/otptest_user/.ssh/id_rsa.pub Now we can do</p>
+ <p><em>Step 2.</em> Create the file <c>/tmp/otptest_user/.ssh/authorized_keys</c>
+ and add the content of <c>/tmp/otptest_user/.ssh/id_rsa.pub</c>.</p>
+
+ <p><em>Step 3.</em> Start the Erlang <c>ssh</c> daemon:</p>
<code type="erl">
1> ssh:start().
ok
- 2> {ok, Sshd} = ssh:daemon(8989, [{system_dir, "/tmp/ssh_daemon"},
- {user_dir, "/tmp/otptest_user/.ssh"}]).
+ 2> {ok, Sshd} = ssh:daemon(8989, [{system_dir, "/tmp/ssh_daemon"},
+ {user_dir, "/tmp/otptest_user/.ssh"}]).
{ok,&lt;0.54.0>}
3>
</code>
- <p>Use the openssh client from a shell to connect to the Erlang ssh daemon.</p>
+ <p><em>Step 4.</em> Use the <em>openssh</em> client from a shell to connect
+ to the Erlang <c>ssh</c> daemon:</p>
<code>
$bash> ssh tarlop -p 8989 -i /tmp/otptest_user/.ssh/id_rsa\
@@ -113,9 +122,12 @@
1>
</code>
- <p>There are two ways of shutting down an SSH daemon</p>
+ <p>There are two ways of shutting down an <c>ssh</c> daemon,
+ see <em>Step 5a</em> and <em>Step 5b</em>.</p>
- <p>1: Stops the listener, but leaves existing connections started by the listener up and running.</p>
+ <p><em>Step 5a.</em> Shut down the Erlang <c>ssh</c> daemon so that it
+ stops the listener but leaves existing connections, started by the listener,
+ operational:</p>
<code type="erl">
3> ssh:stop_listener(Sshd).
@@ -123,7 +135,8 @@
4>
</code>
- <p>2: Stops the listener and all connections started by the listener.</p>
+ <p><em>Step 5b.</em> Shut down the Erlang <c>ssh</c> daemon so that it
+ stops the listener and all connections started by the listener:</p>
<code type="erl">
3> ssh:stop_daemon(Sshd)
@@ -134,17 +147,18 @@
</section>
<section>
- <title>One Time Execution</title>
+ <title>One-Time Execution</title>
- <p>In the following example the Erlang shell is the client process
- that receives the channel replies. </p>
+ <p>In the following example, the Erlang shell is the client process
+ that receives the channel replies.</p>
- <note><p> If you run this example
- in your environment you may get fewer or more messages back as
- this depends on the OS and shell on the machine running the ssh
- daemon. See also <seealso marker="ssh_connection#exec-4">ssh_connection:exec/4</seealso>
+ <note><p>The number of received messages in this example depends on which OS
+ and which shell that is used on the machine running the <c>ssh</c> daemon.
+ See also <seealso marker="ssh_connection#exec-4">ssh_connection:exec/4</seealso>.
</p></note>
+ <p>Do a one-time execution of a remote command over <c>ssh</c>:</p>
+
<code type="erl" >
1> ssh:start().
ok
@@ -162,7 +176,8 @@
6>
</code>
- <p>Note only the channel is closed the connection is still up and can handle other channels</p>
+ <p>Notice that only the channel is closed. The connection is still up and can
+ handle other channels:</p>
<code type="erl" >
6> {ok, NewChannelId} = ssh_connection:session_channel(ConnectionRef, infinity).
@@ -172,19 +187,22 @@
</section>
<section>
- <title>SFTP (SSH File Transport Protocol) server</title>
+ <title>SFTP Server</title>
+
+ <p>Start the Erlang <c>ssh</c> daemon with the SFTP subsystem:</p>
<code type="erl" >
1> ssh:start().
ok
- 2> ssh:daemon(8989, [{system_dir, "/tmp/ssh_daemon"},
- {user_dir, "/tmp/otptest_user/.ssh"},
- {subsystems, [ssh_sftpd:subsystem_spec([{cwd, "/tmp/sftp/example"}])]}]).
+ 2> ssh:daemon(8989, [{system_dir, "/tmp/ssh_daemon"},
+ {user_dir, "/tmp/otptest_user/.ssh"},
+ {subsystems, [ssh_sftpd:subsystem_spec([{cwd, "/tmp/sftp/example"}])
+ ]}]).
{ok,&lt;0.54.0>}
3>
</code>
- <p> Run the openssh sftp client</p>
+ <p>Run the OpenSSH SFTP client:</p>
<code type="erl">
$bash> sftp -oPort=8989 -o IdentityFile=/tmp/otptest_user/.ssh/id_rsa\
@@ -197,7 +215,9 @@
</section>
<section>
- <title>SFTP (SSH File Transport Protocol) client</title>
+ <title>SFTP Client</title>
+
+ <p>Fetch a file with the Erlang SFTP client:</p>
<code type="erl" >
1> ssh:start().
@@ -210,10 +230,77 @@
</section>
<section>
- <title>Creating a subsystem</title>
+ <title>SFTP Client with TAR Compression and Encryption</title>
+
+ <p>Example of writing and then reading a tar file follows:</p>
+ <code type="erlang">
+ {ok,HandleWrite} = ssh_sftp:open_tar(ChannelPid, ?tar_file_name, [write]),
+ ok = erl_tar:add(HandleWrite, .... ),
+ ok = erl_tar:add(HandleWrite, .... ),
+ ...
+ ok = erl_tar:add(HandleWrite, .... ),
+ ok = erl_tar:close(HandleWrite),
+
+ %% And for reading
+ {ok,HandleRead} = ssh_sftp:open_tar(ChannelPid, ?tar_file_name, [read]),
+ {ok,NameValueList} = erl_tar:extract(HandleRead,[memory]),
+ ok = erl_tar:close(HandleRead),
+ </code>
+
+ <p>The previous write and read example can be extended with encryption and decryption as follows:</p>
+ <code type="erlang">
+%% The first three parameters depend on which crypto type is selected:
+Key = &lt;&lt;"This is a 256 bit key. abcdefghi">>,
+Ivec0 = crypto:rand_bytes(16),
+DataSize = 1024, % DataSize rem 16 = 0 for aes_cbc
+
+%% Initialization of the CryptoState, in this case it is the Ivector.
+InitFun = fun() -> {ok, Ivec0, DataSize} end,
+
+%% How to encrypt:
+EncryptFun =
+ fun(PlainBin,Ivec) ->
+ EncryptedBin = crypto:block_encrypt(aes_cbc256, Key, Ivec, PlainBin),
+ {ok, EncryptedBin, crypto:next_iv(aes_cbc,EncryptedBin)}
+ end,
+
+%% What to do with the very last block:
+CloseFun =
+ fun(PlainBin, Ivec) ->
+ EncryptedBin = crypto:block_encrypt(aes_cbc256, Key, Ivec,
+ pad(16,PlainBin) %% Last chunk
+ ),
+ {ok, EncryptedBin}
+ end,
+
+Cw = {InitFun,EncryptFun,CloseFun},
+{ok,HandleWrite} = ssh_sftp:open_tar(ChannelPid, ?tar_file_name, [write,{crypto,Cw}]),
+ok = erl_tar:add(HandleWrite, .... ),
+ok = erl_tar:add(HandleWrite, .... ),
+...
+ok = erl_tar:add(HandleWrite, .... ),
+ok = erl_tar:close(HandleWrite),
+
+%% And for decryption (in this crypto example we could use the same InitFun
+%% as for encryption):
+DecryptFun =
+ fun(EncryptedBin,Ivec) ->
+ PlainBin = crypto:block_decrypt(aes_cbc256, Key, Ivec, EncryptedBin),
+ {ok, PlainBin, crypto:next_iv(aes_cbc,EncryptedBin)}
+ end,
+
+Cr = {InitFun,DecryptFun},
+{ok,HandleRead} = ssh_sftp:open_tar(ChannelPid, ?tar_file_name, [read,{crypto,Cr}]),
+{ok,NameValueList} = erl_tar:extract(HandleRead,[memory]),
+ok = erl_tar:close(HandleRead),
+ </code>
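+   <p>The helper <c>pad/2</c> used in <c>CloseFun</c> above is not a library
+   function; a possible definition, assuming that padding the last chunk with
+   zero bytes is acceptable for the application, is:</p>
+   <code type="erlang">
+pad(BlockSize, Bin) ->
+    case byte_size(Bin) rem BlockSize of
+        0 -> Bin;                                    % already a multiple of the block size
+        R -> &lt;&lt;Bin/binary, 0:((BlockSize-R)*8)>> % append zero bytes up to the block size
+    end.
+   </code>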
+ </section>
+
+ <section>
+ <title>Creating a Subsystem</title>
- <p>A very small SSH subsystem that echos N bytes could be implemented like this.
- See also <seealso marker="ssh_channel"> ssh_channel(3)</seealso> </p>
+ <p>A small <c>ssh</c> subsystem that echoes N bytes can be implemented as shown
+ in the following example:</p>
<code type="erl" >
-module(ssh_echo_server).
@@ -267,14 +354,16 @@ terminate(_Reason, _State) ->
ok.
</code>
- <p>And run like this on the host tarlop with the keys generated in section 3.3</p>
+ <p>The subsystem can be run on the host <em>tarlop</em> with the generated keys,
+ as described in Section <seealso marker="#Running an Erlang ssh Daemon">
+ Running an Erlang ssh Daemon</seealso>:</p>
<code type="erl" >
1> ssh:start().
ok
- 2> ssh:daemon(8989, [{system_dir, "/tmp/ssh_daemon"},
- {user_dir, "/tmp/otptest_user/.ssh"}
- {subsystems, [{"echo_n", {ssh_echo_server, [10]}}]}]).
+ 2> ssh:daemon(8989, [{system_dir, "/tmp/ssh_daemon"},
+ {user_dir, "/tmp/otptest_user/.ssh"}
+ {subsystems, [{"echo_n", {ssh_echo_server, [10]}}]}]).
{ok,&lt;0.54.0>}
3>
</code>
@@ -293,6 +382,7 @@ terminate(_Reason, _State) ->
{ssh_msg, &lt;0.57.0>, {closed, 0}}
7> {error, closed} = ssh_connection:send(ConnectionRef, ChannelId, "10", infinity).
</code>
+<p>See also <seealso marker="ssh_channel"> ssh_channel(3)</seealso>.</p>
</section>
diff --git a/lib/ssh/examples/Makefile b/lib/ssh/examples/Makefile
index de019f75b5..9280c42076 100644
--- a/lib/ssh/examples/Makefile
+++ b/lib/ssh/examples/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2005-2012. All Rights Reserved.
+# Copyright Ericsson AB 2005-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -38,7 +38,8 @@ RELSYSDIR = $(RELEASE_PATH)/lib/ssh-$(VSN)
MODULES = \
- ssh_sample_cli
+ ssh_sample_cli \
+	ssh_device
ERL_FILES= $(MODULES:=.erl)
diff --git a/lib/ssh/examples/ssh_device.erl b/lib/ssh/examples/ssh_device.erl
new file mode 100644
index 0000000000..f6be812915
--- /dev/null
+++ b/lib/ssh/examples/ssh_device.erl
@@ -0,0 +1,62 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(ssh_device).
+
+%% api
+-export([ssh_device/5]).
+
+%%% This full ssh client example is meant to make it easier to get started
+%%% with the ssh application than reading through each function's reference
+%%% documentation separately.
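+%%%
+%%% Example (host, port, credentials, and command are placeholders):
+%%%   ssh_device:ssh_device("tarlop", 22, "otptest", "password", "echo hello").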
+ssh_device(Host, Port, User, Pass, Cmd) ->
+ ssh:start(),
+ case ssh:connect(Host, Port,
+ [{user, User}, {password, Pass},
+ {silently_accept_hosts, true}, {quiet_mode, true}])
+ of
+ {ok, Conn} ->
+ {ok, ChannelId} = ssh_connection:session_channel(Conn,
+ infinity),
+ ssh_connection:exec(Conn, ChannelId, Cmd, infinity),
+ Init_rep = <<>>,
+ wait_for_response(Conn, Host, Init_rep),
+ ssh:close(Conn);
+ {error, nxdomain} ->
+ {error,nxdomain}
+ end.
+
+%%--------------------------------------------------------------------
+%%% Internal application API
+%%--------------------------------------------------------------------
+wait_for_response(Conn, Host, Acc) ->
+ receive
+ {ssh_cm, Conn, Msg} ->
+ case Msg of
+ {closed, _ChannelId} ->
+ {ok,Acc};
+ {data, _, _, A} ->
+ Acc2 = <<Acc/binary, A/binary>>,
+ wait_for_response(Conn, Host, Acc2);
+ _ ->
+ wait_for_response(Conn, Host, Acc)
+ end
+ after
+ 5000 ->
+ {error,timeout}
+ end.
diff --git a/lib/ssh/src/ssh.appup.src b/lib/ssh/src/ssh.appup.src
index b2b2994eed..e76c110c04 100644
--- a/lib/ssh/src/ssh.appup.src
+++ b/lib/ssh/src/ssh.appup.src
@@ -1,7 +1,7 @@
%% -*- erlang -*-
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -19,61 +19,9 @@
{"%VSN%",
[
- {"3.0.8", [{load_module, ssh_connection, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_sftp, soft_purge, soft_purge, [ssh_xfer]},
- {load_module, ssh_connection_handler, soft_purge, soft_purge, []},
- {load_module, ssh, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_xfer, soft_purge, soft_purge, []}
- ]},
- {"3.0.7", [{load_module, ssh_auth, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_acceptor, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_channel, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_connection, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_connection_handler, soft_purge, soft_purge, []},
- {load_module, ssh_info, soft_purge, soft_purge, []},
- {load_module, ssh_message, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_io, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_sftp, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_xfer, soft_purge, soft_purge, [ssh_connection_handler]}]},
- {"3.0.6", [{load_module, ssh_auth, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_acceptor, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_channel, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_connection, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_connection_handler, soft_purge, soft_purge, []},
- {load_module, ssh_info, soft_purge, soft_purge, []},
- {load_module, ssh_message, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_io, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_sftp, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_xfer, soft_purge, soft_purge, [ssh_connection_handler]}]},
{<<".*">>, [{restart_application, ssh}]}
],
[
- {"3.0.8", [{load_module, ssh_connection, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_sftp, soft_purge, soft_purge, []},
- {load_module, ssh_connection_handler, soft_purge, soft_purge, []},
- {load_module, ssh, soft_purge, soft_purge, []},
- {load_module, ssh_xfer, soft_purge, soft_purge, []}
- ]},
- {"3.0.7", [{load_module, ssh_auth, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_acceptor, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_channel, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_connection, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_connection_handler, soft_purge, soft_purge, []},
- {load_module, ssh_info, soft_purge, soft_purge, []},
- {load_module, ssh_message, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_io, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_sftp, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_xfer, soft_purge, soft_purge, [ssh_connection_handler]}]},
- {"3.0.6", [{load_module, ssh_auth, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_acceptor, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_channel, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_connection, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_connection_handler, soft_purge, soft_purge, []},
- {load_module, ssh_info, soft_purge, soft_purge, []},
- {load_module, ssh_message, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_io, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_sftp, soft_purge, soft_purge, [ssh_connection_handler]},
- {load_module, ssh_xfer, soft_purge, soft_purge, [ssh_connection_handler]}]},
{<<".*">>, [{restart_application, ssh}]}
]
}.
diff --git a/lib/ssh/src/ssh.erl b/lib/ssh/src/ssh.erl
index eae33e3683..18951c8c89 100644
--- a/lib/ssh/src/ssh.erl
+++ b/lib/ssh/src/ssh.erl
@@ -24,10 +24,12 @@
-include("ssh.hrl").
-include("ssh_connect.hrl").
-include_lib("public_key/include/public_key.hrl").
+-include_lib("kernel/include/file.hrl").
-export([start/0, start/1, stop/0, connect/3, connect/4, close/1, connection_info/2,
channel_info/3,
daemon/1, daemon/2, daemon/3,
+ default_algorithms/0,
stop_listener/1, stop_listener/2, stop_daemon/1, stop_daemon/2,
shell/1, shell/2, shell/3]).
@@ -208,6 +210,11 @@ shell(Host, Port, Options) ->
end.
%%--------------------------------------------------------------------
+%%--------------------------------------------------------------------
+default_algorithms() ->
+ ssh_transport:default_algorithms().
+
+%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
fix_idle_time(SshOptions) ->
@@ -259,7 +266,7 @@ do_start_daemon(Host, Port, Options, SocketOptions) ->
end.
handle_options(Opts) ->
- try handle_option(proplists:unfold(Opts), [], []) of
+ try handle_option(algs_compatibility(proplists:unfold(Opts)), [], []) of
{Inet, Ssh} ->
{handle_ip(Inet), Ssh}
catch
@@ -267,6 +274,35 @@ handle_options(Opts) ->
Error
end.
+
+algs_compatibility(Os) ->
+ %% Take care of old options 'public_key_alg' and 'pref_public_key_algs'
+ comp_pk(proplists:get_value(preferred_algorithms,Os),
+ proplists:get_value(pref_public_key_algs,Os),
+ proplists:get_value(public_key_alg, Os),
+ [{K,V} || {K,V} <- Os,
+ K =/= public_key_alg,
+ K =/= pref_public_key_algs]
+ ).
+
+comp_pk(undefined, undefined, undefined, Os) -> Os;
+comp_pk( PrefAlgs, _, _, Os) when PrefAlgs =/= undefined -> Os;
+
+comp_pk(undefined, undefined, ssh_dsa, Os) -> comp_pk(undefined, undefined, 'ssh-dss', Os);
+comp_pk(undefined, undefined, ssh_rsa, Os) -> comp_pk(undefined, undefined, 'ssh-rsa', Os);
+comp_pk(undefined, undefined, PK, Os) ->
+ PKs = [PK | ssh_transport:supported_algorithms(public_key)--[PK]],
+ [{preferred_algorithms, [{public_key,PKs}] } | Os];
+
+comp_pk(undefined, PrefPKs, _, Os) when PrefPKs =/= undefined ->
+ PKs = [case PK of
+ ssh_dsa -> 'ssh-dss';
+ ssh_rsa -> 'ssh-rsa';
+ _ -> PK
+ end || PK <- PrefPKs],
+ [{preferred_algorithms, [{public_key,PKs}]} | Os].
+
+
handle_option([], SocketOptions, SshOptions) ->
{SocketOptions, SshOptions};
handle_option([{system_dir, _} = Opt | Rest], SocketOptions, SshOptions) ->
@@ -279,8 +315,6 @@ handle_option([{silently_accept_hosts, _} = Opt | Rest], SocketOptions, SshOptio
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
handle_option([{user_interaction, _} = Opt | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{public_key_alg, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
handle_option([{connect_timeout, _} = Opt | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
handle_option([{user, _} = Opt | Rest], SocketOptions, SshOptions) ->
@@ -297,10 +331,6 @@ handle_option([{pwdfun, _} = Opt | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
handle_option([{key_cb, _} = Opt | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{role, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{compression, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
%%Backwards compatibility
handle_option([{allow_user_interaction, Value} | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option({user_interaction, Value}) | SshOptions]);
@@ -312,6 +342,8 @@ handle_option([{disconnectfun, _} = Opt | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
handle_option([{failfun, _} = Opt | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
+handle_option([{ssh_msg_debug_fun, _} = Opt | Rest], SocketOptions, SshOptions) ->
+ handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
%%Backwards compatibility should not be underscore between ip and v6 in API
handle_option([{ip_v6_disabled, Value} | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option({ipv6_disabled, Value}) | SshOptions]);
@@ -329,7 +361,9 @@ handle_option([{exec, _} = Opt | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
handle_option([{auth_methods, _} = Opt | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{pref_public_key_algs, _} = Opt | Rest], SocketOptions, SshOptions) ->
+handle_option([{auth_method_kb_interactive_data, _} = Opt | Rest], SocketOptions, SshOptions) ->
+ handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
+handle_option([{preferred_algorithms,_} = Opt | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
handle_option([{quiet_mode, _} = Opt|Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
@@ -345,32 +379,28 @@ handle_option([{parallel_login, _} = Opt|Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
handle_option([parallel_login|Rest], SocketOptions, SshOptions) ->
handle_option(Rest, SocketOptions, [handle_ssh_option({parallel_login,true}) | SshOptions]);
+handle_option([{minimal_remote_max_packet_size, _} = Opt|Rest], SocketOptions, SshOptions) ->
+ handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
+handle_option([{id_string, _ID} = Opt|Rest], SocketOptions, SshOptions) ->
+ handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
handle_option([Opt | Rest], SocketOptions, SshOptions) ->
handle_option(Rest, [handle_inet_option(Opt) | SocketOptions], SshOptions).
-handle_ssh_option({system_dir, Value} = Opt) when is_list(Value) ->
+
+handle_ssh_option({minimal_remote_max_packet_size, Value} = Opt) when is_integer(Value), Value >=0 ->
Opt;
+handle_ssh_option({system_dir, Value} = Opt) when is_list(Value) ->
+ check_dir(Opt);
handle_ssh_option({user_dir, Value} = Opt) when is_list(Value) ->
- Opt;
+ check_dir(Opt);
handle_ssh_option({user_dir_fun, Value} = Opt) when is_function(Value) ->
Opt;
handle_ssh_option({silently_accept_hosts, Value} = Opt) when is_boolean(Value) ->
Opt;
handle_ssh_option({user_interaction, Value} = Opt) when is_boolean(Value) ->
Opt;
-handle_ssh_option({public_key_alg, ssh_dsa}) ->
- {public_key_alg, 'ssh-dss'};
-handle_ssh_option({public_key_alg, ssh_rsa}) ->
- {public_key_alg, 'ssh-rsa'};
-handle_ssh_option({public_key_alg, Value} = Opt) when Value == 'ssh-rsa'; Value == 'ssh-dss' ->
- Opt;
-handle_ssh_option({pref_public_key_algs, Value} = Opt) when is_list(Value), length(Value) >= 1 ->
- case handle_pref_algs(Value, []) of
- {true, NewOpts} ->
- NewOpts;
- _ ->
- throw({error, {eoptions, Opt}})
- end;
+handle_ssh_option({preferred_algorithms,[_|_]} = Opt) ->
+ handle_pref_algs(Opt);
handle_ssh_option({connect_timeout, Value} = Opt) when is_integer(Value); Value == infinity ->
Opt;
handle_ssh_option({max_sessions, Value} = Opt) when is_integer(Value), Value>0 ->
@@ -402,6 +432,13 @@ handle_ssh_option({exec, Function} = Opt) when is_function(Function) ->
Opt;
handle_ssh_option({auth_methods, Value} = Opt) when is_list(Value) ->
Opt;
+handle_ssh_option({auth_method_kb_interactive_data, {Name,Instruction,Prompt,Echo}} = Opt) when is_list(Name),
+ is_list(Instruction),
+ is_list(Prompt),
+ is_boolean(Echo) ->
+ Opt;
+handle_ssh_option({auth_method_kb_interactive_data, F} = Opt) when is_function(F,3) ->
+ Opt;
handle_ssh_option({infofun, Value} = Opt) when is_function(Value) ->
Opt;
handle_ssh_option({connectfun, Value} = Opt) when is_function(Value) ->
@@ -410,6 +447,8 @@ handle_ssh_option({disconnectfun , Value} = Opt) when is_function(Value) ->
Opt;
handle_ssh_option({failfun, Value} = Opt) when is_function(Value) ->
Opt;
+handle_ssh_option({ssh_msg_debug_fun, Value} = Opt) when is_function(Value,4) ->
+ Opt;
handle_ssh_option({ipv6_disabled, Value} = Opt) when is_boolean(Value) ->
throw({error, {{ipv6_disabled, Opt}, option_no_longer_valid_use_inet_option_instead}});
@@ -434,6 +473,10 @@ handle_ssh_option({idle_time, Value} = Opt) when is_integer(Value), Value > 0 ->
Opt;
handle_ssh_option({rekey_limit, Value} = Opt) when is_integer(Value) ->
Opt;
+handle_ssh_option({id_string, random}) ->
+ {id_string, {random,2,5}}; %% 2 - 5 random characters
+handle_ssh_option({id_string, ID} = Opt) when is_list(ID) ->
+ Opt;
handle_ssh_option(Opt) ->
throw({error, {eoptions, Opt}}).
@@ -450,23 +493,83 @@ handle_inet_option({reuseaddr, _} = Opt) ->
%% Option verified by inet
handle_inet_option(Opt) ->
Opt.
+
+
%% Check preferred algs
-handle_pref_algs([], Acc) ->
- {true, lists:reverse(Acc)};
-handle_pref_algs([H|T], Acc) ->
- case H of
- ssh_dsa ->
- handle_pref_algs(T, ['ssh-dss'| Acc]);
- ssh_rsa ->
- handle_pref_algs(T, ['ssh-rsa'| Acc]);
- 'ssh-dss' ->
- handle_pref_algs(T, ['ssh-dss'| Acc]);
- 'ssh-rsa' ->
- handle_pref_algs(T, ['ssh-rsa'| Acc]);
- _ ->
- false
+
+handle_pref_algs({preferred_algorithms,Algs}) ->
+ try alg_duplicates(Algs, [], []) of
+ [] ->
+ {preferred_algorithms,
+ [try ssh_transport:supported_algorithms(Key)
+ of
+ DefAlgs -> handle_pref_alg(Key,Vals,DefAlgs)
+ catch
+ _:_ -> throw({error, {{eoptions, {preferred_algorithms,Key}},
+ "Bad preferred_algorithms key"}})
+ end || {Key,Vals} <- Algs]
+ };
+
+ Dups ->
+ throw({error, {{eoptions, {preferred_algorithms,Dups}}, "Duplicates found"}})
+ catch
+ _:_ ->
+ throw({error, {{eoptions, preferred_algorithms}, "Malformed"}})
end.
+alg_duplicates([{K,V}|KVs], Ks, Dups0) ->
+ Dups =
+ case lists:member(K,Ks) of
+ true ->
+ [K|Dups0];
+ false ->
+ Dups0
+ end,
+ case V--lists:usort(V) of
+ [] ->
+ alg_duplicates(KVs, [K|Ks], Dups);
+ Ds ->
+ alg_duplicates(KVs, [K|Ks], Dups++Ds)
+ end;
+alg_duplicates([], _Ks, Dups) ->
+ Dups.
+
+handle_pref_alg(Key,
+ Vs=[{client2server,C2Ss=[_|_]},{server2client,S2Cs=[_|_]}],
+ [{client2server,Sup_C2Ss},{server2client,Sup_S2Cs}]
+ ) ->
+ chk_alg_vs(Key, C2Ss, Sup_C2Ss),
+ chk_alg_vs(Key, S2Cs, Sup_S2Cs),
+ {Key, Vs};
+
+handle_pref_alg(Key,
+ Vs=[{server2client,[_|_]},{client2server,[_|_]}],
+ Sup=[{client2server,_},{server2client,_}]
+ ) ->
+ handle_pref_alg(Key, lists:reverse(Vs), Sup);
+
+handle_pref_alg(Key,
+ Vs=[V|_],
+ Sup=[{client2server,_},{server2client,_}]
+ ) when is_atom(V) ->
+ handle_pref_alg(Key, [{client2server,Vs},{server2client,Vs}], Sup);
+
+handle_pref_alg(Key,
+ Vs=[V|_],
+ Sup=[S|_]
+ ) when is_atom(V), is_atom(S) ->
+ chk_alg_vs(Key, Vs, Sup),
+ {Key, Vs};
+
+handle_pref_alg(Key, Vs, _) ->
+ throw({error, {{eoptions, {preferred_algorithms,[{Key,Vs}]}}, "Badly formed list"}}).
+
+chk_alg_vs(OptKey, Values, SupportedValues) ->
+ case (Values -- SupportedValues) of
+ [] -> Values;
+ Bad -> throw({error, {{eoptions, {OptKey,Bad}}, "Unsupported value(s) found"}})
+ end.
+
handle_ip(Inet) -> %% Default to ipv4
case lists:member(inet, Inet) of
true ->
@@ -479,4 +582,31 @@ handle_ip(Inet) -> %% Default to ipv4
[inet | Inet]
end
end.
-
+
+check_dir({_,Dir} = Opt) ->
+ case directory_exist_readable(Dir) of
+ ok ->
+ Opt;
+ {error,Error} ->
+ throw({error, {eoptions,{Opt,Error}}})
+ end.
+
+directory_exist_readable(Dir) ->
+ case file:read_file_info(Dir) of
+ {ok, #file_info{type = directory,
+ access = Access}} ->
+ case Access of
+ read -> ok;
+ read_write -> ok;
+ _ -> {error, eacces}
+ end;
+
+ {ok, #file_info{}}->
+ {error, enotdir};
+
+ {error, Error} ->
+ {error, Error}
+ end.
+
+
+
diff --git a/lib/ssh/src/ssh_acceptor.erl b/lib/ssh/src/ssh_acceptor.erl
index 6c443eeb9c..34988f17b6 100644
--- a/lib/ssh/src/ssh_acceptor.erl
+++ b/lib/ssh/src/ssh_acceptor.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -43,7 +43,7 @@ start_link(Port, Address, SockOpts, Opts, AcceptTimeout) ->
acceptor_init(Parent, Port, Address, SockOpts, Opts, AcceptTimeout) ->
{_, Callback, _} =
proplists:get_value(transport, Opts, {tcp, gen_tcp, tcp_closed}),
- case (catch do_socket_listen(Callback, Port, SockOpts)) of
+ case (catch do_socket_listen(Callback, Port, [{active, false} | SockOpts])) of
{ok, ListenSocket} ->
proc_lib:init_ack(Parent, {ok, self()}),
acceptor_loop(Callback,
diff --git a/lib/ssh/src/ssh_auth.erl b/lib/ssh/src/ssh_auth.erl
index 45c4d52d7e..df9a97c8f8 100644
--- a/lib/ssh/src/ssh_auth.erl
+++ b/lib/ssh/src/ssh_auth.erl
@@ -30,7 +30,8 @@
-export([publickey_msg/1, password_msg/1, keyboard_interactive_msg/1,
service_request_msg/1, init_userauth_request_msg/1,
userauth_request_msg/1, handle_userauth_request/3,
- handle_userauth_info_request/3, handle_userauth_info_response/2
+ handle_userauth_info_request/3, handle_userauth_info_response/2,
+ default_public_key_algorithms/0
]).
%%--------------------------------------------------------------------
@@ -115,33 +116,16 @@ init_userauth_request_msg(#ssh{opts = Opts} = Ssh) ->
service = "ssh-connection",
method = "none",
data = <<>>},
- case proplists:get_value(pref_public_key_algs, Opts, false) of
- false ->
- FirstAlg = proplists:get_value(public_key_alg, Opts, ?PREFERRED_PK_ALG),
- SecondAlg = other_alg(FirstAlg),
- Prefs = method_preference(FirstAlg, SecondAlg),
- ssh_transport:ssh_packet(Msg, Ssh#ssh{user = User,
- userauth_preference = Prefs,
- userauth_methods = none,
- service = "ssh-connection"});
- Algs ->
- FirstAlg = lists:nth(1, Algs),
- case length(Algs) =:= 2 of
- true ->
- SecondAlg = other_alg(FirstAlg),
- Prefs = method_preference(FirstAlg, SecondAlg),
- ssh_transport:ssh_packet(Msg, Ssh#ssh{user = User,
- userauth_preference = Prefs,
- userauth_methods = none,
- service = "ssh-connection"});
- _ ->
- Prefs = method_preference(FirstAlg),
- ssh_transport:ssh_packet(Msg, Ssh#ssh{user = User,
- userauth_preference = Prefs,
- userauth_methods = none,
- service = "ssh-connection"})
- end
- end;
+
+
+ Algs = proplists:get_value(public_key,
+ proplists:get_value(preferred_algorithms, Opts, []),
+ default_public_key_algorithms()),
+ Prefs = method_preference(Algs),
+ ssh_transport:ssh_packet(Msg, Ssh#ssh{user = User,
+ userauth_preference = Prefs,
+ userauth_methods = none,
+ service = "ssh-connection"});
{error, no_user} ->
ErrStr = "Could not determine the users name",
throw(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_ILLEGAL_USER_NAME,
@@ -259,6 +243,54 @@ handle_userauth_request(#ssh_msg_userauth_request{user = User,
handle_userauth_request(#ssh_msg_userauth_request{user = User,
service = "ssh-connection",
+ method = "keyboard-interactive",
+ data = _},
+ _, #ssh{opts = Opts} = Ssh) ->
+ %% RFC4256
+ %% The data field contains:
+ %% - language tag (deprecated). If =/=[] SHOULD use it however. We skip
+ %% it for simplicity.
+ %% - submethods. "... the user can give a hint of which actual methods
+ %% he wants to use. ...". It's a "MAY use" so we skip
+ %% it. It also needs an understanding between the client
+ %% and the server.
+ %%
+ %% "The server MUST reply with an SSH_MSG_USERAUTH_SUCCESS,
+ %% SSH_MSG_USERAUTH_FAILURE, or SSH_MSG_USERAUTH_INFO_REQUEST message."
+ Default = {"SSH server",
+ "Enter password for \""++User++"\"",
+ "pwd: ",
+ false},
+
+ {Name, Instruction, Prompt, Echo} =
+ case proplists:get_value(auth_method_kb_interactive_data, Opts) of
+ undefined ->
+ Default;
+ {_,_,_,_}=V ->
+ V;
+ F when is_function(F) ->
+ {_,PeerName} = Ssh#ssh.peer,
+ F(PeerName, User, "ssh-connection")
+ end,
+ EchoEnc = case Echo of
+ true -> <<?TRUE>>;
+ false -> <<?FALSE>>
+ end,
+ Msg = #ssh_msg_userauth_info_request{name = unicode:characters_to_list(Name),
+ instruction = unicode:characters_to_list(Instruction),
+ language_tag = "",
+ num_prompts = 1,
+ data = <<?STRING(unicode:characters_to_binary(Prompt)),
+ EchoEnc/binary
+ >>
+ },
+ {not_authorized, {User, undefined},
+ ssh_transport:ssh_packet(Msg, Ssh#ssh{user = User,
+ opts = [{max_kb_tries,3},{kb_userauth_info_msg,Msg}|Opts]
+ })};
+
+handle_userauth_request(#ssh_msg_userauth_request{user = User,
+ service = "ssh-connection",
method = Other}, _,
#ssh{userauth_supported_methods = Methods} = Ssh) ->
{not_authorized, {User, {authmethod, Other}},
@@ -280,6 +312,38 @@ handle_userauth_info_request(
#ssh_msg_userauth_info_response{num_responses = NumPrompts,
data = Responses}, Ssh)}.
+handle_userauth_info_response(#ssh_msg_userauth_info_response{num_responses = 1,
+ data = <<?UINT32(Sz), Password:Sz/binary>>},
+ #ssh{opts = Opts0,
+ user = User} = Ssh) ->
+ NumTriesLeft = proplists:get_value(max_kb_tries, Opts0, 0) - 1,
+ Opts = lists:keydelete(max_kb_tries,1,Opts0),
+ case check_password(User, unicode:characters_to_list(Password), Opts) of
+ true ->
+ {authorized, User,
+ ssh_transport:ssh_packet(#ssh_msg_userauth_success{}, Ssh)};
+ false when NumTriesLeft > 0 ->
+ UserAuthInfoMsg =
+ (proplists:get_value(kb_userauth_info_msg,Opts))
+ #ssh_msg_userauth_info_request{name = "",
+ instruction =
+ lists:concat(
+ ["Bad user or password, try again. ",
+ integer_to_list(NumTriesLeft),
+ " tries left."])},
+ {not_authorized, {User, undefined},
+ ssh_transport:ssh_packet(UserAuthInfoMsg,
+ Ssh#ssh{opts = [{max_kb_tries,NumTriesLeft}|Opts]})};
+
+ false ->
+ {not_authorized, {User, {error,"Bad user or password"}},
+ ssh_transport:ssh_packet(#ssh_msg_userauth_failure{
+ authentications = "",
+ partial_success = false},
+ Ssh#ssh{opts = lists:keydelete(kb_userauth_info_msg,1,Opts)}
+ )}
+ end;
+
handle_userauth_info_response(#ssh_msg_userauth_info_response{},
_Auth) ->
throw(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_SERVICE_NOT_AVAILABLE,
@@ -287,20 +351,20 @@ handle_userauth_info_response(#ssh_msg_userauth_info_response{},
"keyboard-interactive",
language = "en"}).
+
+default_public_key_algorithms() -> ?PREFERRED_PK_ALGS.
+
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-method_preference(Alg1, Alg2) ->
- [{"publickey", ?MODULE, publickey_msg, [Alg1]},
- {"publickey", ?MODULE, publickey_msg,[Alg2]},
- {"password", ?MODULE, password_msg, []},
- {"keyboard-interactive", ?MODULE, keyboard_interactive_msg, []}
- ].
-method_preference(Alg1) ->
- [{"publickey", ?MODULE, publickey_msg, [Alg1]},
- {"password", ?MODULE, password_msg, []},
- {"keyboard-interactive", ?MODULE, keyboard_interactive_msg, []}
- ].
+method_preference(Algs) ->
+ lists:foldr(fun(A, Acc) ->
+ [{"publickey", ?MODULE, publickey_msg, [A]} | Acc]
+ end,
+ [{"password", ?MODULE, password_msg, []},
+ {"keyboard-interactive", ?MODULE, keyboard_interactive_msg, []}
+ ],
+ Algs).
user_name(Opts) ->
Env = case os:type() of
@@ -418,10 +482,6 @@ keyboard_interact_fun(KbdInteractFun, Name, Instr, PromptInfos, NumPrompts) ->
language = "en"}})
end.
-other_alg('ssh-rsa') ->
- 'ssh-dss';
-other_alg('ssh-dss') ->
- 'ssh-rsa'.
decode_public_key_v2(<<?UINT32(Len0), _:Len0/binary,
?UINT32(Len1), BinE:Len1/binary,
?UINT32(Len2), BinN:Len2/binary>>
diff --git a/lib/ssh/src/ssh_auth.hrl b/lib/ssh/src/ssh_auth.hrl
index 6cd8e6bf14..764c9f4246 100644
--- a/lib/ssh/src/ssh_auth.hrl
+++ b/lib/ssh/src/ssh_auth.hrl
@@ -23,7 +23,7 @@
-define(SUPPORTED_AUTH_METHODS, "publickey,keyboard-interactive,password").
--define(PREFERRED_PK_ALG, 'ssh-rsa').
+-define(PREFERRED_PK_ALGS, ['ssh-rsa','ssh-dss']).
-define(SSH_MSG_USERAUTH_REQUEST, 50).
-define(SSH_MSG_USERAUTH_FAILURE, 51).
diff --git a/lib/ssh/src/ssh_connection.erl b/lib/ssh/src/ssh_connection.erl
index 01141622d6..d532d41009 100644
--- a/lib/ssh/src/ssh_connection.erl
+++ b/lib/ssh/src/ssh_connection.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -56,8 +56,8 @@
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
--spec session_channel(pid(), timeout()) -> {ok, channel_id()} | {error, term()}.
--spec session_channel(pid(), integer(), integer(), timeout()) -> {ok, channel_id()} | {error, term()}.
+-spec session_channel(pid(), timeout()) -> {ok, channel_id()} | {error, timeout | closed}.
+-spec session_channel(pid(), integer(), integer(), timeout()) -> {ok, channel_id()} | {error, timeout | closed}.
%% Description: Opens a channel for a ssh session. A session is a
%% remote execution of a program. The program may be a shell, an
@@ -81,7 +81,8 @@ session_channel(ConnectionHandler, InitialWindowSize,
end.
%%--------------------------------------------------------------------
--spec exec(pid(), channel_id(), string(), timeout()) -> success | failure.
+-spec exec(pid(), channel_id(), string(), timeout()) ->
+ success | failure | {error, timeout | closed}.
%% Description: Will request that the server start the
%% execution of the given command.
@@ -101,8 +102,8 @@ shell(ConnectionHandler, ChannelId) ->
ssh_connection_handler:request(ConnectionHandler, self(), ChannelId,
"shell", false, <<>>, 0).
%%--------------------------------------------------------------------
--spec subsystem(pid(), channel_id(), string(), timeout()) ->
- success | failure | {error, timeout}.
+-spec subsystem(pid(), channel_id(), string(), timeout()) ->
+ success | failure | {error, timeout | closed}.
%%
%% Description: Executes a predefined subsystem.
%%--------------------------------------------------------------------
@@ -142,7 +143,7 @@ send_eof(ConnectionHandler, Channel) ->
ssh_connection_handler:send_eof(ConnectionHandler, Channel).
%%--------------------------------------------------------------------
--spec adjust_window(pid(), channel_id(), integer()) -> ok.
+-spec adjust_window(pid(), channel_id(), integer()) -> ok | {error, closed}.
%%
%%
%% Description: Adjusts the ssh flowcontrol window.
@@ -151,7 +152,8 @@ adjust_window(ConnectionHandler, Channel, Bytes) ->
ssh_connection_handler:adjust_window(ConnectionHandler, Channel, Bytes).
%%--------------------------------------------------------------------
--spec setenv(pid(), channel_id(), string(), string(), timeout()) -> success | failure.
+-spec setenv(pid(), channel_id(), string(), string(), timeout()) ->
+ success | failure | {error, timeout | closed}.
%%
%%
%% Description: Environment variables may be passed to the shell/command to be
@@ -183,22 +185,27 @@ reply_request(_,false, _, _) ->
ok.
%%--------------------------------------------------------------------
--spec ptty_alloc(pid(), channel_id(), proplists:proplist()) -> success | failiure.
+-spec ptty_alloc(pid(), channel_id(), proplists:proplist()) ->
+		 success | failure | {error, closed}.
+-spec ptty_alloc(pid(), channel_id(), proplists:proplist(), timeout()) ->
+		 success | failure | {error, timeout} | {error, closed}.
+
%%
%%
%% Description: Sends a ssh connection protocol pty_req.
%%--------------------------------------------------------------------
ptty_alloc(ConnectionHandler, Channel, Options) ->
ptty_alloc(ConnectionHandler, Channel, Options, infinity).
-ptty_alloc(ConnectionHandler, Channel, Options, TimeOut) ->
+ptty_alloc(ConnectionHandler, Channel, Options0, TimeOut) ->
+ Options = backwards_compatible(Options0, []),
{Width, PixWidth} = pty_default_dimensions(width, Options),
- {Hight, PixHight} = pty_default_dimensions(hight, Options),
+ {Height, PixHeight} = pty_default_dimensions(height, Options),
pty_req(ConnectionHandler, Channel,
- proplists:get_value(term, Options, default_term()),
+ proplists:get_value(term, Options, os:getenv("TERM", ?DEFAULT_TERMINAL)),
proplists:get_value(width, Options, Width),
- proplists:get_value(hight, Options, Hight),
+ proplists:get_value(height, Options, Height),
proplists:get_value(pixel_widh, Options, PixWidth),
- proplists:get_value(pixel_hight, Options, PixHight),
+ proplists:get_value(pixel_height, Options, PixHeight),
proplists:get_value(pty_opts, Options, []), TimeOut
).
%%--------------------------------------------------------------------
@@ -320,9 +327,7 @@ channel_data(ChannelId, DataType, Data,
SendDataType,
SendData)}
end, SendList),
- FlowCtrlMsgs = flow_control(Replies,
- Channel,
- Cache),
+ FlowCtrlMsgs = flow_control(Replies, Channel, Cache),
{{replies, Replies ++ FlowCtrlMsgs}, Connection};
_ ->
gen_fsm:reply(From, {error, closed}),
@@ -464,18 +469,31 @@ handle_msg(#ssh_msg_channel_window_adjust{recipient_channel = ChannelId,
handle_msg(#ssh_msg_channel_open{channel_type = "session" = Type,
sender_channel = RemoteId,
initial_window_size = WindowSz,
- maximum_packet_size = PacketSz}, Connection0, server) ->
-
- try setup_session(Connection0, RemoteId,
- Type, WindowSz, PacketSz) of
- Result ->
- Result
- catch _:_ ->
+ maximum_packet_size = PacketSz},
+ #connection{options = SSHopts} = Connection0,
+ server) ->
+ MinAcceptedPackSz = proplists:get_value(minimal_remote_max_packet_size, SSHopts, 0),
+
+ if
+ MinAcceptedPackSz =< PacketSz ->
+ try setup_session(Connection0, RemoteId,
+ Type, WindowSz, PacketSz) of
+ Result ->
+ Result
+ catch _:_ ->
+ FailMsg = channel_open_failure_msg(RemoteId,
+ ?SSH_OPEN_CONNECT_FAILED,
+ "Connection refused", "en"),
+ {{replies, [{connection_reply, FailMsg}]},
+ Connection0}
+ end;
+
+ MinAcceptedPackSz > PacketSz ->
FailMsg = channel_open_failure_msg(RemoteId,
- ?SSH_OPEN_CONNECT_FAILED,
- "Connection refused", "en"),
- {{replies, [{connection_reply, FailMsg}]},
- Connection0}
+ ?SSH_OPEN_ADMINISTRATIVELY_PROHIBITED,
+ lists:concat(["Maximum packet size below ",MinAcceptedPackSz,
+ " not supported"]), "en"),
+ {{replies, [{connection_reply, FailMsg}]}, Connection0}
end;
handle_msg(#ssh_msg_channel_open{channel_type = "session",
@@ -495,41 +513,57 @@ handle_msg(#ssh_msg_channel_open{channel_type = "forwarded-tcpip" = Type,
initial_window_size = RWindowSz,
maximum_packet_size = RPacketSz,
data = Data},
- #connection{channel_cache = Cache} = Connection0, server) ->
+ #connection{channel_cache = Cache,
+ options = SSHopts} = Connection0, server) ->
<<?UINT32(ALen), Address:ALen/binary, ?UINT32(Port),
?UINT32(OLen), Orig:OLen/binary, ?UINT32(OrigPort)>> = Data,
- case bound_channel(Address, Port, Connection0) of
- undefined ->
+ MinAcceptedPackSz = proplists:get_value(minimal_remote_max_packet_size, SSHopts, 0),
+
+ if
+ MinAcceptedPackSz =< RPacketSz ->
+ case bound_channel(Address, Port, Connection0) of
+ undefined ->
+ FailMsg = channel_open_failure_msg(RemoteId,
+ ?SSH_OPEN_CONNECT_FAILED,
+ "Connection refused", "en"),
+ {{replies,
+ [{connection_reply, FailMsg}]}, Connection0};
+ ChannelPid ->
+ {ChannelId, Connection1} = new_channel_id(Connection0),
+ LWindowSz = ?DEFAULT_WINDOW_SIZE,
+ LPacketSz = ?DEFAULT_PACKET_SIZE,
+ Channel = #channel{type = Type,
+ sys = "none",
+ user = ChannelPid,
+ local_id = ChannelId,
+ recv_window_size = LWindowSz,
+ recv_packet_size = LPacketSz,
+ send_window_size = RWindowSz,
+ send_packet_size = RPacketSz,
+ send_buf = queue:new()
+ },
+ ssh_channel:cache_update(Cache, Channel),
+ OpenConfMsg = channel_open_confirmation_msg(RemoteId, ChannelId,
+ LWindowSz, LPacketSz),
+ {OpenMsg, Connection} =
+ reply_msg(Channel, Connection1,
+ {open, Channel, {forwarded_tcpip,
+ decode_ip(Address), Port,
+ decode_ip(Orig), OrigPort}}),
+ {{replies, [{connection_reply, OpenConfMsg},
+ OpenMsg]}, Connection}
+ end;
+
+ MinAcceptedPackSz > RPacketSz ->
FailMsg = channel_open_failure_msg(RemoteId,
- ?SSH_OPEN_CONNECT_FAILED,
- "Connection refused", "en"),
- {{replies,
- [{connection_reply, FailMsg}]}, Connection0};
- ChannelPid ->
- {ChannelId, Connection1} = new_channel_id(Connection0),
- LWindowSz = ?DEFAULT_WINDOW_SIZE,
- LPacketSz = ?DEFAULT_PACKET_SIZE,
- Channel = #channel{type = Type,
- sys = "none",
- user = ChannelPid,
- local_id = ChannelId,
- recv_window_size = LWindowSz,
- recv_packet_size = LPacketSz,
- send_window_size = RWindowSz,
- send_packet_size = RPacketSz},
- ssh_channel:cache_update(Cache, Channel),
- OpenConfMsg = channel_open_confirmation_msg(RemoteId, ChannelId,
- LWindowSz, LPacketSz),
- {OpenMsg, Connection} =
- reply_msg(Channel, Connection1,
- {open, Channel, {forwarded_tcpip,
- decode_ip(Address), Port,
- decode_ip(Orig), OrigPort}}),
- {{replies, [{connection_reply, OpenConfMsg},
- OpenMsg]}, Connection}
+ ?SSH_OPEN_ADMINISTRATIVELY_PROHIBITED,
+ lists:concat(["Maximum packet size below ",MinAcceptedPackSz,
+ " not supported"]), "en"),
+ {{replies, [{connection_reply, FailMsg}]}, Connection0}
end;
+
handle_msg(#ssh_msg_channel_open{channel_type = "forwarded-tcpip",
sender_channel = RemoteId},
Connection, client) ->
@@ -911,7 +945,8 @@ start_channel(Cb, Id, Args, SubSysSup, Exec) ->
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-setup_session(#connection{channel_cache = Cache} = Connection0,
+setup_session(#connection{channel_cache = Cache
+ } = Connection0,
RemoteId,
Type, WindowSize, PacketSize) ->
{ChannelId, Connection} = new_channel_id(Connection0),
@@ -923,6 +958,7 @@ setup_session(#connection{channel_cache = Cache} = Connection0,
recv_packet_size = ?DEFAULT_PACKET_SIZE,
send_window_size = WindowSize,
send_packet_size = PacketSize,
+ send_buf = queue:new(),
remote_id = RemoteId
},
ssh_channel:cache_update(Cache, Channel),
@@ -1018,63 +1054,74 @@ request_reply_or_data(#channel{local_id = ChannelId, user = ChannelPid},
update_send_window(Channel, _, undefined,
#connection{channel_cache = Cache}) ->
- do_update_send_window(Channel, Channel#channel.send_buf, Cache);
+ do_update_send_window(Channel, Cache);
-update_send_window(Channel, DataType, Data,
+update_send_window(#channel{send_buf = SendBuffer} = Channel, DataType, Data,
#connection{channel_cache = Cache}) ->
- do_update_send_window(Channel, Channel#channel.send_buf ++ [{DataType, Data}], Cache).
-
-do_update_send_window(Channel0, Buf0, Cache) ->
- {Buf1, NewSz, Buf2} = get_window(Buf0,
- Channel0#channel.send_packet_size,
- Channel0#channel.send_window_size),
+ do_update_send_window(Channel#channel{send_buf = queue:in({DataType, Data}, SendBuffer)},
+ Cache).
- Channel = Channel0#channel{send_window_size = NewSz, send_buf = Buf2},
+do_update_send_window(Channel0, Cache) ->
+ {SendMsgs, Channel} = get_window(Channel0, []),
ssh_channel:cache_update(Cache, Channel),
- {Buf1, Channel}.
-
-get_window(Bs, PSz, WSz) ->
- get_window(Bs, PSz, WSz, []).
-
-get_window(Bs, _PSz, 0, Acc) ->
- {lists:reverse(Acc), 0, Bs};
-get_window([B0 = {DataType, Bin} | Bs], PSz, WSz, Acc) ->
- BSz = size(Bin),
- if BSz =< WSz -> %% will fit into window
- if BSz =< PSz -> %% will fit into a packet
- get_window(Bs, PSz, WSz-BSz, [B0|Acc]);
- true -> %% split into packet size
- <<Bin1:PSz/binary, Bin2/binary>> = Bin,
- get_window([setelement(2, B0, Bin2) | Bs],
- PSz, WSz-PSz,
- [{DataType, Bin1}|Acc])
+ {SendMsgs, Channel}.
+
+get_window(#channel{send_window_size = 0
+ } = Channel, Acc) ->
+ {lists:reverse(Acc), Channel};
+get_window(#channel{send_packet_size = 0
+ } = Channel, Acc) ->
+ {lists:reverse(Acc), Channel};
+get_window(#channel{send_buf = Buffer,
+ send_packet_size = PacketSize,
+ send_window_size = WindowSize0
+ } = Channel, Acc0) ->
+ case queue:out(Buffer) of
+ {{value, {_, Data} = Msg}, NewBuffer} ->
+ case handle_send_window(Msg, size(Data), PacketSize, WindowSize0, Acc0) of
+ {WindowSize, Acc, {_, <<>>}} ->
+ {lists:reverse(Acc), Channel#channel{send_window_size = WindowSize,
+ send_buf = NewBuffer}};
+ {WindowSize, Acc, Rest} ->
+ get_window(Channel#channel{send_window_size = WindowSize,
+ send_buf = queue:in_r(Rest, NewBuffer)}, Acc)
end;
- WSz =< PSz -> %% use rest of window
- <<Bin1:WSz/binary, Bin2/binary>> = Bin,
- get_window([setelement(2, B0, Bin2) | Bs],
- PSz, WSz-WSz,
- [{DataType, Bin1}|Acc]);
- true -> %% use packet size
- <<Bin1:PSz/binary, Bin2/binary>> = Bin,
- get_window([setelement(2, B0, Bin2) | Bs],
- PSz, WSz-PSz,
- [{DataType, Bin1}|Acc])
+ {empty, NewBuffer} ->
+ {[], Channel#channel{send_buf = NewBuffer}}
+ end.
+
+handle_send_window(Msg = {Type, Data}, Size, PacketSize, WindowSize, Acc) when Size =< WindowSize ->
+ case Size =< PacketSize of
+ true ->
+ {WindowSize - Size, [Msg | Acc], {Type, <<>>}};
+ false ->
+ <<Msg1:PacketSize/binary, Msg2/binary>> = Data,
+ {WindowSize - PacketSize, [{Type, Msg1} | Acc], {Type, Msg2}}
end;
-get_window([], _PSz, WSz, Acc) ->
- {lists:reverse(Acc), WSz, []}.
+handle_send_window({Type, Data}, _, PacketSize, WindowSize, Acc) when WindowSize =< PacketSize ->
+ <<Msg1:WindowSize/binary, Msg2/binary>> = Data,
+ {WindowSize - WindowSize, [{Type, Msg1} | Acc], {Type, Msg2}};
+handle_send_window({Type, Data}, _, PacketSize, WindowSize, Acc) ->
+ <<Msg1:PacketSize/binary, Msg2/binary>> = Data,
+ {WindowSize - PacketSize, [{Type, Msg1} | Acc], {Type, Msg2}}.
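A standalone sketch of the splitting rule implemented by handle_send_window/5 above (local names, illustration only): a whole message is sent if it fits both the window and the packet size, otherwise a chunk of min(PacketSize, WindowSize) bytes is cut off and the remainder is re-queued by get_window/2.

    split_chunk(Data, PacketSize, WindowSize)
      when byte_size(Data) =< WindowSize, byte_size(Data) =< PacketSize ->
        {Data, <<>>};                               %% fits: send as one packet
    split_chunk(Data, PacketSize, WindowSize) ->
        N = min(PacketSize, WindowSize),            %% largest chunk allowed right now
        <<Chunk:N/binary, Rest/binary>> = Data,
        {Chunk, Rest}.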
flow_control(Channel, Cache) ->
flow_control([window_adjusted], Channel, Cache).
-
+
flow_control([], Channel, Cache) ->
ssh_channel:cache_update(Cache, Channel),
[];
-
flow_control([_|_], #channel{flow_control = From,
- send_buf = []} = Channel, Cache) when From =/= undefined ->
- [{flow_control, Cache, Channel, From, ok}];
+ send_buf = Buffer} = Channel, Cache) when From =/= undefined ->
+ case queue:is_empty(Buffer) of
+ true ->
+ ssh_channel:cache_update(Cache, Channel#channel{flow_control = undefined}),
+ [{flow_control, Cache, Channel, From, ok}];
+ false ->
+ []
+ end;
flow_control(_,_,_) ->
- [].
+ [].
pty_req(ConnectionHandler, Channel, Term, Width, Height,
PixWidth, PixHeight, PtyOpts, TimeOut) ->
@@ -1294,10 +1341,11 @@ decode_ip(Addr) when is_binary(Addr) ->
{ok,A} -> A
end.
-default_term() ->
- case os:getenv("TERM") of
- false ->
- ?DEFAULT_TERMINAL;
- Str when is_list(Str)->
- Str
- end.
+backwards_compatible([], Acc) ->
+ Acc;
+backwards_compatible([{hight, Value} | Rest], Acc) ->
+ backwards_compatible(Rest, [{height, Value} | Acc]);
+backwards_compatible([{pixel_hight, Value} | Rest], Acc) ->
+ backwards_compatible(Rest, [{height, Value} | Acc]);
+backwards_compatible([Value| Rest], Acc) ->
+ backwards_compatible(Rest, [ Value | Acc]).
diff --git a/lib/ssh/src/ssh_connection_handler.erl b/lib/ssh/src/ssh_connection_handler.erl
index fdb9d3b3e6..ab1fc93a1b 100644
--- a/lib/ssh/src/ssh_connection_handler.erl
+++ b/lib/ssh/src/ssh_connection_handler.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -33,7 +33,7 @@
-include("ssh_transport.hrl").
-include("ssh_auth.hrl").
-include("ssh_connect.hrl").
-
+-compile(export_all).
-export([start_link/3]).
%% Internal application API
@@ -70,6 +70,8 @@
undecoded_packet_length, % integer()
key_exchange_init_msg, % #ssh_msg_kexinit{}
renegotiate = false, % boolean()
+ last_size_rekey = 0,
+ event_queue = [],
connection_queue,
address,
port,
@@ -82,6 +84,11 @@
{next_state, state_name(), term(), timeout()} |
{stop, term(), term()}.
+-type gen_fsm_sync_return() :: {next_state, state_name(), term()} |
+ {next_state, state_name(), term(), timeout()} |
+ {reply, term(), state_name(), term()} |
+ {stop, term(), term(), term()}.
+
%%====================================================================
%% Internal application API
%%====================================================================
@@ -289,8 +296,13 @@ renegotiate_data(ConnectionHandler) ->
-spec close(pid(), channel_id()) -> ok.
%%--------------------------------------------------------------------
close(ConnectionHandler, ChannelId) ->
- sync_send_all_state_event(ConnectionHandler, {close, ChannelId}).
-
+ case sync_send_all_state_event(ConnectionHandler, {close, ChannelId}) of
+ ok ->
+ ok;
+ {error, closed} ->
+ ok
+ end.
+
%%--------------------------------------------------------------------
-spec stop(pid()) -> ok | {error, term()}.
%%--------------------------------------------------------------------
@@ -321,22 +333,25 @@ info(ConnectionHandler, ChannelProcess) ->
hello(socket_control, #state{socket = Socket, ssh_params = Ssh} = State) ->
VsnMsg = ssh_transport:hello_version_msg(string_version(Ssh)),
send_msg(VsnMsg, State),
- {ok, [{recbuf, Size}]} = inet:getopts(Socket, [recbuf]),
- inet:setopts(Socket, [{packet, line}, {active, once}, {recbuf, ?MAX_PROTO_VERSION}]),
- {next_state, hello, State#state{recbuf = Size}};
+ case getopt(recbuf, Socket) of
+ {ok, Size} ->
+ inet:setopts(Socket, [{packet, line}, {active, once}, {recbuf, ?MAX_PROTO_VERSION}]),
+ {next_state, hello, State#state{recbuf = Size}};
+ {error, Reason} ->
+ {stop, {shutdown, Reason}, State}
+ end;
hello({info_line, _Line},#state{role = client, socket = Socket} = State) ->
%% The server may send info lines before the version_exchange
inet:setopts(Socket, [{active, once}]),
{next_state, hello, State};
-hello({info_line, _Line},#state{role = server} = State) ->
- DisconnectMsg =
- #ssh_msg_disconnect{code =
- ?SSH_DISCONNECT_PROTOCOL_ERROR,
- description = "Did not receive expected protocol version exchange",
- language = "en"},
- handle_disconnect(DisconnectMsg, State);
+hello({info_line, _Line},#state{role = server,
+ socket = Socket,
+ transport_cb = Transport } = State) ->
+ %% Respond as OpenSSH does
+ Transport:send(Socket, "Protocol mismatch."),
+ {stop, {shutdown,"Protocol mismatch in version exchange."}, State};
hello({version_exchange, Version}, #state{ssh_params = Ssh0,
socket = Socket,
@@ -427,9 +442,7 @@ key_exchange(#ssh_msg_kex_dh_gex_reply{} = Msg,
new_keys(#ssh_msg_newkeys{} = Msg, #state{ssh_params = Ssh0} = State0) ->
{ok, Ssh} = ssh_transport:handle_new_keys(Msg, Ssh0),
- {NextStateName, State} =
- after_new_keys(State0#state{ssh_params = Ssh}),
- {next_state, NextStateName, next_packet(State)}.
+ after_new_keys(next_packet(State0#state{ssh_params = Ssh})).
%%--------------------------------------------------------------------
-spec userauth(#ssh_msg_service_request{} | #ssh_msg_service_accept{} |
@@ -491,10 +504,21 @@ userauth(#ssh_msg_userauth_info_request{} = Msg,
{next_state, userauth, next_packet(State#state{ssh_params = Ssh})};
userauth(#ssh_msg_userauth_info_response{} = Msg,
- #state{ssh_params = #ssh{role = server} = Ssh0} = State) ->
- {ok, {Reply, Ssh}} = ssh_auth:handle_userauth_info_response(Msg, Ssh0),
- send_msg(Reply, State),
- {next_state, userauth, next_packet(State#state{ssh_params = Ssh})};
+ #state{ssh_params = #ssh{role = server,
+ peer = {_, Address}} = Ssh0,
+ opts = Opts, starter = Pid} = State) ->
+ case ssh_auth:handle_userauth_info_response(Msg, Ssh0) of
+ {authorized, User, {Reply, Ssh}} ->
+ send_msg(Reply, State),
+ Pid ! ssh_connected,
+ connected_fun(User, Address, "keyboard-interactive", Opts),
+ {next_state, connected,
+ next_packet(State#state{auth_user = User, ssh_params = Ssh})};
+ {not_authorized, {User, Reason}, {Reply, Ssh}} ->
+ retry_fun(User, Address, Reason, Opts),
+ send_msg(Reply, State),
+ {next_state, userauth, next_packet(State#state{ssh_params = Ssh})}
+ end;
userauth(#ssh_msg_userauth_success{}, #state{ssh_params = #ssh{role = client} = Ssh,
starter = Pid} = State) ->
@@ -553,11 +577,13 @@ userauth(#ssh_msg_userauth_banner{message = Msg},
-spec connected({#ssh_msg_kexinit{}, binary()}, %%| %% #ssh_msg_kexdh_init{},
#state{}) -> gen_fsm_state_return().
%%--------------------------------------------------------------------
-connected({#ssh_msg_kexinit{}, _Payload} = Event, State) ->
- kexinit(Event, State#state{renegotiate = true}).
-%% ;
-%% connected(#ssh_msg_kexdh_init{} = Event, State) ->
-%% key_exchange(Event, State#state{renegotiate = true}).
+connected({#ssh_msg_kexinit{}, _Payload} = Event, #state{ssh_params = Ssh0} = State0) ->
+ {KeyInitMsg, SshPacket, Ssh} = ssh_transport:key_exchange_init_msg(Ssh0),
+ State = State0#state{ssh_params = Ssh,
+ key_exchange_init_msg = KeyInitMsg,
+ renegotiate = true},
+ send_msg(SshPacket, State),
+ kexinit(Event, State).
%%--------------------------------------------------------------------
-spec handle_event(#ssh_msg_disconnect{} | #ssh_msg_ignore{} | #ssh_msg_debug{} |
@@ -575,44 +601,17 @@ handle_event(#ssh_msg_disconnect{description = Desc} = DisconnectMsg, _StateName
handle_event(#ssh_msg_ignore{}, StateName, State) ->
{next_state, StateName, next_packet(State)};
-handle_event(#ssh_msg_debug{always_display = true, message = DbgMsg},
- StateName, State) ->
- io:format("DEBUG: ~p\n", [DbgMsg]),
- {next_state, StateName, next_packet(State)};
-
-handle_event(#ssh_msg_debug{}, StateName, State) ->
+handle_event(#ssh_msg_debug{always_display = Display, message = DbgMsg, language=Lang},
+ StateName, #state{opts = Opts} = State) ->
+ F = proplists:get_value(ssh_msg_debug_fun, Opts,
+ fun(_ConnRef, _AlwaysDisplay, _Msg, _Language) -> ok end
+ ),
+ catch F(self(), Display, DbgMsg, Lang),
{next_state, StateName, next_packet(State)};
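Sketch of supplying the option read above (the name ssh_msg_debug_fun and the fun/4 arity come from the code; host is a placeholder): incoming SSH_MSG_DEBUG packets are handed to the fun instead of being printed or dropped.

    DebugFun = fun(_ConnRef, AlwaysDisplay, Msg, Lang) ->
                       error_logger:info_msg("ssh debug (~p,~p): ~p~n",
                                             [AlwaysDisplay, Lang, Msg])
               end,
    {ok, _Conn} = ssh:connect("host.example.com", 22,
                              [{ssh_msg_debug_fun, DebugFun}]).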
handle_event(#ssh_msg_unimplemented{}, StateName, State) ->
{next_state, StateName, next_packet(State)};
-handle_event({adjust_window, ChannelId, Bytes}, StateName,
- #state{connection_state =
- #connection{channel_cache = Cache}} = State0) ->
- State =
- case ssh_channel:cache_lookup(Cache, ChannelId) of
- #channel{recv_window_size = WinSize, remote_id = Id} = Channel ->
- ssh_channel:cache_update(Cache, Channel#channel{recv_window_size =
- WinSize + Bytes}),
- Msg = ssh_connection:channel_adjust_window_msg(Id, Bytes),
- send_replies([{connection_reply, Msg}], State0);
- undefined ->
- State0
- end,
- {next_state, StateName, next_packet(State)};
-
-handle_event({reply_request, success, ChannelId}, StateName,
- #state{connection_state =
- #connection{channel_cache = Cache}} = State0) ->
- State = case ssh_channel:cache_lookup(Cache, ChannelId) of
- #channel{remote_id = RemoteId} ->
- Msg = ssh_connection:channel_success_msg(RemoteId),
- send_replies([{connection_reply, Msg}], State0);
- undefined ->
- State0
- end,
- {next_state, StateName, State};
-
handle_event(renegotiate, connected, #state{ssh_params = Ssh0}
= State) ->
{KeyInitMsg, SshPacket, Ssh} = ssh_transport:key_exchange_init_msg(Ssh0),
@@ -624,13 +623,13 @@ handle_event(renegotiate, connected, #state{ssh_params = Ssh0}
renegotiate = true})};
handle_event(renegotiate, StateName, State) ->
- timer:apply_after(?REKEY_TIMOUT, gen_fsm, send_all_state_event, [self(), renegotiate]),
- %% Allready in keyexcahange so ignore
+ %% Already in key-exchange so safe to ignore
{next_state, StateName, State};
%% Rekey due to sent data limit reached?
handle_event(data_size, connected, #state{ssh_params = Ssh0} = State) ->
- {ok, [{send_oct,Sent}]} = inet:getstat(State#state.socket, [send_oct]),
+ {ok, [{send_oct,Sent0}]} = inet:getstat(State#state.socket, [send_oct]),
+ Sent = Sent0 - State#state.last_size_rekey,
MaxSent = proplists:get_value(rekey_limit, State#state.opts, 1024000000),
timer:apply_after(?REKEY_DATA_TIMOUT, gen_fsm, send_all_state_event, [self(), data_size]),
case Sent >= MaxSent of
@@ -640,11 +639,44 @@ handle_event(data_size, connected, #state{ssh_params = Ssh0} = State) ->
{next_state, kexinit,
next_packet(State#state{ssh_params = Ssh,
key_exchange_init_msg = KeyInitMsg,
- renegotiate = true})};
+ renegotiate = true,
+ last_size_rekey = Sent0})};
_ ->
{next_state, connected, next_packet(State)}
end;
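For context, a minimal sketch of the client option that drives this clause (rekey_limit in bytes, defaulting to 1024000000 as above; host is a placeholder). With last_size_rekey the counter now measures bytes sent since the previous rekey rather than since connection setup.

    {ok, _Conn} = ssh:connect("host.example.com", 22, [{rekey_limit, 500000}]).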
handle_event(data_size, StateName, State) ->
+ %% Already in key-exchange so safe to ignore
+ {next_state, StateName, State};
+
+handle_event(Event, StateName, State) when StateName /= connected ->
+ Events = [{event, Event} | State#state.event_queue],
+ {next_state, StateName, State#state{event_queue = Events}};
+
+handle_event({adjust_window, ChannelId, Bytes}, StateName,
+ #state{connection_state =
+ #connection{channel_cache = Cache}} = State0) ->
+ State =
+ case ssh_channel:cache_lookup(Cache, ChannelId) of
+ #channel{recv_window_size = WinSize, remote_id = Id} = Channel ->
+ ssh_channel:cache_update(Cache, Channel#channel{recv_window_size =
+ WinSize + Bytes}),
+ Msg = ssh_connection:channel_adjust_window_msg(Id, Bytes),
+ send_replies([{connection_reply, Msg}], State0);
+ undefined ->
+ State0
+ end,
+ {next_state, StateName, next_packet(State)};
+
+handle_event({reply_request, success, ChannelId}, StateName,
+ #state{connection_state =
+ #connection{channel_cache = Cache}} = State0) ->
+ State = case ssh_channel:cache_lookup(Cache, ChannelId) of
+ #channel{remote_id = RemoteId} ->
+ Msg = ssh_connection:channel_success_msg(RemoteId),
+ send_replies([{connection_reply, Msg}], State0);
+ undefined ->
+ State0
+ end,
{next_state, StateName, State};
handle_event({request, ChannelPid, ChannelId, Type, Data}, StateName, State0) ->
@@ -675,8 +707,62 @@ handle_event({unknown, Data}, StateName, State) ->
sockname]} | {channel_info, channel_id(), [recv_window |
send_window]} |
{close, channel_id()} | stop, term(), state_name(), #state{})
- -> gen_fsm_state_return().
+ -> gen_fsm_sync_return().
%%--------------------------------------------------------------------
+handle_sync_event(get_print_info, _From, StateName, State) ->
+ Reply =
+ try
+ {inet:sockname(State#state.socket),
+ inet:peername(State#state.socket)
+ }
+ of
+ {{ok,Local}, {ok,Remote}} -> {{Local,Remote},io_lib:format("statename=~p",[StateName])};
+ _ -> {{"-",0},"-"}
+ catch
+ _:_ -> {{"?",0},"?"}
+ end,
+ {reply, Reply, StateName, State};
+
+handle_sync_event({connection_info, Options}, _From, StateName, State) ->
+ Info = ssh_info(Options, State, []),
+ {reply, Info, StateName, State};
+
+handle_sync_event({channel_info, ChannelId, Options}, _From, StateName,
+ #state{connection_state = #connection{channel_cache = Cache}} = State) ->
+ case ssh_channel:cache_lookup(Cache, ChannelId) of
+ #channel{} = Channel ->
+ Info = ssh_channel_info(Options, Channel, []),
+ {reply, Info, StateName, State};
+ undefined ->
+ {reply, [], StateName, State}
+ end;
+
+handle_sync_event({info, ChannelPid}, _From, StateName,
+ #state{connection_state =
+ #connection{channel_cache = Cache}} = State) ->
+ Result = ssh_channel:cache_foldl(
+ fun(Channel, Acc) when ChannelPid == all;
+ Channel#channel.user == ChannelPid ->
+ [Channel | Acc];
+ (_, Acc) ->
+ Acc
+ end, [], Cache),
+ {reply, {ok, Result}, StateName, State};
+
+handle_sync_event(stop, _, _StateName, #state{connection_state = Connection0,
+ role = Role} = State0) ->
+ {disconnect, _Reason, {{replies, Replies}, Connection}} =
+ ssh_connection:handle_msg(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_BY_APPLICATION,
+ description = "User closed down connection",
+ language = "en"}, Connection0, Role),
+ State = send_replies(Replies, State0),
+ {stop, normal, ok, State#state{connection_state = Connection}};
+
+
+handle_sync_event(Event, From, StateName, State) when StateName /= connected ->
+ Events = [{sync, Event, From} | State#state.event_queue],
+ {next_state, StateName, State#state{event_queue = Events}};
+
handle_sync_event({request, ChannelPid, ChannelId, Type, Data, Timeout}, From, StateName, State0) ->
{{replies, Replies}, State1} = handle_request(ChannelPid,
ChannelId, Type, Data,
@@ -746,7 +832,9 @@ handle_sync_event({open, ChannelPid, Type, InitialWindowSize, MaxPacketSize, Dat
user = ChannelPid,
local_id = ChannelId,
recv_window_size = InitialWindowSize,
- recv_packet_size = MaxPacketSize},
+ recv_packet_size = MaxPacketSize,
+ send_buf = queue:new()
+ },
ssh_channel:cache_update(Cache, Channel),
State = add_request(true, ChannelId, From, State2),
start_timeout(ChannelId, From, Timeout),
@@ -777,46 +865,6 @@ handle_sync_event({recv_window, ChannelId}, _From, StateName,
end,
{reply, Reply, StateName, next_packet(State)};
-handle_sync_event(get_print_info, _From, StateName, State) ->
- Reply =
- try
- {inet:sockname(State#state.socket),
- inet:peername(State#state.socket)
- }
- of
- {{ok,Local}, {ok,Remote}} -> {{Local,Remote},io_lib:format("statename=~p",[StateName])};
- _ -> {{"-",0},"-"}
- catch
- _:_ -> {{"?",0},"?"}
- end,
- {reply, Reply, StateName, State};
-
-handle_sync_event({connection_info, Options}, _From, StateName, State) ->
- Info = ssh_info(Options, State, []),
- {reply, Info, StateName, State};
-
-handle_sync_event({channel_info, ChannelId, Options}, _From, StateName,
- #state{connection_state = #connection{channel_cache = Cache}} = State) ->
- case ssh_channel:cache_lookup(Cache, ChannelId) of
- #channel{} = Channel ->
- Info = ssh_channel_info(Options, Channel, []),
- {reply, Info, StateName, State};
- undefined ->
- {reply, [], StateName, State}
- end;
-
-handle_sync_event({info, ChannelPid}, _From, StateName,
- #state{connection_state =
- #connection{channel_cache = Cache}} = State) ->
- Result = ssh_channel:cache_foldl(
- fun(Channel, Acc) when ChannelPid == all;
- Channel#channel.user == ChannelPid ->
- [Channel | Acc];
- (_, Acc) ->
- Acc
- end, [], Cache),
- {reply, {ok, Result}, StateName, State};
-
handle_sync_event({close, ChannelId}, _, StateName,
#state{connection_state =
#connection{channel_cache = Cache}} = State0) ->
@@ -831,19 +879,7 @@ handle_sync_event({close, ChannelId}, _, StateName,
undefined ->
State0
end,
- {reply, ok, StateName, next_packet(State)};
-
-handle_sync_event(stop, _, _StateName, #state{connection_state = Connection0,
- role = Role,
- opts = Opts} = State0) ->
- {disconnect, Reason, {{replies, Replies}, Connection}} =
- ssh_connection:handle_msg(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_BY_APPLICATION,
- description = "User closed down connection",
- language = "en"}, Connection0, Role),
- State = send_replies(Replies, State0),
- SSHOpts = proplists:get_value(ssh_opts, Opts),
- disconnect_fun(Reason, SSHOpts),
- {stop, normal, ok, State#state{connection_state = Connection}}.
+ {reply, ok, StateName, next_packet(State)}.
%%--------------------------------------------------------------------
-spec handle_info({atom(), port(), binary()} | {atom(), port()} |
@@ -1131,54 +1167,38 @@ init_ssh(server = Role, Vsn, Version, Options, Socket) ->
supported_host_keys(client, _, Options) ->
try
- case extract_algs(proplists:get_value(pref_public_key_algs, Options, false), []) of
- false ->
- ["ssh-rsa", "ssh-dss"];
- Algs ->
- Algs
+ case proplists:get_value(public_key,
+ proplists:get_value(preferred_algorithms,Options,[])
+ ) of
+ undefined ->
+ ssh_auth:default_public_key_algorithms();
+ L ->
+ L -- (L--ssh_auth:default_public_key_algorithms())
end
+ of
+ [] ->
+ {stop, {shutdown, "No public key algs"}};
+ Algs ->
+ [atom_to_list(A) || A<-Algs]
catch
exit:Reason ->
{stop, {shutdown, Reason}}
end;
supported_host_keys(server, KeyCb, Options) ->
- lists:foldl(fun(Type, Acc) ->
- case available_host_key(KeyCb, Type, Options) of
- {error, _} ->
- Acc;
- Alg ->
- [Alg | Acc]
- end
- end, [],
- %% Prefered alg last so no need to reverse
- ["ssh-dss", "ssh-rsa"]).
-extract_algs(false, _) ->
- false;
-extract_algs([],[]) ->
- false;
-extract_algs([], NewList) ->
- lists:reverse(NewList);
-extract_algs([H|T], NewList) ->
- case H of
- 'ssh-dss' ->
- extract_algs(T, ["ssh-dss"|NewList]);
- 'ssh-rsa' ->
- extract_algs(T, ["ssh-rsa"|NewList])
- end.
-available_host_key(KeyCb, "ssh-dss"= Alg, Opts) ->
- case KeyCb:host_key('ssh-dss', Opts) of
- {ok, _} ->
- Alg;
- Other ->
- Other
- end;
-available_host_key(KeyCb, "ssh-rsa" = Alg, Opts) ->
- case KeyCb:host_key('ssh-rsa', Opts) of
- {ok, _} ->
- Alg;
- Other ->
- Other
- end.
+ Algs=
+ [atom_to_list(A) || A <- proplists:get_value(public_key,
+ proplists:get_value(preferred_algorithms,Options,[]),
+ ssh_auth:default_public_key_algorithms()
+ ),
+ available_host_key(KeyCb, A, Options)
+ ],
+ Algs.
+
+
+%% Alg :: atom()
+available_host_key(KeyCb, Alg, Opts) ->
+ element(1, catch KeyCb:host_key(Alg, Opts)) == ok.
+
send_msg(Msg, #state{socket = Socket, transport_cb = Transport}) ->
Transport:send(Socket, Msg).
@@ -1204,7 +1224,11 @@ sync_send_all_state_event(FsmPid, Event) ->
sync_send_all_state_event(FsmPid, Event, infinity).
sync_send_all_state_event(FsmPid, Event, Timeout) ->
- try gen_fsm:sync_send_all_state_event(FsmPid, Event, Timeout)
+ try gen_fsm:sync_send_all_state_event(FsmPid, Event, Timeout) of
+ {closed, _Channel} ->
+ {error, closed};
+ Result ->
+ Result
catch
exit:{noproc, _} ->
{error, closed};
@@ -1232,10 +1256,9 @@ event(Event, StateName, State) ->
handle_disconnect(DisconnectMsg, State);
throw:{ErrorToDisplay, #ssh_msg_disconnect{} = DisconnectMsg} ->
handle_disconnect(DisconnectMsg, State, ErrorToDisplay);
- _:Error ->
- log_error(Error),
+ _:_ ->
handle_disconnect(#ssh_msg_disconnect{code = error_code(StateName),
- description = "Internal error",
+ description = "Invalid state",
language = "en"}, State)
end.
error_code(key_exchange) ->
@@ -1249,7 +1272,6 @@ generate_event(<<?BYTE(Byte), _/binary>> = Msg, StateName,
#state{
role = Role,
starter = User,
- opts = Opts,
renegotiate = Renegotiation,
connection_state = Connection0} = State0, EncData)
when Byte == ?SSH_MSG_GLOBAL_REQUEST;
@@ -1269,8 +1291,17 @@ generate_event(<<?BYTE(Byte), _/binary>> = Msg, StateName,
ConnectionMsg = ssh_message:decode(Msg),
State1 = generate_event_new_state(State0, EncData),
try ssh_connection:handle_msg(ConnectionMsg, Connection0, Role) of
- {{replies, Replies}, Connection} ->
- State = send_replies(Replies, State1#state{connection_state = Connection}),
+ {{replies, Replies0}, Connection} ->
+ if StateName == connected ->
+ Replies = Replies0,
+ State2 = State1;
+ true ->
+ {ConnReplies, Replies} =
+ lists:splitwith(fun not_connected_filter/1, Replies0),
+ Q = State1#state.event_queue ++ ConnReplies,
+ State2 = State1#state{ event_queue = Q }
+ end,
+ State = send_replies(Replies, State2#state{connection_state = Connection}),
{next_state, StateName, next_packet(State)};
{noreply, Connection} ->
{next_state, StateName, next_packet(State1#state{connection_state = Connection})};
@@ -1280,21 +1311,17 @@ generate_event(<<?BYTE(Byte), _/binary>> = Msg, StateName,
User ! {self(), not_connected, Reason},
{stop, {shutdown, normal},
next_packet(State#state{connection_state = Connection})};
- {disconnect, Reason, {{replies, Replies}, Connection}} ->
+ {disconnect, _Reason, {{replies, Replies}, Connection}} ->
State = send_replies(Replies, State1#state{connection_state = Connection}),
- SSHOpts = proplists:get_value(ssh_opts, Opts),
- disconnect_fun(Reason, SSHOpts),
{stop, {shutdown, normal}, State#state{connection_state = Connection}}
catch
_:Error ->
- {disconnect, Reason, {{replies, Replies}, Connection}} =
+ {disconnect, _Reason, {{replies, Replies}, Connection}} =
ssh_connection:handle_msg(
#ssh_msg_disconnect{code = ?SSH_DISCONNECT_BY_APPLICATION,
description = "Internal error",
language = "en"}, Connection0, Role),
State = send_replies(Replies, State1#state{connection_state = Connection}),
- SSHOpts = proplists:get_value(ssh_opts, Opts),
- disconnect_fun(Reason, SSHOpts),
{stop, {shutdown, Error}, State#state{connection_state = Connection}}
end;
@@ -1443,15 +1470,43 @@ next_packet(#state{socket = Socket} = State) ->
State.
after_new_keys(#state{renegotiate = true} = State) ->
- {connected, State#state{renegotiate = false}};
+ State1 = State#state{renegotiate = false, event_queue = []},
+ lists:foldr(fun after_new_keys_events/2, {next_state, connected, State1}, State#state.event_queue);
after_new_keys(#state{renegotiate = false,
ssh_params = #ssh{role = client} = Ssh0} = State) ->
{Msg, Ssh} = ssh_auth:service_request_msg(Ssh0),
send_msg(Msg, State),
- {userauth, State#state{ssh_params = Ssh}};
+ {next_state, userauth, State#state{ssh_params = Ssh}};
after_new_keys(#state{renegotiate = false,
ssh_params = #ssh{role = server}} = State) ->
- {userauth, State}.
+ {next_state, userauth, State}.
+
+after_new_keys_events({sync, _Event, From}, {stop, _Reason, _StateData}=Terminator) ->
+ gen_fsm:reply(From, {error, closed}),
+ Terminator;
+after_new_keys_events(_, {stop, _Reason, _StateData}=Terminator) ->
+ Terminator;
+after_new_keys_events({sync, Event, From}, {next_state, StateName, StateData}) ->
+ case handle_sync_event(Event, From, StateName, StateData) of
+ {reply, Reply, NextStateName, NewStateData} ->
+ gen_fsm:reply(From, Reply),
+ {next_state, NextStateName, NewStateData};
+ {next_state, NextStateName, NewStateData}->
+ {next_state, NextStateName, NewStateData};
+ {stop, Reason, Reply, NewStateData} ->
+ gen_fsm:reply(From, Reply),
+ {stop, Reason, NewStateData}
+ end;
+after_new_keys_events({event, Event}, {next_state, StateName, StateData}) ->
+ case handle_event(Event, StateName, StateData) of
+ {next_state, NextStateName, NewStateData}->
+ {next_state, NextStateName, NewStateData};
+ {stop, Reason, NewStateData} ->
+ {stop, Reason, NewStateData}
+ end;
+after_new_keys_events({connection_reply, _Data} = Reply, {StateName, State}) ->
+ NewState = send_replies([Reply], State),
+ {next_state, StateName, NewState}.
handle_ssh_packet_data(RemainingSshPacketLen, DecData, EncData, StateName,
State) ->
@@ -1513,12 +1568,14 @@ handle_disconnect(#ssh_msg_disconnect{} = DisconnectMsg, State, Error) ->
handle_disconnect(Type, #ssh_msg_disconnect{description = Desc} = Msg, #state{connection_state = Connection0, role = Role} = State0) ->
{disconnect, _, {{replies, Replies}, Connection}} = ssh_connection:handle_msg(Msg, Connection0, Role),
State = send_replies(disconnect_replies(Type, Msg, Replies), State0),
+ disconnect_fun(Desc, State#state.opts),
{stop, {shutdown, Desc}, State#state{connection_state = Connection}}.
handle_disconnect(Type, #ssh_msg_disconnect{description = Desc} = Msg, #state{connection_state = Connection0,
role = Role} = State0, ErrorMsg) ->
{disconnect, _, {{replies, Replies}, Connection}} = ssh_connection:handle_msg(Msg, Connection0, Role),
State = send_replies(disconnect_replies(Type, Msg, Replies), State0),
+ disconnect_fun(Desc, State#state.opts),
{stop, {shutdown, {Desc, ErrorMsg}}, State#state{connection_state = Connection}}.
disconnect_replies(own, Msg, Replies) ->
@@ -1612,6 +1669,11 @@ log_error(Reason) ->
error_logger:error_report(Report),
"Internal error".
+not_connected_filter({connection_reply, _Data}) ->
+ true;
+not_connected_filter(_) ->
+ false.
+
send_replies([], State) ->
State;
send_replies([{connection_reply, Data} | Rest], #state{ssh_params = Ssh0} = State) ->
@@ -1632,6 +1694,8 @@ send_reply({flow_control, Cache, Channel, From, Msg}) ->
send_reply({flow_control, From, Msg}) ->
gen_fsm:reply(From, Msg).
+disconnect_fun({disconnect,Msg}, Opts) ->
+ disconnect_fun(Msg, Opts);
disconnect_fun(_, undefined) ->
ok;
disconnect_fun(Reason, Opts) ->
@@ -1702,10 +1766,19 @@ handshake(Pid, Ref, Timeout) ->
{error, Reason}
after Timeout ->
stop(Pid),
- {error, Timeout}
+ {error, timeout}
end.
start_timeout(_,_, infinity) ->
ok;
start_timeout(Channel, From, Time) ->
erlang:send_after(Time, self(), {timeout, {Channel, From}}).
+
+getopt(Opt, Socket) ->
+ case inet:getopts(Socket, [Opt]) of
+ {ok, [{Opt, Value}]} ->
+ {ok, Value};
+ Other ->
+ {error, {unexpected_getopts_return, Other}}
+ end.
+
diff --git a/lib/ssh/src/ssh_info.erl b/lib/ssh/src/ssh_info.erl
index 9ed598b3ab..9c79d773a7 100644
--- a/lib/ssh/src/ssh_info.erl
+++ b/lib/ssh/src/ssh_info.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -27,18 +27,21 @@
-compile(export_all).
print() ->
+ print(user).
+
+print(D) ->
try supervisor:which_children(ssh_sup)
of
_ ->
- io:nl(),
- print_general(),
- io:nl(),
- underline("Client part", $=),
- print_clients(),
- io:nl(),
- underline("Server part", $=),
- print_servers(),
- io:nl(),
+ io:nl(D),
+ print_general(D),
+ io:nl(D),
+ underline(D, "Client part", $=),
+ print_clients(D),
+ io:nl(D),
+ underline(D, "Server part", $=),
+ print_servers(D),
+ io:nl(D),
%% case os:type() of
%% {unix,_} ->
%% io:nl(),
@@ -50,90 +53,95 @@ print() ->
%% catch io:format(os:cmd("netstat -tpn"));
%% _ -> ok
%% end,
- underline("Supervisors", $=),
- walk_sups(ssh_sup),
- io:nl()
+ underline(D, "Supervisors", $=),
+ walk_sups(D, ssh_sup),
+ io:nl(D)
catch
_:_ ->
- io:format("Ssh not found~n",[])
+ io:format(D,"Ssh not found~n",[])
end.
%%%================================================================
-print_general() ->
+print_general(D) ->
{_Name, Slogan, Ver} = lists:keyfind(ssh,1,application:which_applications()),
- underline(io_lib:format("~s ~s", [Slogan, Ver]), $=),
- io:format('This printout is generated ~s. ~n',[datetime()]).
+ underline(D, io_lib:format("~s ~s", [Slogan, Ver]), $=),
+ io:format(D, 'This printout is generated ~s. ~n',[datetime()]).
%%%================================================================
-print_clients() ->
+print_clients(D) ->
+ PrintClient = fun(X) -> print_client(D,X) end,
try
- lists:foreach(fun print_client/1, supervisor:which_children(sshc_sup))
+ lists:foreach(PrintClient, supervisor:which_children(sshc_sup))
catch
C:E ->
- io:format('***FAILED: ~p:~p~n',[C,E])
+ io:format(D, '***FAILED: ~p:~p~n',[C,E])
end.
-print_client({undefined,Pid,supervisor,[ssh_connection_handler]}) ->
+print_client(D, {undefined,Pid,supervisor,[ssh_connection_handler]}) ->
{{Local,Remote},_Str} = ssh_connection_handler:get_print_info(Pid),
- io:format(" Local=~s Remote=~s~n",[fmt_host_port(Local),fmt_host_port(Remote)]);
-print_client(Other) ->
- io:format(" [[Other 1: ~p]]~n",[Other]).
+ io:format(D, " Local=~s Remote=~s~n",[fmt_host_port(Local),fmt_host_port(Remote)]);
+print_client(D, Other) ->
+ io:format(D, " [[Other 1: ~p]]~n",[Other]).
%%%================================================================
-print_servers() ->
+print_servers(D) ->
+ PrintServer = fun(X) -> print_server(D,X) end,
try
- lists:foreach(fun print_server/1, supervisor:which_children(sshd_sup))
+ lists:foreach(PrintServer, supervisor:which_children(sshd_sup))
catch
C:E ->
- io:format('***FAILED: ~p:~p~n',[C,E])
+ io:format(D, '***FAILED: ~p:~p~n',[C,E])
end.
-print_server({{server,ssh_system_sup,LocalHost,LocalPort},Pid,supervisor,[ssh_system_sup]}) when is_pid(Pid) ->
- io:format('Local=~s (~p children)~n',[fmt_host_port({LocalHost,LocalPort}),
- ssh_acceptor:number_of_connections(Pid)]),
- lists:foreach(fun print_system_sup/1, supervisor:which_children(Pid));
-print_server(Other) ->
- io:format(" [[Other 2: ~p]]~n",[Other]).
+print_server(D, {{server,ssh_system_sup,LocalHost,LocalPort},Pid,supervisor,[ssh_system_sup]}) when is_pid(Pid) ->
+ io:format(D, 'Local=~s (~p children)~n',[fmt_host_port({LocalHost,LocalPort}),
+ ssh_acceptor:number_of_connections(Pid)]),
+ PrintSystemSup = fun(X) -> print_system_sup(D,X) end,
+ lists:foreach(PrintSystemSup, supervisor:which_children(Pid));
+print_server(D, Other) ->
+ io:format(D, " [[Other 2: ~p]]~n",[Other]).
-print_system_sup({Ref,Pid,supervisor,[ssh_subsystem_sup]}) when is_reference(Ref),
+print_system_sup(D, {Ref,Pid,supervisor,[ssh_subsystem_sup]}) when is_reference(Ref),
is_pid(Pid) ->
- lists:foreach(fun print_channels/1, supervisor:which_children(Pid));
-print_system_sup({{ssh_acceptor_sup,LocalHost,LocalPort}, Pid,supervisor, [ssh_acceptor_sup]}) when is_pid(Pid) ->
- io:format(" [Acceptor for ~s]~n",[fmt_host_port({LocalHost,LocalPort})]);
-print_system_sup(Other) ->
- io:format(" [[Other 3: ~p]]~n",[Other]).
-
-print_channels({{server,ssh_channel_sup,_,_},Pid,supervisor,[ssh_channel_sup]}) when is_pid(Pid) ->
- lists:foreach(fun print_channel/1, supervisor:which_children(Pid));
-print_channels(Other) ->
- io:format(" [[Other 4: ~p]]~n",[Other]).
-
-
-print_channel({Ref,Pid,worker,[ssh_channel]}) when is_reference(Ref),
- is_pid(Pid) ->
+ PrintChannels = fun(X) -> print_channels(D,X) end,
+ lists:foreach(PrintChannels, supervisor:which_children(Pid));
+print_system_sup(D, {{ssh_acceptor_sup,LocalHost,LocalPort}, Pid,supervisor, [ssh_acceptor_sup]}) when is_pid(Pid) ->
+ io:format(D, " [Acceptor for ~s]~n",[fmt_host_port({LocalHost,LocalPort})]);
+print_system_sup(D, Other) ->
+ io:format(D, " [[Other 3: ~p]]~n",[Other]).
+
+print_channels(D, {{server,ssh_channel_sup,_,_},Pid,supervisor,[ssh_channel_sup]}) when is_pid(Pid) ->
+ PrintChannel = fun(X) -> print_channel(D,X) end,
+ lists:foreach(PrintChannel, supervisor:which_children(Pid));
+print_channels(D, Other) ->
+ io:format(D, " [[Other 4: ~p]]~n",[Other]).
+
+
+print_channel(D, {Ref,Pid,worker,[ssh_channel]}) when is_reference(Ref),
+ is_pid(Pid) ->
{{ConnManager,ChannelID}, Str} = ssh_channel:get_print_info(Pid),
{{Local,Remote},StrM} = ssh_connection_handler:get_print_info(ConnManager),
- io:format(' ch ~p: ~s ~s',[ChannelID, StrM, Str]),
- io:format(" Local=~s Remote=~s~n",[fmt_host_port(Local),fmt_host_port(Remote)]);
-print_channel(Other) ->
- io:format(" [[Other 5: ~p]]~n",[Other]).
+ io:format(D, ' ch ~p: ~s ~s',[ChannelID, StrM, Str]),
+ io:format(D, " Local=~s Remote=~s~n",[fmt_host_port(Local),fmt_host_port(Remote)]);
+print_channel(D, Other) ->
+ io:format(D, " [[Other 5: ~p]]~n",[Other]).
%%%================================================================
-define(inc(N), (N+4)).
-walk_sups(StartPid) ->
- io:format("Start at ~p, ~s.~n",[StartPid,dead_or_alive(StartPid)]),
- walk_sups(children(StartPid), _Indent=?inc(0)).
+walk_sups(D, StartPid) ->
+ io:format(D, "Start at ~p, ~s.~n",[StartPid,dead_or_alive(StartPid)]),
+ walk_sups(D, children(StartPid), _Indent=?inc(0)).
-walk_sups([H={_,Pid,SupOrWorker,_}|T], Indent) ->
- indent(Indent), io:format('~200p ~p is ~s~n',[H,Pid,dead_or_alive(Pid)]),
+walk_sups(D, [H={_,Pid,SupOrWorker,_}|T], Indent) ->
+ indent(D, Indent), io:format(D, '~200p ~p is ~s~n',[H,Pid,dead_or_alive(Pid)]),
case SupOrWorker of
- supervisor -> walk_sups(children(Pid), ?inc(Indent));
+ supervisor -> walk_sups(D, children(Pid), ?inc(Indent));
_ -> ok
end,
- walk_sups(T, Indent);
-walk_sups([], _) ->
+ walk_sups(D, T, Indent);
+walk_sups(_D, [], _) ->
ok.
dead_or_alive(Name) when is_atom(Name) ->
@@ -149,7 +157,7 @@ dead_or_alive(Pid) when is_pid(Pid) ->
_ -> "alive"
end.
-indent(I) -> io:format('~*c',[I,$ ]).
+indent(D, I) -> io:format(D,'~*c',[I,$ ]).
children(Pid) ->
Parent = self(),
@@ -166,20 +174,20 @@ children(Pid) ->
end.
%%%================================================================
-underline(Str) ->
- underline(Str, $-).
+underline(D, Str) ->
+ underline(D, Str, $-).
-underline(Str, LineChar) ->
+underline(D, Str, LineChar) ->
Len = lists:flatlength(Str),
- io:format('~s~n',[Str]),
- line(Len,LineChar).
+ io:format(D, '~s~n',[Str]),
+ line(D,Len,LineChar).
-line(Len, Char) ->
- io:format('~*c~n', [Len,Char]).
+line(D, Len, Char) ->
+ io:format(D, '~*c~n', [Len,Char]).
datetime() ->
- {{YYYY,MM,DD}, {H,M,S}} = calendar:now_to_universal_time(now()),
+ {{YYYY,MM,DD}, {H,M,S}} = calendar:now_to_universal_time(erlang:timestamp()),
lists:flatten(io_lib:format('~4w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w UTC',[YYYY,MM,DD, H,M,S])).
@@ -188,6 +196,6 @@ fmt_host_port({Host,Port}) -> io_lib:format('~s:~p',[Host,Port]).
-nyi() ->
- io:format('Not yet implemented~n',[]),
+nyi(D) ->
+ io:format(D,'Not yet implemented~n',[]),
nyi.
diff --git a/lib/ssh/src/ssh_sftp.erl b/lib/ssh/src/ssh_sftp.erl
index 613f8f25b2..bab688f226 100644
--- a/lib/ssh/src/ssh_sftp.erl
+++ b/lib/ssh/src/ssh_sftp.erl
@@ -111,7 +111,7 @@ start_channel(Cm, Opts) when is_pid(Cm) ->
TimeOut
end;
{error, Reason} ->
- {error, Reason};
+ {error, format_channel_start_error(Reason)};
ignore ->
{error, ignore}
end;
@@ -136,7 +136,7 @@ start_channel(Host, Port, Opts) ->
TimeOut
end;
{error, Reason} ->
- {error, Reason};
+ {error, format_channel_start_error(Reason)};
ignore ->
{error, ignore}
end;
@@ -491,9 +491,9 @@ init([Cm, ChannelId, Options]) ->
inf = new_inf(),
opts = Options}};
failure ->
- {stop, "server failed to start sftp subsystem"};
+ {stop, {shutdown, "server failed to start sftp subsystem"}};
Error ->
- {stop, Error}
+ {stop, {shutdown, Error}}
end.
%%--------------------------------------------------------------------
@@ -508,12 +508,12 @@ init([Cm, ChannelId, Options]) ->
%%--------------------------------------------------------------------
handle_call({{timeout, infinity}, wait_for_version_negotiation}, From,
#state{xf = #ssh_xfer{vsn = undefined} = Xf} = State) ->
- {noreply, State#state{xf = Xf#ssh_xfer{vsn = From}}};
+ {noreply, State#state{xf = Xf#ssh_xfer{vsn = {wait, From, undefined}}}};
handle_call({{timeout, Timeout}, wait_for_version_negotiation}, From,
#state{xf = #ssh_xfer{vsn = undefined} = Xf} = State) ->
- timer:send_after(Timeout, {timeout, undefined, From}),
- {noreply, State#state{xf = Xf#ssh_xfer{vsn = From}}};
+ TRef = erlang:send_after(Timeout, self(), {timeout, undefined, From}),
+ {noreply, State#state{xf = Xf#ssh_xfer{vsn = {wait, From, TRef}}}};
handle_call({_, wait_for_version_negotiation}, _, State) ->
{reply, ok, State};
@@ -865,7 +865,12 @@ do_handle_reply(#state{xf = Xf} = State,
case Xf#ssh_xfer.vsn of
undefined ->
ok;
- From ->
+ {wait, From, TRef} ->
+ if is_reference(TRef) ->
+ erlang:cancel_timer(TRef);
+ true ->
+ ok
+ end,
ssh_channel:reply(From, ok)
end,
State#state{xf = Xf#ssh_xfer{vsn = Version, ext = Ext}, rep_buf = Rest};
@@ -1412,3 +1417,8 @@ open_buf1(Pid, BufInfo0, FileOpTimeout, CryptoState, ChunkSize) ->
BufHandle = make_ref(),
call(Pid, {put_bufinf,BufHandle,BufInfo}, FileOpTimeout),
{ok,BufHandle}.
+
+format_channel_start_error({shutdown, Reason}) ->
+ Reason;
+format_channel_start_error(Reason) ->
+ Reason.
diff --git a/lib/ssh/src/ssh_sftpd.erl b/lib/ssh/src/ssh_sftpd.erl
index 52665635f0..04ae6b11e2 100644
--- a/lib/ssh/src/ssh_sftpd.erl
+++ b/lib/ssh/src/ssh_sftpd.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -559,56 +559,73 @@ stat(ReqId, RelPath, State0=#state{file_handler=FileMod,
send_status({error, E}, ReqId, State1)
end.
-decode_4_open_flag(create_new) ->
- [write];
-decode_4_open_flag(create_truncate) ->
- [write];
-decode_4_open_flag(truncate_existing) ->
- [write];
-decode_4_open_flag(open_existing) ->
- [read].
-
-decode_4_flags([OpenFlag | Flags]) ->
- decode_4_flags(Flags, decode_4_open_flag(OpenFlag)).
-
-decode_4_flags([], Flags) ->
- Flags;
-decode_4_flags([append_data|R], _Flags) ->
- decode_4_flags(R, [append]);
-decode_4_flags([append_data_atomic|R], _Flags) ->
- decode_4_flags(R, [append]);
-decode_4_flags([_|R], Flags) ->
- decode_4_flags(R, Flags).
-
-decode_4_access_flag(read_data) ->
- [read];
-decode_4_access_flag(list_directory) ->
- [read];
-decode_4_access_flag(write_data) ->
- [write];
-decode_4_access_flag(add_file) ->
- [write];
-decode_4_access_flag(add_subdirectory) ->
- [read];
-decode_4_access_flag(append_data) ->
- [append];
-decode_4_access_flag(write_attributes) ->
- [write];
-decode_4_access_flag(_) ->
- [read].
-
-decode_4_acess([_ | _] = Flags) ->
+sftp_to_erlang_flag(read, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ read;
+sftp_to_erlang_flag(write, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ write;
+sftp_to_erlang_flag(append, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ append;
+sftp_to_erlang_flag(creat, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ write;
+sftp_to_erlang_flag(trunc, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ write;
+sftp_to_erlang_flag(excl, Vsn) when Vsn == 3;
+ Vsn == 4 ->
+ read;
+sftp_to_erlang_flag(create_new, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(create_truncate, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(open_existing, Vsn) when Vsn > 4 ->
+ read;
+sftp_to_erlang_flag(open_or_create, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(truncate_existing, Vsn) when Vsn > 4 ->
+ write;
+sftp_to_erlang_flag(append_data, Vsn) when Vsn > 4 ->
+ append;
+sftp_to_erlang_flag(append_data_atomic, Vsn) when Vsn > 4 ->
+ append;
+sftp_to_erlang_flag(_, _) ->
+ read.
+
+sftp_to_erlang_flags(Flags, Vsn) ->
lists:map(fun(Flag) ->
- [decode_4_access_flag(Flag)]
- end, Flags);
-decode_4_acess([]) ->
- [].
+ sftp_to_erlang_flag(Flag, Vsn)
+ end, Flags).
+
+sftp_to_erlang_access_flag(read_data, _) ->
+ read;
+sftp_to_erlang_access_flag(list_directory, _) ->
+ read;
+sftp_to_erlang_access_flag(write_data, _) ->
+ write;
+sftp_to_erlang_access_flag(append_data, _) ->
+ append;
+sftp_to_erlang_access_flag(add_subdirectory, _) ->
+ read;
+sftp_to_erlang_access_flag(add_file, _) ->
+ write;
+sftp_to_erlang_access_flag(write_attributes, _) ->
+ write;
+sftp_to_erlang_access_flag(_, _) ->
+ read.
+sftp_to_erlang_access_flags(Flags, Vsn) ->
+ lists:map(fun(Flag) ->
+ sftp_to_erlang_access_flag(Flag, Vsn)
+ end, Flags).
open(Vsn, ReqId, Data, State) when Vsn =< 3 ->
<<?UINT32(BLen), BPath:BLen/binary, ?UINT32(PFlags),
_Attrs/binary>> = Data,
Path = unicode:characters_to_list(BPath),
- Flags = ssh_xfer:decode_open_flags(Vsn, PFlags),
+ FlagBits = ssh_xfer:decode_open_flags(Vsn, PFlags),
+ Flags = lists:usort(sftp_to_erlang_flags(FlagBits, Vsn)),
do_open(ReqId, State, Path, Flags);
open(Vsn, ReqId, Data, State) when Vsn >= 4 ->
<<?UINT32(BLen), BPath:BLen/binary, ?UINT32(Access),
@@ -616,15 +633,12 @@ open(Vsn, ReqId, Data, State) when Vsn >= 4 ->
Path = unicode:characters_to_list(BPath),
FlagBits = ssh_xfer:decode_open_flags(Vsn, PFlags),
AcessBits = ssh_xfer:decode_ace_mask(Access),
- %% TODO: This is to make sure the Access flags are not ignored
- %% but this should be thought through better. This solution should
- %% be considered a hack in order to buy some time. At least
- %% it works better than when the Access flags where totally ignored.
- %% A better solution may need some code refactoring that we do
- %% not have time for right now.
- AcessFlags = decode_4_acess(AcessBits),
- Flags = lists:append(lists:umerge(
- [[decode_4_flags(FlagBits)] | AcessFlags])),
+ %% TODO: There are still flags that are not fully handled, such as
+ %% SSH_FXF_ACCESS_TEXT_MODE and a number of ACE flags; the latter we may
+ %% not need to understand as they are NFS flags.
+ AcessFlags = sftp_to_erlang_access_flags(AcessBits, Vsn),
+ Flags = lists:usort(sftp_to_erlang_flags(FlagBits, Vsn) ++ AcessFlags),
do_open(ReqId, State, Path, Flags).
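A condensed, standalone sketch of the v3 part of the mapping above (illustration only; the real code also covers v4+ and the access flags): protocol flag atoms collapse onto the Erlang file modes read, write and append, and duplicates are removed with lists:usort/1.

    v3_flag(read)   -> read;
    v3_flag(write)  -> write;
    v3_flag(append) -> append;
    v3_flag(creat)  -> write;
    v3_flag(trunc)  -> write;
    v3_flag(_)      -> read.

    v3_flags(Flags) -> lists:usort([v3_flag(F) || F <- Flags]).
    %% v3_flags([read, creat, trunc]) yields [read, write]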
do_open(ReqId, State0, Path, Flags) ->
diff --git a/lib/ssh/src/ssh_transport.erl b/lib/ssh/src/ssh_transport.erl
index 76fa776113..ea9bca2390 100644
--- a/lib/ssh/src/ssh_transport.erl
+++ b/lib/ssh/src/ssh_transport.erl
@@ -31,6 +31,8 @@
-export([versions/2, hello_version_msg/1]).
-export([next_seqnum/1, decrypt_first_block/2, decrypt_blocks/3,
+ supported_algorithms/0, supported_algorithms/1,
+ default_algorithms/0, default_algorithms/1,
is_valid_mac/3,
handle_hello_version/1,
key_exchange_init_msg/1,
@@ -42,14 +44,98 @@
unpack/3, decompress/2, ssh_packet/2, pack/2, msg_data/1,
sign/3, verify/4]).
+%%%----------------------------------------------------------------------------
+%%%
+%%% There is a difference between supported and default algorithms. The
+%%% SUPPORTED algorithms can be handled (maybe untested...). The DEFAULT ones
+%%% are announced in ssh_msg_kexinit and in ssh:default_algorithms/0 to the
+%%% user.
+%%%
+%%% A supported algorithm can be requested in the option 'preferred_algorithms',
+%%% but may give unexpected results because of being promoted to default.
+%%%
+%%% This makes it possible to add experimental algorithms (in supported_algorithms)
+%%% and test them without letting the default users know about them.
+%%%
+
+default_algorithms() -> [{K,default_algorithms(K)} || K <- algo_classes()].
+
+algo_classes() -> [kex, public_key, cipher, mac, compression].
+
+default_algorithms(compression) ->
+ %% Do not announce '[email protected]' because there seem to be problems
+ supported_algorithms(compression, same(['[email protected]']));
+default_algorithms(Alg) ->
+ supported_algorithms(Alg).
+
+
+supported_algorithms() -> [{K,supported_algorithms(K)} || K <- algo_classes()].
+
+supported_algorithms(kex) ->
+ ['diffie-hellman-group1-sha1'];
+supported_algorithms(public_key) ->
+ ssh_auth:default_public_key_algorithms();
+supported_algorithms(cipher) ->
+ Supports = crypto:supports(),
+ CipherAlgos = [{aes_ctr, 'aes128-ctr'}, {aes_cbc128, 'aes128-cbc'}, {des3_cbc, '3des-cbc'}],
+ Algs = [SshAlgo ||
+ {CryptoAlgo, SshAlgo} <- CipherAlgos,
+ lists:member(CryptoAlgo, proplists:get_value(ciphers, Supports, []))],
+ same(Algs);
+supported_algorithms(mac) ->
+ Supports = crypto:supports(),
+ HashAlgos = [{sha256, 'hmac-sha2-256'}, {sha, 'hmac-sha1'}],
+ Algs = [SshAlgo ||
+ {CryptoAlgo, SshAlgo} <- HashAlgos,
+ lists:member(CryptoAlgo, proplists:get_value(hashs, Supports, []))],
+ same(Algs);
+supported_algorithms(compression) ->
+ same(['none','zlib','[email protected]']).
+
+
+supported_algorithms(Key, [{client2server,BL1},{server2client,BL2}]) ->
+ [{client2server,As1},{server2client,As2}] = supported_algorithms(Key),
+ [{client2server,As1--BL1},{server2client,As2--BL2}];
+supported_algorithms(Key, BlackList) ->
+ supported_algorithms(Key) -- BlackList.
+
+
+
+
+same(Algs) -> [{client2server,Algs}, {server2client,Algs}].
+
+
+%%%----------------------------------------------------------------------------
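A sketch of the preferred_algorithms value these functions expect (shape inferred from the code above; host and the chosen algorithms are placeholders): one entry per algorithm class, with cipher/mac/compression optionally split per direction. For public_key, only algorithms also present in the default set survive (see supported_host_keys/3 in ssh_connection_handler.erl earlier in this diff).

    Prefs = [{kex, ['diffie-hellman-group1-sha1']},
             {public_key, ['ssh-rsa']},
             {cipher, [{client2server, ['aes128-ctr']},
                       {server2client, ['aes128-ctr', 'aes128-cbc']}]},
             {compression, [none, zlib]}],
    {ok, _Conn} = ssh:connect("host.example.com", 22,
                              [{preferred_algorithms, Prefs}]).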
versions(client, Options)->
Vsn = proplists:get_value(vsn, Options, ?DEFAULT_CLIENT_VERSION),
- Version = format_version(Vsn),
- {Vsn, Version};
+ {Vsn, format_version(Vsn, software_version(Options))};
versions(server, Options) ->
Vsn = proplists:get_value(vsn, Options, ?DEFAULT_SERVER_VERSION),
- Version = format_version(Vsn),
- {Vsn, Version}.
+ {Vsn, format_version(Vsn, software_version(Options))}.
+
+software_version(Options) ->
+ case proplists:get_value(id_string, Options) of
+ undefined ->
+ "Erlang"++ssh_vsn();
+ {random,Nlo,Nup} ->
+ random_id(Nlo,Nup);
+ ID ->
+ ID
+ end.
+
+ssh_vsn() ->
+ try {ok,L} = application:get_all_key(ssh),
+ proplists:get_value(vsn,L,"")
+ of
+ "" -> "";
+ VSN when is_list(VSN) -> "/" ++ VSN;
+ _ -> ""
+ catch
+ _:_ -> ""
+ end.
+
+random_id(Nlo, Nup) ->
+ [crypto:rand_uniform($a,$z+1) || _<- lists:duplicate(crypto:rand_uniform(Nlo,Nup+1),x) ].
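Sketch of the id_string option read by software_version/1 above ({random,Nlo,Nup} gives a random lowercase string of Nlo..Nup characters; ports and other daemon options are placeholders).

    {ok, _D1} = ssh:daemon(2222, [{id_string, "MySSH-2.0"}]),       %% version string SSH-2.0-MySSH-2.0
    {ok, _D2} = ssh:daemon(2223, [{id_string, {random, 6, 12}}]).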
hello_version_msg(Data) ->
[Data,"\r\n"].
@@ -77,9 +163,9 @@ is_valid_mac(Mac, Data, #ssh{recv_mac = Algorithm,
yes_no(Ssh, Prompt) ->
(Ssh#ssh.io_cb):yes_no(Prompt, Ssh).
-format_version({Major,Minor}) ->
+format_version({Major,Minor}, SoftwareVersion) ->
"SSH-" ++ integer_to_list(Major) ++ "." ++
- integer_to_list(Minor) ++ "-Erlang".
+ integer_to_list(Minor) ++ "-" ++ SoftwareVersion.
handle_hello_version(Version) ->
try
@@ -106,62 +192,45 @@ key_exchange_init_msg(Ssh0) ->
kex_init(#ssh{role = Role, opts = Opts, available_host_keys = HostKeyAlgs}) ->
Random = ssh_bits:random(16),
- Compression = case proplists:get_value(compression, Opts, none) of
- openssh_zlib -> ["[email protected]", "none"];
- zlib -> ["zlib", "none"];
- none -> ["none", "zlib"]
- end,
- kexinit_messsage(Role, Random, Compression, HostKeyAlgs).
+ PrefAlgs =
+ case proplists:get_value(preferred_algorithms,Opts) of
+ undefined ->
+ default_algorithms();
+ Algs0 ->
+ Algs0
+ end,
+ kexinit_message(Role, Random, PrefAlgs, HostKeyAlgs).
key_init(client, Ssh, Value) ->
Ssh#ssh{c_keyinit = Value};
key_init(server, Ssh, Value) ->
Ssh#ssh{s_keyinit = Value}.
-available_ssh_algos() ->
- Supports = crypto:supports(),
- CipherAlgos = [{aes_ctr, "aes128-ctr"}, {aes_cbc128, "aes128-cbc"}, {des3_cbc, "3des-cbc"}],
- Ciphers = [SshAlgo ||
- {CryptoAlgo, SshAlgo} <- CipherAlgos,
- lists:member(CryptoAlgo, proplists:get_value(ciphers, Supports, []))],
- HashAlgos = [{sha256, "hmac-sha2-256"}, {sha, "hmac-sha1"}],
- Hashs = [SshAlgo ||
- {CryptoAlgo, SshAlgo} <- HashAlgos,
- lists:member(CryptoAlgo, proplists:get_value(hashs, Supports, []))],
- {Ciphers, Hashs}.
-
-kexinit_messsage(client, Random, Compression, HostKeyAlgs) ->
- {CipherAlgs, HashAlgs} = available_ssh_algos(),
- #ssh_msg_kexinit{
- cookie = Random,
- kex_algorithms = ["diffie-hellman-group1-sha1"],
- server_host_key_algorithms = HostKeyAlgs,
- encryption_algorithms_client_to_server = CipherAlgs,
- encryption_algorithms_server_to_client = CipherAlgs,
- mac_algorithms_client_to_server = HashAlgs,
- mac_algorithms_server_to_client = HashAlgs,
- compression_algorithms_client_to_server = Compression,
- compression_algorithms_server_to_client = Compression,
- languages_client_to_server = [],
- languages_server_to_client = []
- };
-kexinit_messsage(server, Random, Compression, HostKeyAlgs) ->
- {CipherAlgs, HashAlgs} = available_ssh_algos(),
+kexinit_message(_Role, Random, Algs, HostKeyAlgs) ->
#ssh_msg_kexinit{
cookie = Random,
- kex_algorithms = ["diffie-hellman-group1-sha1"],
+ kex_algorithms = to_strings( get_algs(kex,Algs) ),
server_host_key_algorithms = HostKeyAlgs,
- encryption_algorithms_client_to_server = CipherAlgs,
- encryption_algorithms_server_to_client = CipherAlgs,
- mac_algorithms_client_to_server = HashAlgs,
- mac_algorithms_server_to_client = HashAlgs,
- compression_algorithms_client_to_server = Compression,
- compression_algorithms_server_to_client = Compression,
+ encryption_algorithms_client_to_server = c2s(cipher,Algs),
+ encryption_algorithms_server_to_client = s2c(cipher,Algs),
+ mac_algorithms_client_to_server = c2s(mac,Algs),
+ mac_algorithms_server_to_client = s2c(mac,Algs),
+ compression_algorithms_client_to_server = c2s(compression,Algs),
+ compression_algorithms_server_to_client = s2c(compression,Algs),
languages_client_to_server = [],
languages_server_to_client = []
}.
+c2s(Key, Algs) -> x2y(client2server, Key, Algs).
+s2c(Key, Algs) -> x2y(server2client, Key, Algs).
+
+x2y(DirectionKey, Key, Algs) -> to_strings(proplists:get_value(DirectionKey, get_algs(Key,Algs))).
+
+get_algs(Key, Algs) -> proplists:get_value(Key, Algs, default_algorithms(Key)).
+
+to_strings(L) -> lists:map(fun erlang:atom_to_list/1, L).
+
new_keys_message(Ssh0) ->
{SshPacket, Ssh} =
ssh_packet(#ssh_msg_newkeys{}, Ssh0),
@@ -218,20 +287,30 @@ key_exchange_first_msg('diffie-hellman-group-exchange-sha1', Ssh0) ->
handle_kexdh_init(#ssh_msg_kexdh_init{e = E}, Ssh0) ->
{G, P} = dh_group1(),
- {Private, Public} = dh_gen_key(G, P, 1024),
- K = ssh_math:ipow(E, Private, P),
- Key = get_host_key(Ssh0),
- H = kex_h(Ssh0, Key, E, Public, K),
- H_SIG = sign_host_key(Ssh0, Key, H),
- {SshPacket, Ssh1} = ssh_packet(#ssh_msg_kexdh_reply{public_host_key = Key,
- f = Public,
- h_sig = H_SIG
- }, Ssh0),
-
- {ok, SshPacket, Ssh1#ssh{keyex_key = {{Private, Public}, {G, P}},
- shared_secret = K,
- exchanged_hash = H,
- session_id = sid(Ssh1, H)}}.
+ if
+ 1=<E, E=<(P-1) ->
+ {Private, Public} = dh_gen_key(G, P, 1024),
+ K = ssh_math:ipow(E, Private, P),
+ Key = get_host_key(Ssh0),
+ H = kex_h(Ssh0, Key, E, Public, K),
+ H_SIG = sign_host_key(Ssh0, Key, H),
+ {SshPacket, Ssh1} = ssh_packet(#ssh_msg_kexdh_reply{public_host_key = Key,
+ f = Public,
+ h_sig = H_SIG
+ }, Ssh0),
+
+ {ok, SshPacket, Ssh1#ssh{keyex_key = {{Private, Public}, {G, P}},
+ shared_secret = K,
+ exchanged_hash = H,
+ session_id = sid(Ssh1, H)}};
+ true ->
+ Error = {error,bad_e_from_peer},
+ Disconnect = #ssh_msg_disconnect{
+ code = ?SSH_DISCONNECT_KEY_EXCHANGE_FAILED,
+                      description = "Key exchange failed, 'e' out of bounds",
+ language = "en"},
+ throw({Error, Disconnect})
+ end.
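+
+%% Editorial note (not part of the original patch): the guard 1 =< E =< P-1 in
+%% handle_kexdh_init/2 rejects out-of-range public values from the peer before
+%% the shared secret is computed, so a degenerate E (such as 0 or 1) cannot
+%% force a predictable shared secret K.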
handle_kex_dh_gex_group(#ssh_msg_kex_dh_gex_group{p = P, g = G}, Ssh0) ->
{Private, Public} = dh_gen_key(G,P,1024),
@@ -255,7 +334,7 @@ handle_new_keys(#ssh_msg_newkeys{}, Ssh0) ->
%% %% Select algorithms
handle_kexdh_reply(#ssh_msg_kexdh_reply{public_host_key = HostKey, f = F,
h_sig = H_SIG},
- #ssh{keyex_key = {{Private, Public}, {_G, P}}} = Ssh0) ->
+ #ssh{keyex_key = {{Private, Public}, {_G, P}}} = Ssh0) when 1=<F, F=<(P-1)->
K = ssh_math:ipow(F, Private, P),
H = kex_h(Ssh0, HostKey, Public, F, K),
@@ -271,7 +350,15 @@ handle_kexdh_reply(#ssh_msg_kexdh_reply{public_host_key = HostKey, f = F,
description = "Key exchange failed",
language = "en"},
throw({Error, Disconnect})
- end.
+ end;
+handle_kexdh_reply(#ssh_msg_kexdh_reply{}, _SSH) ->
+ Error = {error,bad_f_from_peer},
+ Disconnect = #ssh_msg_disconnect{
+ code = ?SSH_DISCONNECT_KEY_EXCHANGE_FAILED,
+ description = "Key exchange failed, 'f' out of bounds",
+ language = "en"},
+ throw({Error, Disconnect}).
+
handle_kex_dh_gex_request(#ssh_msg_kex_dh_gex_request{min = _Min,
n = _NBits,
@@ -408,6 +495,7 @@ select_algorithm(Role, Client, Server) ->
decompress = Decompression,
c_lng = C_Lng,
s_lng = S_Lng},
+%%ct:pal("~p~n Client=~p~n Server=~p~n Alg=~p~n",[Role,Client,Server,Alg]),
{ok, Alg}.
select_encrypt_decrypt(client, Client, Server) ->
@@ -497,10 +585,15 @@ alg_final(SSH0) ->
{ok,SSH6} = decompress_final(SSH5),
SSH6.
-select_all(CL, SL) ->
+select_all(CL, SL) when length(CL) + length(SL) < 50 ->
    A = CL -- SL,  %% algorithms only used by client
%% algorithms used by client and server (client pref)
- lists:map(fun(ALG) -> list_to_atom(ALG) end, (CL -- A)).
+ lists:map(fun(ALG) -> list_to_atom(ALG) end, (CL -- A));
+select_all(_CL, _SL) ->
+ throw(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
+ description = "Too many algorithms",
+ language = "en"}).
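+
+%% Editorial note (not part of the original patch): select_all/2 turns peer
+%% supplied name strings into atoms with list_to_atom/1, and atoms are never
+%% garbage collected, so the length guard above bounds how many atoms an
+%% unauthenticated peer can create; an oversized name list is rejected as a
+%% protocol error instead.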
+
select([], []) ->
none;
diff --git a/lib/ssh/test/Makefile b/lib/ssh/test/Makefile
index 740dbd0235..39b2f57d26 100644
--- a/lib/ssh/test/Makefile
+++ b/lib/ssh/test/Makefile
@@ -40,7 +40,8 @@ MODULES= \
ssh_connection_SUITE \
ssh_echo_server \
ssh_peername_sockname_server \
- ssh_test_cli
+ ssh_test_cli \
+ ssh_relay
HRL_FILES_NEEDED_IN_TEST= \
$(ERL_TOP)/lib/ssh/src/ssh.hrl \
diff --git a/lib/ssh/test/ssh_basic_SUITE.erl b/lib/ssh/test/ssh_basic_SUITE.erl
index 415cb9fc9c..406c0c071e 100644
--- a/lib/ssh/test/ssh_basic_SUITE.erl
+++ b/lib/ssh/test/ssh_basic_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -23,12 +23,14 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("kernel/include/inet.hrl").
+-include_lib("kernel/include/file.hrl").
%% Note: This directive should only be used in test suites.
-compile(export_all).
-define(NEWLINE, <<"\r\n">>).
+-define(REKEY_DATA_TMO, 65000).
%%--------------------------------------------------------------------
%% Common Test interface functions -----------------------------------
%%--------------------------------------------------------------------
@@ -44,12 +46,27 @@ all() ->
{group, dsa_pass_key},
{group, rsa_pass_key},
{group, internal_error},
+ {group, renegotiate},
daemon_already_started,
server_password_option,
server_userpassword_option,
+ {group, dir_options},
double_close,
ssh_connect_timeout,
ssh_connect_arg4_timeout,
+ packet_size_zero,
+ ssh_daemon_minimal_remote_max_packet_size_option,
+ ssh_msg_debug_fun_option_client,
+ ssh_msg_debug_fun_option_server,
+ disconnectfun_option_server,
+ disconnectfun_option_client,
+ preferred_algorithms,
+ id_string_no_opt_client,
+ id_string_own_string_client,
+ id_string_random_client,
+ id_string_no_opt_server,
+ id_string_own_string_server,
+ id_string_random_server,
{group, hardening_tests}
].
@@ -59,6 +76,7 @@ groups() ->
{dsa_pass_key, [], [pass_phrase]},
{rsa_pass_key, [], [pass_phrase]},
{internal_error, [], [internal_error]},
+ {renegotiate, [], [rekey, rekey_limit, renegotiate1, renegotiate2]},
{hardening_tests, [], [ssh_connect_nonegtimeout_connected_parallel,
ssh_connect_nonegtimeout_connected_sequential,
ssh_connect_negtimeout_parallel,
@@ -67,19 +85,21 @@ groups() ->
max_sessions_ssh_connect_sequential,
max_sessions_sftp_start_channel_parallel,
max_sessions_sftp_start_channel_sequential
- ]}
+ ]},
+ {dir_options, [], [user_dir_option,
+ system_dir_option]}
].
basic_tests() ->
[send, close, peername_sockname,
exec, exec_compressed, shell, cli, known_hosts,
- idle_time, rekey, openssh_zlib_basic_test,
- misc_ssh_options, inet_option].
+ idle_time, openssh_zlib_basic_test, misc_ssh_options, inet_option].
%%--------------------------------------------------------------------
init_per_suite(Config) ->
+ catch crypto:stop(),
case catch crypto:start() of
ok ->
Config;
@@ -118,6 +138,30 @@ init_per_group(internal_error, Config) ->
ssh_test_lib:setup_dsa(DataDir, PrivDir),
file:delete(filename:join(PrivDir, "system/ssh_host_dsa_key")),
Config;
+init_per_group(dir_options, Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ %% Make unreadable dir:
+ Dir_unreadable = filename:join(PrivDir, "unread"),
+ ok = file:make_dir(Dir_unreadable),
+ {ok,F1} = file:read_file_info(Dir_unreadable),
+ ok = file:write_file_info(Dir_unreadable,
+ F1#file_info{mode = F1#file_info.mode band (bnot 8#00444)}),
+ %% Make readable file:
+ File_readable = filename:join(PrivDir, "file"),
+ ok = file:write_file(File_readable, <<>>),
+ %% Check:
+ case {file:read_file_info(Dir_unreadable),
+ file:read_file_info(File_readable)} of
+ {{ok, #file_info{type=directory, access=Md}},
+ {ok, #file_info{type=regular, access=Mf}}} when Md=/=read, Md=/=read_write ->
+ %% Save:
+ [{unreadable_dir, Dir_unreadable},
+ {readable_file, File_readable}
+ | Config];
+ X ->
+ ct:log("#file_info : ~p",[X]),
+ {skip, "File or dir mode settings failed"}
+ end;
init_per_group(_, Config) ->
Config.
@@ -277,7 +321,7 @@ exec_compressed(Config) when is_list(Config) ->
UserDir = ?config(priv_dir, Config),
{Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir},{user_dir, UserDir},
- {compression, zlib},
+ {preferred_algorithms,[{compression, [zlib]}]},
{failfun, fun ssh_test_lib:failfun/2}]),
ConnectionRef =
@@ -323,25 +367,175 @@ idle_time(Config) ->
rekey() ->
    [{doc, "Test rekeying"}].
rekey(Config) ->
- SystemDir = filename:join(?config(priv_dir, Config), system),
+ SystemDir = ?config(data_dir, Config),
UserDir = ?config(priv_dir, Config),
{Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir},
- {user_dir, UserDir},
+ {user_dir, UserDir},
{failfun, fun ssh_test_lib:failfun/2},
+ {user_passwords,
+ [{"simon", "says"}]},
{rekey_limit, 0}]),
+
ConnectionRef =
ssh_test_lib:connect(Host, Port, [{silently_accept_hosts, true},
{user_dir, UserDir},
+ {user, "simon"},
+ {password, "says"},
{user_interaction, false},
{rekey_limit, 0}]),
receive
- after 200000 ->
+ after ?REKEY_DATA_TMO ->
%%By this time rekeying would have been done
ssh:close(ConnectionRef),
ssh:stop_daemon(Pid)
end.
%%--------------------------------------------------------------------
+rekey_limit() ->
+ [{doc, "Test rekeying by data volume"}].
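+%% Reading aid (editorial comment, not part of the original patch): the daemon
+%% is started without a rekey limit while the client sets {rekey_limit, 2500}
+%% bytes. The test then checks, via get_kex_init/1, that the kexinit cookie is
+%% unchanged after idling, changes after an ~9000 byte write (above the limit),
+%% and stays unchanged after a small write below the limit.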
+rekey_limit(Config) ->
+ SystemDir = ?config(data_dir, Config),
+ UserDir = ?config(priv_dir, Config),
+ DataFile = filename:join(UserDir, "rekey.data"),
+
+ {Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir},
+ {user_dir, UserDir},
+ {user_passwords,
+ [{"simon", "says"}]}]),
+ {ok, SftpPid, ConnectionRef} =
+ ssh_sftp:start_channel(Host, Port, [{system_dir, SystemDir},
+ {user_dir, UserDir},
+ {user, "simon"},
+ {password, "says"},
+ {rekey_limit, 2500},
+ {user_interaction, false},
+ {silently_accept_hosts, true}]),
+
+ Kex1 = get_kex_init(ConnectionRef),
+
+ ct:sleep(?REKEY_DATA_TMO),
+ Kex1 = get_kex_init(ConnectionRef),
+
+ Data = lists:duplicate(9000,1),
+ ok = ssh_sftp:write_file(SftpPid, DataFile, Data),
+
+ ct:sleep(?REKEY_DATA_TMO),
+ Kex2 = get_kex_init(ConnectionRef),
+
+ false = (Kex2 == Kex1),
+
+ ct:sleep(?REKEY_DATA_TMO),
+ Kex2 = get_kex_init(ConnectionRef),
+
+ ok = ssh_sftp:write_file(SftpPid, DataFile, "hi\n"),
+
+ ct:sleep(?REKEY_DATA_TMO),
+ Kex2 = get_kex_init(ConnectionRef),
+
+ false = (Kex2 == Kex1),
+
+ ct:sleep(?REKEY_DATA_TMO),
+ Kex2 = get_kex_init(ConnectionRef),
+
+
+ ssh_sftp:stop_channel(SftpPid),
+ ssh:close(ConnectionRef),
+ ssh:stop_daemon(Pid).
+
+%%--------------------------------------------------------------------
+renegotiate1() ->
+    [{doc, "Test rekeying with a simultaneous send request"}].
+renegotiate1(Config) ->
+ SystemDir = ?config(data_dir, Config),
+ UserDir = ?config(priv_dir, Config),
+ DataFile = filename:join(UserDir, "renegotiate1.data"),
+
+ {Pid, Host, DPort} = ssh_test_lib:daemon([{system_dir, SystemDir},
+ {user_dir, UserDir},
+ {user_passwords,
+ [{"simon", "says"}]}]),
+ RPort = ssh_test_lib:inet_port(),
+
+ {ok,RelayPid} = ssh_relay:start_link({0,0,0,0}, RPort, Host, DPort),
+
+ {ok, SftpPid, ConnectionRef} =
+ ssh_sftp:start_channel(Host, RPort, [{system_dir, SystemDir},
+ {user_dir, UserDir},
+ {user, "simon"},
+ {password, "says"},
+ {user_interaction, false},
+ {silently_accept_hosts, true}]),
+
+ Kex1 = get_kex_init(ConnectionRef),
+
+ {ok, Handle} = ssh_sftp:open(SftpPid, DataFile, [write]),
+
+ ok = ssh_sftp:write(SftpPid, Handle, "hi\n"),
+
+ ssh_relay:hold(RelayPid, rx, 20, 1000),
+ ssh_connection_handler:renegotiate(ConnectionRef),
+ spawn(fun() -> ok=ssh_sftp:write(SftpPid, Handle, "another hi\n") end),
+
+ ct:sleep(2000),
+
+ Kex2 = get_kex_init(ConnectionRef),
+
+ false = (Kex2 == Kex1),
+
+ ssh_relay:stop(RelayPid),
+ ssh_sftp:stop_channel(SftpPid),
+ ssh:close(ConnectionRef),
+ ssh:stop_daemon(Pid).
+
+%%--------------------------------------------------------------------
+renegotiate2() ->
+    [{doc, "Test rekeying with in-flight messages from the peer"}].
+renegotiate2(Config) ->
+ SystemDir = ?config(data_dir, Config),
+ UserDir = ?config(priv_dir, Config),
+ DataFile = filename:join(UserDir, "renegotiate1.data"),
+
+ {Pid, Host, DPort} = ssh_test_lib:daemon([{system_dir, SystemDir},
+ {user_dir, UserDir},
+ {user_passwords,
+ [{"simon", "says"}]}]),
+ RPort = ssh_test_lib:inet_port(),
+
+ {ok,RelayPid} = ssh_relay:start_link({0,0,0,0}, RPort, Host, DPort),
+
+ {ok, SftpPid, ConnectionRef} =
+ ssh_sftp:start_channel(Host, RPort, [{system_dir, SystemDir},
+ {user_dir, UserDir},
+ {user, "simon"},
+ {password, "says"},
+ {user_interaction, false},
+ {silently_accept_hosts, true}]),
+
+ Kex1 = get_kex_init(ConnectionRef),
+
+ {ok, Handle} = ssh_sftp:open(SftpPid, DataFile, [write]),
+
+ ok = ssh_sftp:write(SftpPid, Handle, "hi\n"),
+
+ ssh_relay:hold(RelayPid, rx, 20, infinity),
+ spawn(fun() -> ok=ssh_sftp:write(SftpPid, Handle, "another hi\n") end),
+ %% need a small pause here to ensure ssh_sftp:write is executed
+ ct:sleep(10),
+ ssh_connection_handler:renegotiate(ConnectionRef),
+ ssh_relay:release(RelayPid, rx),
+
+ ct:sleep(2000),
+
+ Kex2 = get_kex_init(ConnectionRef),
+
+ false = (Kex2 == Kex1),
+
+ ssh_relay:stop(RelayPid),
+ ssh_sftp:stop_channel(SftpPid),
+ ssh:close(ConnectionRef),
+ ssh:stop_daemon(Pid).
+
+%%--------------------------------------------------------------------
shell() ->
[{doc, "Test that ssh:shell/2 works"}].
shell(Config) when is_list(Config) ->
@@ -486,6 +680,205 @@ server_userpassword_option(Config) when is_list(Config) ->
ssh:stop_daemon(Pid).
%%--------------------------------------------------------------------
+system_dir_option(Config) ->
+ DirUnread = proplists:get_value(unreadable_dir,Config),
+ FileRead = proplists:get_value(readable_file,Config),
+
+ case ssh_test_lib:daemon([{system_dir, DirUnread}]) of
+ {error,{eoptions,{{system_dir,DirUnread},eacces}}} ->
+ ok;
+ {Pid1,_Host1,Port1} when is_pid(Pid1),is_integer(Port1) ->
+ ssh:stop_daemon(Pid1),
+ ct:fail("Didn't detect that dir is unreadable", [])
+ end,
+
+ case ssh_test_lib:daemon([{system_dir, FileRead}]) of
+ {error,{eoptions,{{system_dir,FileRead},enotdir}}} ->
+ ok;
+ {Pid2,_Host2,Port2} when is_pid(Pid2),is_integer(Port2) ->
+ ssh:stop_daemon(Pid2),
+ ct:fail("Didn't detect that option is a plain file", [])
+ end.
+
+
+user_dir_option(Config) ->
+ DirUnread = proplists:get_value(unreadable_dir,Config),
+ FileRead = proplists:get_value(readable_file,Config),
+ %% Any port will do (beware, implementation knowledge!):
+ Port = 65535,
+
+ case ssh:connect("localhost", Port, [{user_dir, DirUnread}]) of
+ {error,{eoptions,{{user_dir,DirUnread},eacces}}} ->
+ ok;
+ {error,econnrefused} ->
+ ct:fail("Didn't detect that dir is unreadable", [])
+ end,
+
+ case ssh:connect("localhost", Port, [{user_dir, FileRead}]) of
+ {error,{eoptions,{{user_dir,FileRead},enotdir}}} ->
+ ok;
+ {error,econnrefused} ->
+ ct:fail("Didn't detect that option is a plain file", [])
+ end.
+
+%%--------------------------------------------------------------------
+ssh_msg_debug_fun_option_client() ->
+ [{doc, "validate client that uses the 'ssh_msg_debug_fun' option"}].
+ssh_msg_debug_fun_option_client(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ UserDir = filename:join(PrivDir, nopubkey), % to make sure we don't use public-key-auth
+ file:make_dir(UserDir),
+ SysDir = ?config(data_dir, Config),
+
+ {Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SysDir},
+ {user_dir, UserDir},
+ {password, "morot"},
+ {failfun, fun ssh_test_lib:failfun/2}]),
+ Parent = self(),
+ DbgFun = fun(ConnRef,Displ,Msg,Lang) -> Parent ! {msg_dbg,{ConnRef,Displ,Msg,Lang}} end,
+
+ ConnectionRef =
+ ssh_test_lib:connect(Host, Port, [{silently_accept_hosts, true},
+ {user, "foo"},
+ {password, "morot"},
+ {user_dir, UserDir},
+ {user_interaction, false},
+ {ssh_msg_debug_fun,DbgFun}]),
+ %% Beware, implementation knowledge:
+ gen_fsm:send_all_state_event(ConnectionRef,{ssh_msg_debug,false,<<"Hello">>,<<>>}),
+ receive
+ {msg_dbg,X={ConnectionRef,false,<<"Hello">>,<<>>}} ->
+ ct:log("Got expected dbg msg ~p",[X]),
+ ssh:stop_daemon(Pid);
+ {msg_dbg,X={_,false,<<"Hello">>,<<>>}} ->
+ ct:log("Got dbg msg but bad ConnectionRef (~p expected) ~p",[ConnectionRef,X]),
+ ssh:stop_daemon(Pid),
+ {fail, "Bad ConnectionRef received"};
+ {msg_dbg,X} ->
+ ct:log("Got bad dbg msg ~p",[X]),
+ ssh:stop_daemon(Pid),
+ {fail,"Bad msg received"}
+ after 1000 ->
+ ssh:stop_daemon(Pid),
+ {fail,timeout}
+ end.
+
+%%--------------------------------------------------------------------
+ssh_msg_debug_fun_option_server() ->
+    [{doc, "validate server that uses the 'ssh_msg_debug_fun' option"}].
+ssh_msg_debug_fun_option_server(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ UserDir = filename:join(PrivDir, nopubkey), % to make sure we don't use public-key-auth
+ file:make_dir(UserDir),
+ SysDir = ?config(data_dir, Config),
+
+ Parent = self(),
+ DbgFun = fun(ConnRef,Displ,Msg,Lang) -> Parent ! {msg_dbg,{ConnRef,Displ,Msg,Lang}} end,
+ ConnFun = fun(_,_,_) -> Parent ! {connection_pid,self()} end,
+
+ {Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SysDir},
+ {user_dir, UserDir},
+ {password, "morot"},
+ {failfun, fun ssh_test_lib:failfun/2},
+ {connectfun, ConnFun},
+ {ssh_msg_debug_fun, DbgFun}]),
+ _ConnectionRef =
+ ssh_test_lib:connect(Host, Port, [{silently_accept_hosts, true},
+ {user, "foo"},
+ {password, "morot"},
+ {user_dir, UserDir},
+ {user_interaction, false}]),
+ receive
+ {connection_pid,Server} ->
+ %% Beware, implementation knowledge:
+ gen_fsm:send_all_state_event(Server,{ssh_msg_debug,false,<<"Hello">>,<<>>}),
+ receive
+ {msg_dbg,X={_,false,<<"Hello">>,<<>>}} ->
+ ct:log("Got expected dbg msg ~p",[X]),
+ ssh:stop_daemon(Pid);
+ {msg_dbg,X} ->
+ ct:log("Got bad dbg msg ~p",[X]),
+ ssh:stop_daemon(Pid),
+ {fail,"Bad msg received"}
+ after 3000 ->
+ ssh:stop_daemon(Pid),
+ {fail,timeout2}
+ end
+ after 3000 ->
+ ssh:stop_daemon(Pid),
+ {fail,timeout1}
+ end.
+
+%%--------------------------------------------------------------------
+disconnectfun_option_server(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ UserDir = filename:join(PrivDir, nopubkey), % to make sure we don't use public-key-auth
+ file:make_dir(UserDir),
+ SysDir = ?config(data_dir, Config),
+
+ Parent = self(),
+ DisConnFun = fun(Reason) -> Parent ! {disconnect,Reason} end,
+
+ {Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SysDir},
+ {user_dir, UserDir},
+ {password, "morot"},
+ {failfun, fun ssh_test_lib:failfun/2},
+ {disconnectfun, DisConnFun}]),
+ ConnectionRef =
+ ssh_test_lib:connect(Host, Port, [{silently_accept_hosts, true},
+ {user, "foo"},
+ {password, "morot"},
+ {user_dir, UserDir},
+ {user_interaction, false}]),
+ ssh:close(ConnectionRef),
+ receive
+ {disconnect,Reason} ->
+ ct:log("Server detected disconnect: ~p",[Reason]),
+ ssh:stop_daemon(Pid),
+ ok
+ after 3000 ->
+ receive
+ X -> ct:log("received ~p",[X])
+ after 0 -> ok
+ end,
+ {fail,"Timeout waiting for disconnect"}
+ end.
+
+%%--------------------------------------------------------------------
+disconnectfun_option_client(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ UserDir = filename:join(PrivDir, nopubkey), % to make sure we don't use public-key-auth
+ file:make_dir(UserDir),
+ SysDir = ?config(data_dir, Config),
+
+ Parent = self(),
+ DisConnFun = fun(Reason) -> Parent ! {disconnect,Reason} end,
+
+ {Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SysDir},
+ {user_dir, UserDir},
+ {password, "morot"},
+ {failfun, fun ssh_test_lib:failfun/2}]),
+ _ConnectionRef =
+ ssh_test_lib:connect(Host, Port, [{silently_accept_hosts, true},
+ {user, "foo"},
+ {password, "morot"},
+ {user_dir, UserDir},
+ {user_interaction, false},
+ {disconnectfun, DisConnFun}]),
+ ssh:stop_daemon(Pid),
+ receive
+ {disconnect,Reason} ->
+ ct:log("Client detected disconnect: ~p",[Reason]),
+ ok
+ after 3000 ->
+ receive
+ X -> ct:log("received ~p",[X])
+ after 0 -> ok
+ end,
+ {fail,"Timeout waiting for disconnect"}
+ end.
+
+%%--------------------------------------------------------------------
known_hosts() ->
[{doc, "check that known_hosts is updated correctly"}].
known_hosts(Config) when is_list(Config) ->
@@ -715,7 +1108,7 @@ ssh_connect_arg4_timeout(_Config) ->
%% try to connect with a timeout, but "supervise" it
Client = spawn(fun() ->
- T0 = now(),
+ T0 = erlang:monotonic_time(),
Rc = ssh:connect("localhost",Port,[],Timeout),
ct:log("Client ssh:connect got ~p",[Rc]),
Parent ! {done,self(),Rc,T0}
@@ -723,16 +1116,22 @@ ssh_connect_arg4_timeout(_Config) ->
%% Wait for client reaction on the connection try:
receive
- {done, Client, {error,_E}, T0} ->
- Msp = ms_passed(T0, now()),
+ {done, Client, {error,timeout}, T0} ->
+ Msp = ms_passed(T0),
exit(Server,hasta_la_vista___baby),
Low = 0.9*Timeout,
High = 1.1*Timeout,
- ct:log("Timeout limits: ~p--~p, timeout was ~p, expected ~p",[Low,High,Msp,Timeout]),
+ ct:log("Timeout limits: ~.4f - ~.4f ms, timeout "
+ "was ~.4f ms, expected ~p ms",[Low,High,Msp,Timeout]),
if
Low<Msp, Msp<High -> ok;
true -> {fail, "timeout not within limits"}
end;
+
+ {done, Client, {error,Other}, _T0} ->
+ ct:log("Error message \"~p\" from the client is unexpected.",[{error,Other}]),
+ {fail, "Unexpected error message"};
+
{done, Client, {ok,_Ref}, _T0} ->
{fail,"ssh-connected ???"}
after
@@ -742,13 +1141,187 @@ ssh_connect_arg4_timeout(_Config) ->
{fail, "Didn't timeout"}
end.
+%% Helper function: elapsed milliseconds since T0
+ms_passed(T0) ->
+ %% OTP 18
+ erlang:convert_time_unit(erlang:monotonic_time() - T0,
+ native,
+ micro_seconds) / 1000.
+
+%%--------------------------------------------------------------------
+packet_size_zero(Config) ->
+ SystemDir = ?config(data_dir, Config),
+ PrivDir = ?config(priv_dir, Config),
+ UserDir = filename:join(PrivDir, nopubkey), % to make sure we don't use public-key-auth
+ file:make_dir(UserDir),
+
+ {Server, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir},
+ {user_dir, UserDir},
+ {user_passwords, [{"vego", "morot"}]}]),
+ Conn =
+ ssh_test_lib:connect(Host, Port, [{silently_accept_hosts, true},
+ {user_dir, UserDir},
+ {user_interaction, false},
+ {user, "vego"},
+ {password, "morot"}]),
+
+ {ok,Chan} = ssh_connection:session_channel(Conn, 1000, _MaxPacketSize=0, 60000),
+ ok = ssh_connection:shell(Conn, Chan),
+
+ ssh:close(Conn),
+ ssh:stop_daemon(Server),
+
+ receive
+ {ssh_cm,Conn,{data,Chan,_Type,_Msg1}} = M ->
+ ct:pal("Got ~p",[M]),
+ ct:fail(doesnt_obey_max_packet_size_0)
+ after 5000 ->
+ ok
+ end.
+
+%%--------------------------------------------------------------------
+ssh_daemon_minimal_remote_max_packet_size_option(Config) ->
+ SystemDir = ?config(data_dir, Config),
+ PrivDir = ?config(priv_dir, Config),
+ UserDir = filename:join(PrivDir, nopubkey), % to make sure we don't use public-key-auth
+ file:make_dir(UserDir),
+
+ {Server, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir},
+ {user_dir, UserDir},
+ {user_passwords, [{"vego", "morot"}]},
+ {failfun, fun ssh_test_lib:failfun/2},
+ {minimal_remote_max_packet_size, 14}]),
+ Conn =
+ ssh_test_lib:connect(Host, Port, [{silently_accept_hosts, true},
+ {user_dir, UserDir},
+ {user_interaction, false},
+ {user, "vego"},
+ {password, "morot"}]),
+
+ %% Try the limits of the minimal_remote_max_packet_size:
+ {ok, _ChannelId} = ssh_connection:session_channel(Conn, 100, 14, infinity),
+ {open_error,_,"Maximum packet size below 14 not supported",_} =
+ ssh_connection:session_channel(Conn, 100, 13, infinity),
+
+ ssh:close(Conn),
+ ssh:stop_daemon(Server).
+
+%%--------------------------------------------------------------------
+%% This test tries every algorithm by connecting to an Erlang server
+preferred_algorithms(Config) ->
+ SystemDir = ?config(data_dir, Config),
+ PrivDir = ?config(priv_dir, Config),
+ UserDir = filename:join(PrivDir, nopubkey), % to make sure we don't use public-key-auth
+ file:make_dir(UserDir),
+
+ {Server, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir},
+ {user_dir, UserDir},
+ {user_passwords, [{"vego", "morot"}]},
+ {failfun, fun ssh_test_lib:failfun/2}]),
+ Available = ssh:default_algorithms(),
+ Tests = [[{Tag,[Alg]}] || {Tag, SubAlgs} <- Available,
+ is_atom(hd(SubAlgs)),
+ Alg <- SubAlgs]
+ ++ [[{Tag,[{T1,[A1]},{T2,[A2]}]}] || {Tag, [{T1,As1},{T2,As2}]} <- Available,
+ A1 <- As1,
+ A2 <- As2],
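+    %% Illustrative note (not part of the original patch): each generated test
+    %% case selects either a single algorithm, e.g. [{kex,['diffie-hellman-group1-sha1']}],
+    %% or one client-to-server/server-to-client pair, e.g.
+    %% [{cipher,[{client2server,['aes128-ctr']},{server2client,['aes128-cbc']}]}].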
+ ct:log("TESTS: ~p",[Tests]),
+ [connect_exec_channel(Host,Port,PrefAlgs) || PrefAlgs <- Tests],
+ ssh:stop_daemon(Server).
+
+
+connect_exec_channel(_Host, Port, Algs) ->
+ ct:log("Try ~p",[Algs]),
+ ConnectionRef = ssh_test_lib:connect(Port, [{silently_accept_hosts, true},
+ {user_interaction, false},
+ {user, "vego"},
+ {password, "morot"},
+ {preferred_algorithms,Algs}
+ ]),
+ chan_exec(ConnectionRef, "2*21.", <<"42\n">>),
+ ssh:close(ConnectionRef).
+
+chan_exec(ConnectionRef, Cmnd, Expected) ->
+ {ok, ChannelId0} = ssh_connection:session_channel(ConnectionRef, infinity),
+ success = ssh_connection:exec(ConnectionRef, ChannelId0,Cmnd, infinity),
+ Data0 = {ssh_cm, ConnectionRef, {data, ChannelId0, 0, Expected}},
+ case ssh_test_lib:receive_exec_result(Data0) of
+ expected ->
+ ssh_test_lib:receive_exec_end(ConnectionRef, ChannelId0);
+ {unexpected_msg,{ssh_cm, ConnectionRef, {exit_status, ChannelId0, 0}}
+ = ExitStatus0} ->
+ ct:pal("0: Collected data ~p", [ExitStatus0]),
+ ssh_test_lib:receive_exec_result(Data0,
+ ConnectionRef, ChannelId0);
+ Other0 ->
+ ct:fail(Other0)
+ end.
+
+%%--------------------------------------------------------------------
+id_string_no_opt_client(Config) ->
+ {Server, _Host, Port} = fake_daemon(Config),
+ {error,_} = ssh:connect("localhost", Port, [], 1000),
+ receive
+ {id,Server,"SSH-2.0-Erlang/"++Vsn} ->
+ true = expected_ssh_vsn(Vsn);
+ {id,Server,Other} ->
+ ct:fail("Unexpected id: ~s.",[Other])
+ after 5000 ->
+ {fail,timeout}
+ end.
+
+%%--------------------------------------------------------------------
+id_string_own_string_client(Config) ->
+ {Server, _Host, Port} = fake_daemon(Config),
+ {error,_} = ssh:connect("localhost", Port, [{id_string,"Pelle"}], 1000),
+ receive
+ {id,Server,"SSH-2.0-Pelle\r\n"} ->
+ ok;
+ {id,Server,Other} ->
+ ct:fail("Unexpected id: ~s.",[Other])
+ after 5000 ->
+ {fail,timeout}
+ end.
+
+%%--------------------------------------------------------------------
+id_string_random_client(Config) ->
+ {Server, _Host, Port} = fake_daemon(Config),
+ {error,_} = ssh:connect("localhost", Port, [{id_string,random}], 1000),
+ receive
+ {id,Server,Id="SSH-2.0-Erlang"++_} ->
+ ct:fail("Unexpected id: ~s.",[Id]);
+ {id,Server,Rnd="SSH-2.0-"++_} ->
+ ct:log("Got correct ~s",[Rnd]);
+ {id,Server,Id} ->
+ ct:fail("Unexpected id: ~s.",[Id])
+ after 5000 ->
+ {fail,timeout}
+ end.
+
+%%--------------------------------------------------------------------
+id_string_no_opt_server(Config) ->
+ {_Server, Host, Port} = std_daemon(Config, []),
+ {ok,S1}=gen_tcp:connect(Host,Port,[{active,false},{packet,line}]),
+ {ok,"SSH-2.0-Erlang/"++Vsn} = gen_tcp:recv(S1, 0, 2000),
+ true = expected_ssh_vsn(Vsn).
+
+%%--------------------------------------------------------------------
+id_string_own_string_server(Config) ->
+ {_Server, Host, Port} = std_daemon(Config, [{id_string,"Olle"}]),
+ {ok,S1}=gen_tcp:connect(Host,Port,[{active,false},{packet,line}]),
+ {ok,"SSH-2.0-Olle\r\n"} = gen_tcp:recv(S1, 0, 2000).
-%% Help function
-%% N2-N1
-ms_passed(N1={_,_,M1}, N2={_,_,M2}) ->
- {0,{0,Min,Sec}} = calendar:time_difference(calendar:now_to_local_time(N1),
- calendar:now_to_local_time(N2)),
- 1000 * (Min*60 + Sec + (M2-M1)/1000000).
+%%--------------------------------------------------------------------
+id_string_random_server(Config) ->
+ {_Server, Host, Port} = std_daemon(Config, [{id_string,random}]),
+ {ok,S1}=gen_tcp:connect(Host,Port,[{active,false},{packet,line}]),
+ {ok,"SSH-2.0-"++Rnd} = gen_tcp:recv(S1, 0, 2000),
+ case Rnd of
+ "Erlang"++_ -> ct:log("Id=~p",[Rnd]),
+ {fail,got_default_id};
+ "Olle\r\n" -> {fail,got_previous_tests_value};
+ _ -> ct:log("Got ~s.",[Rnd])
+ end.
%%--------------------------------------------------------------------
ssh_connect_negtimeout_parallel(Config) -> ssh_connect_negtimeout(Config,true).
@@ -854,12 +1427,15 @@ openssh_zlib_basic_test(Config) ->
{Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir},
{user_dir, UserDir},
+                                             {preferred_algorithms,[{compression, ['zlib@openssh.com']}]},
{failfun, fun ssh_test_lib:failfun/2}]),
ConnectionRef =
ssh_test_lib:connect(Host, Port, [{silently_accept_hosts, true},
{user_dir, UserDir},
{user_interaction, false},
- {compression, openssh_zlib}]),
+                                      {preferred_algorithms,[{compression, ['zlib@openssh.com',
+ none]}]}
+ ]),
ok = ssh:close(ConnectionRef),
ssh:stop_daemon(Pid).
@@ -964,7 +1540,7 @@ max_sessions(Config, ParallelLogin, Connect0) when is_function(Connect0,2) ->
%% Due to timing the error message may or may not be delivered to
%% the "tcp-application" before the socket closed message is received
-check_error("Internal error") ->
+check_error("Invalid state") ->
ok;
check_error("Connection closed") ->
ok;
@@ -1029,3 +1605,62 @@ do_shell(IO, Shell) ->
%% {'EXIT', Shell, killed} ->
%% ok
%% end.
+
+
+std_daemon(Config, ExtraOpts) ->
+ SystemDir = ?config(data_dir, Config),
+ PrivDir = ?config(priv_dir, Config),
+ UserDir = filename:join(PrivDir, nopubkey), % to make sure we don't use public-key-auth
+ file:make_dir(UserDir),
+ {_Server, _Host, _Port} = ssh_test_lib:daemon([{system_dir, SystemDir},
+ {user_dir, UserDir},
+ {failfun, fun ssh_test_lib:failfun/2} | ExtraOpts]).
+
+expected_ssh_vsn(Str) ->
+ try
+ {ok,L} = application:get_all_key(ssh),
+ proplists:get_value(vsn,L,"")++"\r\n"
+ of
+ Str -> true;
+ "\r\n" -> true;
+ _ -> false
+ catch
+	_:_ -> true %% ssh not started so we don't know
+ end.
+
+
+fake_daemon(_Config) ->
+ Parent = self(),
+ %% start the server
+ Server = spawn(fun() ->
+ {ok,Sl} = gen_tcp:listen(0,[{packet,line}]),
+ {ok,{Host,Port}} = inet:sockname(Sl),
+ ct:log("fake_daemon listening on ~p:~p~n",[Host,Port]),
+ Parent ! {sockname,self(),Host,Port},
+ Rsa = gen_tcp:accept(Sl),
+ ct:log("Server gen_tcp:accept got ~p",[Rsa]),
+ {ok,S} = Rsa,
+ receive
+ {tcp, S, Id} -> Parent ! {id,self(),Id}
+ end
+ end),
+ %% Get listening host and port
+ receive
+ {sockname,Server,ServerHost,ServerPort} -> {Server, ServerHost, ServerPort}
+ end.
+
+%% get_kex_init - helper function to get key_exchange_init_msg
+get_kex_init(Conn) ->
+ %% First, validate the key exchange is complete (StateName == connected)
+ {connected,S} = sys:get_state(Conn),
+ %% Next, walk through the elements of the #state record looking
+ %% for the #ssh_msg_kexinit record. This method is robust against
+ %% changes to either record. The KEXINIT message contains a cookie
+ %% unique to each invocation of the key exchange procedure (RFC4253)
+ SL = tuple_to_list(S),
+ case lists:keyfind(ssh_msg_kexinit, 1, SL) of
+ false ->
+ throw(not_found);
+ KexInit ->
+ KexInit
+ end.
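+
+%% Illustrative usage (editorial sketch, not part of the original patch): the
+%% renegotiate test cases above call get_kex_init/1 before and after forcing a
+%% key re-exchange and assert that the two #ssh_msg_kexinit records differ:
+%%
+%%   Kex1 = get_kex_init(ConnectionRef),
+%%   ssh_connection_handler:renegotiate(ConnectionRef),
+%%   ct:sleep(2000),
+%%   Kex2 = get_kex_init(ConnectionRef),
+%%   false = (Kex2 == Kex1).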
diff --git a/lib/ssh/test/ssh_connection_SUITE.erl b/lib/ssh/test/ssh_connection_SUITE.erl
index 85bd2c75d4..f0c337cf2f 100644
--- a/lib/ssh/test/ssh_connection_SUITE.erl
+++ b/lib/ssh/test/ssh_connection_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -21,6 +21,7 @@
-module(ssh_connection_SUITE).
-include_lib("common_test/include/ct.hrl").
+-include_lib("ssh/src/ssh_connect.hrl").
-compile(export_all).
@@ -37,7 +38,6 @@
all() ->
[
{group, openssh},
- {group, openssh_payload},
interrupted_send,
start_shell,
start_shell_exec,
@@ -46,7 +46,8 @@ all() ->
gracefull_invalid_start,
gracefull_invalid_long_start,
gracefull_invalid_long_start_no_nl,
- stop_listener
+ stop_listener,
+ start_subsystem_on_closed_channel
].
groups() ->
[{openssh, [], payload() ++ ptty()}].
@@ -64,6 +65,7 @@ ptty() ->
%%--------------------------------------------------------------------
init_per_suite(Config) ->
+ catch crypto:stop(),
case catch crypto:start() of
ok ->
Config;
@@ -75,12 +77,13 @@ end_per_suite(_Config) ->
crypto:stop().
%%--------------------------------------------------------------------
-init_per_group(openssh, _Config) ->
+init_per_group(openssh, Config) ->
case gen_tcp:connect("localhost", 22, []) of
{error,econnrefused} ->
	    {skip,"No openssh daemon"};
{ok, Socket} ->
- gen_tcp:close(Socket)
+ gen_tcp:close(Socket),
+ ssh_test_lib:openssh_sanity_check(Config)
end;
init_per_group(_, Config) ->
Config.
@@ -92,7 +95,7 @@ end_per_group(_, Config) ->
init_per_testcase(_TestCase, Config) ->
%% To make sure we start clean as it is not certain that
%% end_per_testcase will be run!
- ssh:stop(),
+ end_per_testcase(Config),
ssh:start(),
Config.
@@ -269,7 +272,7 @@ ptty_alloc(Config) when is_list(Config) ->
{user_interaction, false}]),
{ok, ChannelId} = ssh_connection:session_channel(ConnectionRef, infinity),
success = ssh_connection:ptty_alloc(ConnectionRef, ChannelId,
- [{term, default_term()}, {width, 70}, {high, 20}]),
+ [{term, os:getenv("TERM", ?DEFAULT_TERMINAL)}, {width, 70}, {height, 20}]),
ssh:close(ConnectionRef).
@@ -282,7 +285,7 @@ ptty_alloc_pixel(Config) when is_list(Config) ->
{user_interaction, false}]),
{ok, ChannelId} = ssh_connection:session_channel(ConnectionRef, infinity),
success = ssh_connection:ptty_alloc(ConnectionRef, ChannelId,
- [{term, default_term()}, {pixel_widh, 630}, {pixel_hight, 470}]),
+ [{term, os:getenv("TERM", ?DEFAULT_TERMINAL)}, {pixel_widh, 630}, {pixel_hight, 470}]),
ssh:close(ConnectionRef).
%%--------------------------------------------------------------------
@@ -576,6 +579,31 @@ stop_listener(Config) when is_list(Config) ->
ct:fail({unexpected, Error})
end.
+start_subsystem_on_closed_channel(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ UserDir = filename:join(PrivDir, nopubkey), % to make sure we don't use public-key-auth
+ file:make_dir(UserDir),
+ SysDir = ?config(data_dir, Config),
+ {Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SysDir},
+ {user_dir, UserDir},
+ {password, "morot"},
+ {subsystems, [{"echo_n", {ssh_echo_server, [4000000]}}]}]),
+
+ ConnectionRef = ssh_test_lib:connect(Host, Port, [{silently_accept_hosts, true},
+ {user, "foo"},
+ {password, "morot"},
+ {user_interaction, false},
+ {user_dir, UserDir}]),
+
+ {ok, ChannelId} = ssh_connection:session_channel(ConnectionRef, infinity),
+
+ ok = ssh_connection:close(ConnectionRef, ChannelId),
+
+ {error, closed} = ssh_connection:subsystem(ConnectionRef, ChannelId, "echo_n", infinity),
+
+ ssh:close(ConnectionRef),
+ ssh:stop_daemon(Pid).
+
%%--------------------------------------------------------------------
%% Internal functions ------------------------------------------------
%%--------------------------------------------------------------------
@@ -622,11 +650,3 @@ ssh_exec(Cmd) ->
spawn(fun() ->
io:format(Cmd ++ "\n")
end).
-
-default_term() ->
- case os:getenv("TERM") of
- false ->
- "vt100";
- Str when is_list(Str)->
- Str
- end.
diff --git a/lib/ssh/test/ssh_relay.erl b/lib/ssh/test/ssh_relay.erl
new file mode 100644
index 0000000000..a4f2bad2e2
--- /dev/null
+++ b/lib/ssh/test/ssh_relay.erl
@@ -0,0 +1,407 @@
+%%%-------------------------------------------------------------------
+%%% @author Simon Cornish <[email protected]>
+%%% @copyright (C) 2015, Simon Cornish
+%%% @doc
+%%% Provide manipulatable TCP-level relaying for testing SSH
+%%% @end
+%%% Created : 7 May 2015 by Simon Cornish <[email protected]>
+%%%-------------------------------------------------------------------
+-module(ssh_relay).
+
+-behaviour(gen_server).
+
+%% API
+-export([start_link/4]).
+-export([stop/1]).
+-export([hold/4, release/2, release_next/3]).
+
+%% gen_server callbacks
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+-record(hold, {
+ port,
+ n,
+ tmo,
+ tref,
+ q = []
+ }).
+
+-record(state, {
+ local_addr,
+ local_port,
+ peer_addr,
+ peer_port,
+ lpid,
+ local,
+ peer,
+ tx_hold,
+ rx_hold
+ }).
+
+-define(ACCEPT_TMO, 200).
+%%%===================================================================
+%%% API
+%%%===================================================================
+%%--------------------------------------------------------------------
+%% @doc
+%% Hold N (or 'all') messages in given direction.
+%% Messages will be released after the N+1th message or
+%% Tmo ms or 'infinity'
+%%
+%% Dir is 'tx' for direction local -> peer
+%% and 'rx' for direction peer -> local
+%%
+%% An Error, ealready, is returned if there is already a hold
+%% in the given direction
+%%
+%% @spec hold(Srv, Dir, N, Tmo) -> ok | {error, Error}
+%% @end
+%%--------------------------------------------------------------------
+hold(Srv, Dir, N, Tmo) ->
+ gen_server:call(Srv, {hold, Dir, N, Tmo}).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Release all held messages in given direction.
+%%
+%% An Error, enoent, is returned if there is no hold
+%% in the given direction
+%%
+%% @spec release(Srv, Dir) -> ok | {error, Error}
+%% @end
+%%--------------------------------------------------------------------
+release(Srv, Dir) ->
+ gen_server:call(Srv, {release, Dir}).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Release all held messages in given direction after the
+%% next message in the trigger direction
+%%
+%% An Error, enoent, is returned if there is no hold
+%% in the given direction
+%%
+%% @spec release_next(Srv, Dir, TriggerDir) -> ok | {error, Error}
+%% @end
+%%--------------------------------------------------------------------
+release_next(Srv, Dir, TriggerDir) ->
+ gen_server:call(Srv, {release_next, Dir, TriggerDir}).
+
+%%--------------------------------------------------------------------
+%% @doc
+%% Starts the server
+%%
+%% @spec start_link() -> {ok, Pid} | ignore | {error, Error}
+%% @end
+%%--------------------------------------------------------------------
+start_link(ListenAddr, ListenPort, PeerAddr, PeerPort) ->
+ gen_server:start_link(?MODULE, [ListenAddr, ListenPort, PeerAddr, PeerPort], []).
+
+stop(Srv) ->
+ unlink(Srv),
+ Srv ! stop.
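+
+%% Illustrative usage (editorial sketch, not part of the original patch), as in
+%% the renegotiate test cases in ssh_basic_SUITE:
+%%
+%%   {ok, Relay} = ssh_relay:start_link({0,0,0,0}, RPort, Host, DPort),
+%%   ssh_relay:hold(Relay, rx, 20, 1000),   % queue up to 20 peer->local messages
+%%   %% ... drive traffic through the relay ...
+%%   ssh_relay:release(Relay, rx),          % flush anything still held
+%%   ssh_relay:stop(Relay).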
+
+%%%===================================================================
+%%% gen_server callbacks
+%%%===================================================================
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% Initializes the server
+%%
+%% @spec init(Args) -> {ok, State} |
+%% {ok, State, Timeout} |
+%% ignore |
+%% {stop, Reason}
+%% @end
+%%--------------------------------------------------------------------
+init([ListenAddr, ListenPort, PeerAddr, PeerPort | Options]) ->
+ IfAddr = case ListenAddr of
+ {0,0,0,0} ->
+ [];
+ _ ->
+ [{ifaddr, ListenAddr}]
+ end,
+ case gen_tcp:listen(ListenPort, [{reuseaddr, true}, {backlog, 1}, {active, false}, binary | IfAddr]) of
+ {ok, LSock} ->
+ Parent = self(),
+ {LPid, _LMod} = spawn_monitor(fun() -> listen(Parent, LSock) end),
+ S = #state{local_addr = ListenAddr,
+ local_port = ListenPort,
+ lpid = LPid,
+ peer_addr = PeerAddr,
+ peer_port = PeerPort
+ },
+ {ok, S};
+ Error ->
+ {stop, Error}
+ end.
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% Handling call messages
+%%
+%% @spec handle_call(Request, From, State) ->
+%% {reply, Reply, State} |
+%% {reply, Reply, State, Timeout} |
+%% {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, Reply, State} |
+%% {stop, Reason, State}
+%% @end
+%%--------------------------------------------------------------------
+handle_call({hold, Dir, N, Tmo}, _From, State) ->
+ case Dir of
+ tx ->
+ do_hold(#state.tx_hold, State#state.peer, N, Tmo, State);
+ rx ->
+ do_hold(#state.rx_hold, State#state.local, N, Tmo, State);
+ _ ->
+ {reply, {error, einval}, State}
+ end;
+handle_call({release, Dir}, _From, State) ->
+ case Dir of
+ tx ->
+ do_release(#state.tx_hold, State);
+ rx ->
+ do_release(#state.rx_hold, State);
+ _ ->
+ {reply, {error, einval}, State}
+ end;
+handle_call({release_next, _Dir, _TriggerDir}, _From, State) ->
+ {reply, {error, nyi}, State};
+
+handle_call(Request, _From, State) ->
+ Reply = {unhandled, Request},
+ {reply, Reply, State}.
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% Handling cast messages
+%%
+%% @spec handle_cast(Msg, State) -> {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State}
+%% @end
+%%--------------------------------------------------------------------
+handle_cast(_Msg, State) ->
+ {noreply, State}.
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% Handling all non call/cast messages
+%%
+%% @spec handle_info(Info, State) -> {noreply, State} |
+%% {noreply, State, Timeout} |
+%% {stop, Reason, State}
+%% @end
+%%--------------------------------------------------------------------
+handle_info({tcp, Local, Data}, S) when S#state.local == Local ->
+ S1 = do_local(Data, S),
+ {noreply, S1};
+
+handle_info({tcp_error, Local, Error}, S) when S#state.local == Local ->
+ S1 = do_local({error, Error}, S),
+ {noreply, S1};
+
+handle_info({tcp_closed, Local}, S) when S#state.local == Local ->
+ S1 = do_local(closed, S),
+ {noreply, S1};
+
+handle_info({tcp, Peer, Data}, S) when S#state.peer == Peer ->
+ S1 = do_peer(Data, S),
+ {noreply, S1};
+
+handle_info({tcp_error, Peer, Error}, S) when S#state.peer == Peer ->
+ S1 = do_peer({error, Error}, S),
+ {noreply, S1};
+
+handle_info({tcp_closed, Peer}, S) when S#state.peer == Peer ->
+ S1 = do_peer(closed, S),
+ {noreply, S1};
+
+handle_info({accept, Local}, S) ->
+ S1 = do_accept(Local, S),
+ {noreply, S1};
+
+handle_info({activate, Local}, State) ->
+ inet:setopts(Local, [{active, true}]),
+ {noreply, State};
+
+handle_info({release, Pos}, S) ->
+ {reply, _, S1} = do_release(Pos,S),
+ {noreply, S1};
+
+handle_info(stop, State) ->
+ {stop, normal, State};
+
+handle_info({'DOWN', _Ref, _process, LPid, Reason}, S) when S#state.lpid == LPid ->
+ io:format("Acceptor has finished: ~p~n", [Reason]),
+ {noreply, S};
+
+handle_info(_Info, State) ->
+ io:format("Unhandled info: ~p~n", [_Info]),
+ {noreply, State}.
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% This function is called by a gen_server when it is about to
+%% terminate. It should be the opposite of Module:init/1 and do any
+%% necessary cleaning up. When it returns, the gen_server terminates
+%% with Reason. The return value is ignored.
+%%
+%% @spec terminate(Reason, State) -> void()
+%% @end
+%%--------------------------------------------------------------------
+terminate(_Reason, _State) ->
+ ok.
+
+%%--------------------------------------------------------------------
+%% @private
+%% @doc
+%% Convert process state when code is changed
+%%
+%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
+%% @end
+%%--------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%%===================================================================
+%%% Internal functions
+%%%===================================================================
+do_hold(Pos, _Port, _N, _Tmo, S) when element(Pos, S) /= undefined ->
+ {reply, {error, ealready}, S};
+do_hold(Pos, Port, N, Tmo, S) ->
+ TRef = if is_integer(Tmo) andalso Tmo > 0 ->
+ erlang:send_after(Tmo, self(), {release, Pos});
+ true ->
+ undefined
+ end,
+ Hold = #hold{port = Port, n = N, tmo = Tmo, tref = TRef},
+ {reply, ok, setelement(Pos, S, Hold)}.
+
+do_release(HPos, S) when element(HPos, S) == undefined ->
+ {reply, {error, enoent}, S};
+do_release(HPos, S) ->
+ #hold{port = Port, tref = TRef, q = Q} = element(HPos, S),
+ lists:foreach(fun(M) -> gen_tcp:send(Port, M), erlang:yield() end, Q),
+ catch erlang:cancel_timer(TRef),
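+    %% The hold timer may already have fired; drain any pending {release, HPos}
+    %% message so the release is not processed a second time by handle_info/2.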
+ receive
+ {release, HPos} -> ok
+ after 0 ->
+ ok
+ end,
+ {reply, ok, setelement(HPos, S, undefined)}.
+
+listen(Parent, LSock) ->
+ monitor(process, Parent),
+ do_listen(Parent, LSock).
+
+do_listen(Parent, LSock) ->
+    %% Annoyingly, there is no select-like semantics for this
+ case gen_tcp:accept(LSock, ?ACCEPT_TMO) of
+ {ok, Sock} ->
+ Parent ! {accept, Sock},
+ gen_tcp:controlling_process(Sock, Parent),
+ Parent ! {activate, Sock},
+ do_flush(Parent, Sock),
+ gen_tcp:close(LSock);
+ {error, timeout} ->
+ receive
+ DOWN when element(1, DOWN) == 'DOWN' ->
+ ok;
+ stop ->
+ ok
+ after 1 ->
+ do_listen(Parent, LSock)
+ end;
+ Error ->
+ gen_tcp:close(LSock),
+ exit({accept,Error})
+ end.
+
+do_flush(Parent, Sock) ->
+ receive
+ {Tcp, Sock, _} = Msg when Tcp == tcp; Tcp == tcp_error ->
+ Parent ! Msg,
+ do_flush(Parent, Sock);
+ {tcp_closed, Sock} = Msg ->
+ Parent ! Msg,
+ do_flush(Parent, Sock)
+ after 1 ->
+ ok
+ end.
+
+do_accept(Local, S) ->
+ case gen_tcp:connect(S#state.peer_addr, S#state.peer_port, [{active, true}, binary]) of
+ {ok, Peer} ->
+ S#state{local = Local, peer = Peer};
+ Error ->
+ exit({connect, Error})
+ end.
+
+do_local(Data, S) when is_binary(Data) ->
+ TxH = S#state.tx_hold,
+ if TxH == undefined ->
+ gen_tcp:send(S#state.peer, Data),
+ S;
+ TxH#hold.n == 0 ->
+ lists:foreach(fun(M) -> gen_tcp:send(S#state.peer, M) end, TxH#hold.q),
+ gen_tcp:send(S#state.peer, Data),
+ catch erlang:cancel_timer(TxH#hold.tref),
+ TxP = #state.tx_hold,
+ receive
+ {release, TxP} ->
+ ok
+ after 0 ->
+ ok
+ end,
+ S#state{tx_hold = undefined};
+ true ->
+ Q = TxH#hold.q ++ [Data],
+ N = if is_integer(TxH#hold.n) ->
+ TxH#hold.n -1;
+ true ->
+ TxH#hold.n
+ end,
+ S#state{tx_hold = TxH#hold{q = Q, n = N}}
+ end;
+do_local(Error, _S) ->
+ exit({local, Error}).
+
+do_peer(Data, S) when is_binary(Data) ->
+ RxH = S#state.rx_hold,
+ if RxH == undefined ->
+ gen_tcp:send(S#state.local, Data),
+ S;
+ RxH#hold.n == 0 ->
+ lists:foreach(fun(M) -> gen_tcp:send(S#state.local, M) end, RxH#hold.q),
+ gen_tcp:send(S#state.local, Data),
+ catch erlang:cancel_timer(RxH#hold.tref),
+ RxP = #state.rx_hold,
+ receive
+ {release, RxP} ->
+ ok
+ after 0 ->
+ ok
+ end,
+ S#state{rx_hold = undefined};
+ true ->
+ Q = RxH#hold.q ++ [Data],
+ N = if is_integer(RxH#hold.n) ->
+ RxH#hold.n -1;
+ true ->
+ RxH#hold.n
+ end,
+ S#state{rx_hold = RxH#hold{q = Q, n = N}}
+ end;
+do_peer(Error, _S) ->
+ exit({peer, Error}).
+
diff --git a/lib/ssh/test/ssh_sftp_SUITE.erl b/lib/ssh/test/ssh_sftp_SUITE.erl
index cb74a27638..850b1cbf6b 100644
--- a/lib/ssh/test/ssh_sftp_SUITE.erl
+++ b/lib/ssh/test/ssh_sftp_SUITE.erl
@@ -49,6 +49,7 @@ all() ->
init_per_suite(Config) ->
+ catch crypto:stop(),
case (catch crypto:start()) of
ok ->
ssh:start(),
diff --git a/lib/ssh/test/ssh_sftpd_SUITE.erl b/lib/ssh/test/ssh_sftpd_SUITE.erl
index 7b22e45d5e..925b02a437 100644
--- a/lib/ssh/test/ssh_sftpd_SUITE.erl
+++ b/lib/ssh/test/ssh_sftpd_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -56,7 +56,8 @@ all() ->
retrieve_attributes,
set_attributes,
links,
- ver3_rename,
+ ver3_rename,
+ ver3_open_flags,
relpath,
sshd_read_file,
ver6_basic].
@@ -67,6 +68,7 @@ groups() ->
%%--------------------------------------------------------------------
init_per_suite(Config) ->
+ catch crypto:stop(),
case (catch crypto:start()) of
ok ->
DataDir = ?config(data_dir, Config),
@@ -193,6 +195,39 @@ open_close_file(Config) when is_list(Config) ->
?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
?SSH_FXF_OPEN_EXISTING).
+ver3_open_flags() ->
+ [{doc, "Test open flags"}].
+ver3_open_flags(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ FileName = filename:join(PrivDir, "not_exist.txt"),
+ {Cm, Channel} = ?config(sftp, Config),
+ ReqId = 0,
+
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId), Handle/binary>>, _} =
+ open_file_v3(FileName, Cm, Channel, ReqId,
+ ?SSH_FXF_CREAT bor ?SSH_FXF_TRUNC),
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId),
+ ?UINT32(?SSH_FX_OK), _/binary>>, _} = close(Handle, ReqId,
+ Cm, Channel),
+
+ NewFileName = filename:join(PrivDir, "not_exist2.txt"),
+ NewReqId = ReqId + 1,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(NewReqId), NewHandle/binary>>, _} =
+ open_file_v3(NewFileName, Cm, Channel, NewReqId,
+ ?SSH_FXF_CREAT bor ?SSH_FXF_EXCL),
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(NewReqId),
+ ?UINT32(?SSH_FX_OK), _/binary>>, _} = close(NewHandle, NewReqId,
+ Cm, Channel),
+
+ NewFileName1 = filename:join(PrivDir, "test.txt"),
+ NewReqId1 = NewReqId + 1,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(NewReqId1), NewHandle1/binary>>, _} =
+ open_file_v3(NewFileName1, Cm, Channel, NewReqId1,
+ ?SSH_FXF_READ bor ?SSH_FXF_WRITE bor ?SSH_FXF_APPEND),
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(NewReqId1),
+ ?UINT32(?SSH_FX_OK), _/binary>>, _} = close(NewHandle1, NewReqId1,
+ Cm, Channel).
+
%%--------------------------------------------------------------------
open_close_dir() ->
[{doc,"Test SSH_FXP_OPENDIR and SSH_FXP_CLOSE commands"}].
@@ -662,6 +697,16 @@ open_file(File, Cm, Channel, ReqId, Access, Flags) ->
?SSH_FXP_OPEN, Data/binary>>),
reply(Cm, Channel).
+open_file_v3(File, Cm, Channel, ReqId, Flags) ->
+
+ Data = list_to_binary([?uint32(ReqId),
+ ?binary(list_to_binary(File)),
+ ?uint32(Flags),
+ ?REG_ATTERS]),
+ Size = 1 + size(Data),
+ ssh_connection:send(Cm, Channel, <<?UINT32(Size),
+ ?SSH_FXP_OPEN, Data/binary>>),
+ reply(Cm, Channel).
close(Handle, ReqId, Cm , Channel) ->
diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl b/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl
index cc34cc0793..eac7575486 100644
--- a/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl
+++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl
@@ -52,6 +52,7 @@ groups() ->
init_per_suite(Config) ->
catch ssh:stop(),
+ catch crypto:stop(),
case catch crypto:start() of
ok ->
DataDir = ?config(data_dir, Config),
diff --git a/lib/ssh/test/ssh_test_lib.erl b/lib/ssh/test/ssh_test_lib.erl
index b8abf5e80e..8ca05746db 100644
--- a/lib/ssh/test/ssh_test_lib.erl
+++ b/lib/ssh/test/ssh_test_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -358,3 +358,16 @@ do_inet_port(Node) ->
{ok, Socket} = rpc:call(Node, gen_tcp, listen, [0, [{reuseaddr, true}]]),
{ok, Port} = rpc:call(Node, inet, port, [Socket]),
{Port, Socket}.
+
+openssh_sanity_check(Config) ->
+ ssh:start(),
+ case ssh:connect("localhost", 22, []) of
+ {ok, Pid} ->
+ ssh:close(Pid),
+ ssh:stop(),
+ Config;
+ Err ->
+ Str = lists:append(io_lib:format("~p", [Err])),
+ ssh:stop(),
+ {skip, Str}
+ end.
diff --git a/lib/ssh/test/ssh_to_openssh_SUITE.erl b/lib/ssh/test/ssh_to_openssh_SUITE.erl
index af70eeb46c..277e3a1b08 100644
--- a/lib/ssh/test/ssh_to_openssh_SUITE.erl
+++ b/lib/ssh/test/ssh_to_openssh_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -60,13 +60,14 @@ groups() ->
].
init_per_suite(Config) ->
+ catch crypto:stop(),
case catch crypto:start() of
ok ->
case gen_tcp:connect("localhost", 22, []) of
{error,econnrefused} ->
		    {skip,"No openssh daemon"};
_ ->
- Config
+ ssh_test_lib:openssh_sanity_check(Config)
end;
_Else ->
{skip,"Could not start crypto!"}
@@ -166,9 +167,11 @@ erlang_client_openssh_server_exec_compressed() ->
[{doc, "Test that compression option works"}].
erlang_client_openssh_server_exec_compressed(Config) when is_list(Config) ->
+    CompressAlgs = [zlib, 'zlib@openssh.com', none],
ConnectionRef = ssh_test_lib:connect(?SSH_DEFAULT_PORT, [{silently_accept_hosts, true},
{user_interaction, false},
- {compression, zlib}]),
+ {preferred_algorithms,
+ [{compression,CompressAlgs}]}]),
{ok, ChannelId} = ssh_connection:session_channel(ConnectionRef, infinity),
success = ssh_connection:exec(ConnectionRef, ChannelId,
"echo testing", infinity),
@@ -326,8 +329,11 @@ erlang_server_openssh_client_exec_compressed(Config) when is_list(Config) ->
PrivDir = ?config(priv_dir, Config),
KnownHosts = filename:join(PrivDir, "known_hosts"),
+%% CompressAlgs = [zlib, 'zlib@openssh.com'], % Does not work
+ CompressAlgs = [zlib],
{Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir},
- {compression, zlib},
+ {preferred_algorithms,
+ [{compression, CompressAlgs}]},
{failfun, fun ssh_test_lib:failfun/2}]),
ct:sleep(500),
@@ -545,6 +551,7 @@ receive_hej() ->
receive_logout() ->
receive
<<"logout">> ->
+ extra_logout(),
receive
<<"Connection closed">> ->
ok
@@ -564,6 +571,14 @@ receive_normal_exit(Shell) ->
ct:fail({unexpected_msg, Other})
end.
+extra_logout() ->
+ receive
+ <<"logout">> ->
+ ok
+ after 500 ->
+ ok
+ end.
+
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% Check if we have a "newer" ssh client that supports these test cases
diff --git a/lib/ssh/test/ssh_unicode_SUITE.erl b/lib/ssh/test/ssh_unicode_SUITE.erl
index cc916673b3..07d51335c6 100644
--- a/lib/ssh/test/ssh_unicode_SUITE.erl
+++ b/lib/ssh/test/ssh_unicode_SUITE.erl
@@ -55,6 +55,7 @@ all() ->
init_per_suite(Config) ->
+ catch crypto:stop(),
case {file:native_name_encoding(), (catch crypto:start())} of
{utf8, ok} ->
ssh:start(),
diff --git a/lib/ssh/vsn.mk b/lib/ssh/vsn.mk
index c8cac3e852..cef9992f1b 100644
--- a/lib/ssh/vsn.mk
+++ b/lib/ssh/vsn.mk
@@ -1,5 +1,4 @@
#-*-makefile-*- ; force emacs to enter makefile-mode
-SSH_VSN = 3.1
+SSH_VSN = 4.0
APP_VSN = "ssh-$(SSH_VSN)"
-
diff --git a/lib/ssl/doc/src/Makefile b/lib/ssl/doc/src/Makefile
index fb12499ef7..143756bd39 100644
--- a/lib/ssl/doc/src/Makefile
+++ b/lib/ssl/doc/src/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1999-2012. All Rights Reserved.
+# Copyright Ericsson AB 1999-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -37,7 +37,7 @@ RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN)
# Target Specs
# ----------------------------------------------------
XML_APPLICATION_FILES = refman.xml
-XML_REF3_FILES = ssl.xml ssl_session_cache_api.xml
+XML_REF3_FILES = ssl.xml ssl_crl_cache.xml ssl_crl_cache_api.xml ssl_session_cache_api.xml
XML_REF6_FILES = ssl_app.xml
XML_PART_FILES = release_notes.xml usersguide.xml
diff --git a/lib/ssl/doc/src/notes.xml b/lib/ssl/doc/src/notes.xml
index 4349e5a456..fe0606b1a3 100644
--- a/lib/ssl/doc/src/notes.xml
+++ b/lib/ssl/doc/src/notes.xml
@@ -25,7 +25,96 @@
<file>notes.xml</file>
</header>
<p>This document describes the changes made to the SSL application.</p>
- <section><title>SSL 5.3.8</title>
+ <section><title>SSL 6.0.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+	    Terminate gracefully when receiving bad input to premaster
+ secret calculation</p>
+ <p>
+ Own Id: OTP-12783</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>SSL 6.0</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Exclude self-signed trusted anchor certificates from
+	    the prospective certification path, according to
+ RFC 3280.</p>
+ <p>
+ This will avoid some unnecessary certificate processing.</p>
+ <p>
+ Own Id: OTP-12449</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ Separate client and server session cache internally.</p>
+ <p>
+	    Avoid session table growth when a client starts many
+	    connections before session reuse is possible. Only save
+	    a new session in the client if no equivalent session is
+	    already stored.</p>
+ <p>
+ Own Id: OTP-11365</p>
+ </item>
+ <item>
+ <p>
+ The PEM cache is now validated by a background process,
+ instead of always keeping it if it is small enough and
+ clearing it otherwise. That strategy required that small
+	    clearing it otherwise. That strategy required that small
+	    caches were cleared by an API function if a file changed
+	    on disk.</p>
+	    The API function for clearing the cache is, however, still
+	    exported, as it may still be useful.</p>
+ <p>
+ Own Id: OTP-12391</p>
+ </item>
+ <item>
+ <p>
+	    Add a padding check for TLS-1.0 to remove the POODLE
+	    vulnerability, and add the option padding_check. This
+	    option only affects TLS-1.0 connections; if set to false,
+	    it disables the block cipher padding check to allow
+	    interoperability with legacy software.</p>
+ <p>
+ *** POTENTIAL INCOMPATIBILITY ***</p>
+ <p>
+ Own Id: OTP-12420</p>
+ </item>
+ <item>
+ <p>
+ Add support for TLS_FALLBACK_SCSV used to prevent
+ undesired TLS version downgrades. If used by a client
+ that is vulnerable to the POODLE attack, and the server
+ also supports TLS_FALLBACK_SCSV, the attack can be
+ prevented.</p>
+ <p>
+ Own Id: OTP-12458</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>SSL 5.3.8</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
diff --git a/lib/ssl/doc/src/refman.xml b/lib/ssl/doc/src/refman.xml
index ae11198edb..d5f2219af9 100644
--- a/lib/ssl/doc/src/refman.xml
+++ b/lib/ssl/doc/src/refman.xml
@@ -4,7 +4,7 @@
<application xmlns:xi="http://www.w3.org/2001/XInclude">
<header>
<copyright>
- <year>1999</year><year>2013</year>
+ <year>1999</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -28,23 +28,10 @@
<rev>B</rev>
<file>refman.sgml</file>
</header>
- <description>
- <p>The <em>SSL</em> application provides secure communication over
- sockets.
- </p>
- <p>This product includes software developed by the OpenSSL Project for
- use in the OpenSSL Toolkit (http://www.openssl.org/).
- </p>
- <p>This product includes cryptographic software written by Eric Young
- </p>
- <p>This product includes software written by Tim Hudson
- </p>
- <p>For full OpenSSL and SSLeay license texts, see <seealso marker="licenses#licenses">Licenses</seealso>.</p>
- </description>
<xi:include href="ssl_app.xml"/>
<xi:include href="ssl.xml"/>
+ <xi:include href="ssl_crl_cache.xml"/>
+ <xi:include href="ssl_crl_cache_api.xml"/>
<xi:include href="ssl_session_cache_api.xml"/>
</application>
diff --git a/lib/ssl/doc/src/ssl.xml b/lib/ssl/doc/src/ssl.xml
index 83e5ed82bb..18d98e5efb 100644
--- a/lib/ssl/doc/src/ssl.xml
+++ b/lib/ssl/doc/src/ssl.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1999</year><year>2014</year>
+ <year>1999</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -21,241 +21,280 @@
</legalnotice>
<title>ssl</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
<file>ssl.xml</file>
</header>
<module>ssl</module>
<modulesummary>Interface Functions for Secure Socket Layer</modulesummary>
<description>
- <p>This module contains interface functions to the Secure Socket
- Layer.
- </p>
+    <p>This module contains interface functions for the SSL/TLS protocol.</p>
</description>
<section>
<title>SSL</title>
<list type="bulleted">
- <item>ssl requires the crypto and public_key applications.</item>
+      <item>For application dependencies, see <seealso marker="ssl_app">ssl(6)</seealso>.</item>
<item>Supported SSL/TLS-versions are SSL-3.0, TLS-1.0,
- TLS-1.1 and TLS-1.2.</item>
- <item>For security reasons sslv2 is not supported.</item>
- <item>Ephemeral Diffie-Hellman cipher suites are supported
+ TLS-1.1, and TLS-1.2.</item>
+ <item>For security reasons SSL-2.0 is not supported.</item>
+ <item>For security reasons SSL-3.0 is no longer supported by default,
+ but can be configured.</item>
+ <item>Ephemeral Diffie-Hellman cipher suites are supported,
but not Diffie Hellman Certificates cipher suites.</item>
- <item>Elliptic Curve cipher suites are supported if crypto
- supports it and named curves are used.
+ <item>Elliptic Curve cipher suites are supported if the Crypto
+ application supports it and named curves are used.
</item>
<item>Export cipher suites are not supported as the
U.S. lifted its export restrictions in early 2000.</item>
<item>IDEA cipher suites are not supported as they have
- become deprecated by the latest TLS spec so there is not any
- real motivation to implement them.</item>
- <item>CRL and policy certificate extensions are not supported
- yet. However CRL verification is supported by public_key, only not integrated
- in ssl yet. </item>
- <item>Support for 'Server Name Indication' extension client side
- (RFC 6066 section 3).</item>
+      become deprecated by the latest TLS specification, so there is no
+      real motivation to implement them.</item>
+ <item>CRL validation is supported.</item>
+ <item>Policy certificate extensions are not supported.</item>
+ <item>'Server Name Indication' extension client side
+ (RFC 6066, Section 3) is supported.</item>
</list>
</section>
<section>
- <title>COMMON DATA TYPES</title>
- <p>The following data types are used in the functions below:
- </p>
+ <title>DATA TYPES</title>
+ <p>The following data types are used in the functions for SSL:</p>
- <p><c>boolean() = true | false</c></p>
+ <taglist>
- <p><c>option() = socketoption() | ssloption() | transportoption()</c></p>
+ <tag><c>boolean() =</c></tag>
+ <item><p><c>true | false</c></p></item>
- <p><c>socketoption() = proplists:property() - The default socket options are
- [{mode,list},{packet, 0},{header, 0},{active, true}].
- </c></p>
+ <tag><c>option() =</c></tag>
+ <item><p><c>socketoption() | ssloption() | transportoption()</c></p>
+ </item>
- <p>For valid options
- see <seealso marker="kernel:inet">inet(3)</seealso> and
- <seealso marker="kernel:gen_tcp">gen_tcp(3)</seealso>.
- </p>
-
- <p><marker id="type-ssloption"></marker><c>ssloption() = {verify, verify_type()} |
- {verify_fun, {fun(), term()}} |
- {fail_if_no_peer_cert, boolean()}
- {depth, integer()} |
- {cert, der_encoded()}| {certfile, path()} |
- {key, {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey' |'PrivateKeyInfo', der_encoded()}} |
- {keyfile, path()} | {password, string()} |
- {cacerts, [der_encoded()]} | {cacertfile, path()} |
- |{dh, der_encoded()} | {dhfile, path()} | {ciphers, ciphers()} |
- {user_lookup_fun, {fun(), term()}}, {psk_identity, string()}, {srp_identity, {string(), string()}} |
- {ssl_imp, ssl_imp()} | {reuse_sessions, boolean()} | {reuse_session, fun()}
- {next_protocols_advertised, [binary()]} |
- {client_preferred_next_protocols, {client | server, [binary()]} | {client | server, [binary()], binary()}} |
- {log_alert, boolean()} | {server_name_indication, hostname() | disable}
- </c></p>
-
- <p><c>transportoption() = {cb_info, {CallbackModule::atom(), DataTag::atom(), ClosedTag::atom(), ErrTag:atom()}}
- - defaults to {gen_tcp, tcp, tcp_closed, tcp_error}. Can be used to customize
- the transport layer. The callback module must implement a reliable transport
- protocol and behave as gen_tcp and in addition have functions corresponding to
- inet:setopts/2, inet:getopts/2, inet:peername/1, inet:sockname/1 and inet:port/1.
- The callback gen_tcp is treated specially and will call inet directly.
- </c></p>
-
- <p><c>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; CallbackModule =
- atom()</c>
- </p> <p><c>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; DataTag =
- atom() - tag used in socket data message.</c></p>
- <p><c>&nbsp;&nbsp;&nbsp;&nbsp;&nbsp; ClosedTag = atom() - tag used in
- socket close message.</c></p>
-
- <p><c>verify_type() = verify_none | verify_peer</c></p>
-
- <p><c>path() = string() - representing a file path.</c></p>
+ <tag><c>socketoption() =</c></tag>
+ <item><p><c>proplists:property()</c></p>
+ <p>The default socket options are
+ <c>[{mode,list},{packet, 0},{header, 0},{active, true}]</c>.</p>
+ <p>For valid options, see the
+ <seealso marker="kernel:inet">inet(3)</seealso> and
+ <seealso marker="kernel:gen_tcp">gen_tcp(3)</seealso> manual pages
+ in Kernel.</p></item>
- <p><c>der_encoded() = binary() -Asn1 DER encoded entity as an erlang binary.</c></p>
-
- <p><c>host() = hostname() | ipaddress()</c></p>
-
- <p><c>hostname() = string()</c></p>
-
- <p><c>
- ip_address() = {N1,N2,N3,N4} % IPv4
- | {K1,K2,K3,K4,K5,K6,K7,K8} % IPv6 </c></p>
+ <tag><marker id="type-ssloption"></marker><c>ssloption() =</c></tag>
+ <item>
+ <p><c>{verify, verify_type()}</c></p>
+ <p><c>| {verify_fun, {fun(), term()}}</c></p>
+      <p><c>| {fail_if_no_peer_cert, boolean()}</c></p>
+      <p><c>| {depth, integer()}</c></p>
+ <p><c>| {cert, public_key:der_encoded()}</c></p>
+ <p><c>| {certfile, path()}</c></p>
+ <p><c>| {key, {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey'
+ | 'PrivateKeyInfo', public_key:der_encoded()}}</c></p>
+ <p><c>| {keyfile, path()}</c></p>
+ <p><c>| {password, string()}</c></p>
+ <p><c>| {cacerts, [public_key:der_encoded()]}</c></p>
+ <p><c>| {cacertfile, path()}</c></p>
+ <p><c>| {dh, public_key:der_encoded()}</c></p>
+ <p><c>| {dhfile, path()}</c></p>
+ <p><c>| {ciphers, ciphers()}</c></p>
+      <p><c>| {user_lookup_fun, {fun(), term()}}</c></p>
+      <p><c>| {psk_identity, string()}</c></p>
+      <p><c>| {srp_identity, {string(), string()}}</c></p>
+ <p><c>| {reuse_sessions, boolean()}</c></p>
+      <p><c>| {reuse_session, fun()}</c></p>
+      <p><c>| {next_protocols_advertised, [binary()]}</c></p>
+ <p><c>| {client_preferred_next_protocols, {client | server,
+ [binary()]} | {client | server, [binary()], binary()}}</c></p>
+ <p><c>| {log_alert, boolean()}</c></p>
+ <p><c>| {server_name_indication, hostname() | disable}</c></p>
+ <p><c>| {sni_hosts, [{hostname(), ssloptions()}]}</c></p>
+ <p><c>| {sni_fun, SNIfun::fun()}</c></p>
+ </item>
+
+ <tag><c>transportoption() =</c></tag>
+      <item><p><c>{cb_info, {CallbackModule::atom(), DataTag::atom(),
+      ClosedTag::atom(), ErrTag::atom()}}</c></p>
+ <p>Defaults to <c>{gen_tcp, tcp, tcp_closed, tcp_error}</c>. Can be used
+ to customize the transport layer. The callback module must implement a
+ reliable transport protocol, behave as <c>gen_tcp</c>, and have functions
+ corresponding to <c>inet:setopts/2</c>, <c>inet:getopts/2</c>,
+ <c>inet:peername/1</c>, <c>inet:sockname/1</c>, and <c>inet:port/1</c>.
+ The callback <c>gen_tcp</c> is treated specially and calls <c>inet</c>
+ directly.</p>
+ <taglist>
+ <tag><c>CallbackModule =</c></tag>
+ <item><p><c>atom()</c></p></item>
+ <tag><c>DataTag =</c></tag>
+ <item><p><c>atom()</c></p>
+ <p>Used in socket data message.</p></item>
+ <tag><c>ClosedTag =</c></tag>
+ <item><p><c>atom()</c></p>
+ <p>Used in socket close message.</p></item>
+ </taglist>
+ </item>
- <p><c>sslsocket() - opaque to the user. </c></p>
-
- <p><c>protocol() = sslv3 | tlsv1 | 'tlsv1.1' | 'tlsv1.2' </c></p>
-
- <p><c>ciphers() = [ciphersuite()] | string() (according to old API)</c></p>
-
- <p><c>ciphersuite() =
- {key_exchange(), cipher(), hash()}</c></p>
-
- <p><c>key_exchange() = rsa | dhe_dss | dhe_rsa | dh_anon
- | psk | dhe_psk | rsa_psk | srp_anon | srp_dss | srp_rsa
- | ecdh_anon | ecdh_ecdsa | ecdhe_ecdsa | ecdh_rsa | ecdhe_rsa
- </c></p>
+ <tag><c>verify_type() =</c></tag>
+ <item><p><c>verify_none | verify_peer</c></p></item>
+
+ <tag><c>path() =</c></tag>
+ <item><p><c>string()</c></p>
+ <p>Represents a file path.</p></item>
+
+ <tag><c>public_key:der_encoded() =</c></tag>
+ <item><p><c>binary()</c></p>
+ <p>ASN.1 DER-encoded entity as an Erlang binary.</p></item>
+
+ <tag><c>host() =</c></tag>
+ <item><p><c>hostname() | ipaddress()</c></p></item>
+
+ <tag><c>hostname() =</c></tag>
+ <item><p><c>string()</c></p></item>
+
+ <tag><c>ip_address() =</c></tag>
+      <item><p><c>{N1,N2,N3,N4} (IPv4) | {K1,K2,K3,K4,K5,K6,K7,K8} (IPv6)
+      </c></p></item>
- <p><c>cipher() = rc4_128 | des_cbc | '3des_ede_cbc'
- | aes_128_cbc | aes_256_cbc | aes_128_gcm | aes_256_gcm </c></p>
+ <tag><c>sslsocket() =</c></tag>
+ <item><p>opaque()</p></item>
- <p> <c>hash() = md5 | sha
- </c></p>
+ <tag><c>protocol() =</c></tag>
+ <item><p><c>sslv3 | tlsv1 | 'tlsv1.1' | 'tlsv1.2'</c></p></item>
- <p><c>prf_random() = client_random | server_random
- </c></p>
+ <tag><c>ciphers() =</c></tag>
+      <item><p><c>[ciphersuite()] | string()</c></p>
+      <p>The string format is according to the old API.</p></item>
- <p><c>srp_param_type() = srp_1024 | srp_1536 | srp_2048 | srp_3072
- | srp_4096 | srp_6144 | srp_8192</c></p>
+ <tag><c>ciphersuite() =</c></tag>
+ <item><p><c>{key_exchange(), cipher(), hash()}</c></p></item>
+ <tag><c>key_exchange()=</c></tag>
+ <item><p><c>rsa | dhe_dss | dhe_rsa | dh_anon | psk | dhe_psk
+ | rsa_psk | srp_anon | srp_dss | srp_rsa | ecdh_anon | ecdh_ecdsa
+ | ecdhe_ecdsa | ecdh_rsa | ecdhe_rsa</c></p></item>
+
+ <tag><c>cipher() =</c></tag>
+ <item><p><c>rc4_128 | des_cbc | '3des_ede_cbc'
+ | aes_128_cbc | aes_256_cbc | aes_128_gcm | aes_256_gcm</c></p></item>
+
+ <tag><c>hash() =</c></tag>
+ <item><p><c>md5 | sha</c></p></item>
+
+ <tag><c>prf_random() =</c></tag>
+ <item><p><c>client_random | server_random</c></p></item>
+
+ <tag><c>srp_param_type() =</c></tag>
+ <item><p><c>srp_1024 | srp_1536 | srp_2048 | srp_3072
+ | srp_4096 | srp_6144 | srp_8192</c></p></item>
+
+      <tag><c>SNIfun::fun() =</c></tag>
+      <item><p><c>fun(ServerName :: string()) -> ssloptions()</c></p></item>
+
+ </taglist>
</section>
<section>
<title>SSL OPTION DESCRIPTIONS - COMMON for SERVER and CLIENT</title>
- <p>Options described here are options that are have the same
- meaning in the client and the server.
- </p>
+ <p>The following options have the same meaning in the client and
+ the server:</p>
<taglist>
- <tag>{cert, der_encoded()}</tag>
- <item> The DER encoded users certificate. If this option
- is supplied it will override the certfile option.</item>
+ <tag><c>{cert, public_key:der_encoded()}</c></tag>
+      <item><p>The DER-encoded user's certificate. If this option
+      is supplied, it overrides option <c>certfile</c>.</p></item>
- <tag>{certfile, path()}</tag>
- <item>Path to a file containing the user's certificate.</item>
+ <tag><c>{certfile, path()}</c></tag>
+ <item><p>Path to a file containing the user certificate.</p></item>
- <tag>{key, {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey' |'PrivateKeyInfo', der_encoded()}}</tag>
- <item> The DER encoded users private key. If this option
- is supplied it will override the keyfile option.</item>
+ <tag><c>{key, {'RSAPrivateKey'| 'DSAPrivateKey' | 'ECPrivateKey'
+ |'PrivateKeyInfo', public_key:der_encoded()}}</c></tag>
+ <item><p>The DER-encoded user's private key. If this option
+ is supplied, it overrides option <c>keyfile</c>.</p></item>
- <tag>{keyfile, path()}</tag>
- <item>Path to file containing user's
- private PEM encoded key. As PEM-files may contain several
- entries this option defaults to the same file as given by
- certfile option.</item>
-
- <tag>{password, string()}</tag>
- <item>String containing the user's password.
- Only used if the private keyfile is password protected.
- </item>
-
- <tag>{cacerts, [der_encoded()]}</tag>
- <item> The DER encoded trusted certificates. If this option
- is supplied it will override the cacertfile option.</item>
-
- <tag>{ciphers, ciphers()}</tag>
- <item>The cipher suites that should be supported. The function
+ <tag><c>{keyfile, path()}</c></tag>
+ <item><p>Path to the file containing the user's
+ private PEM-encoded key. As PEM-files can contain several
+ entries, this option defaults to the same file as given by
+ option <c>certfile</c>.</p></item>
+
+ <tag><c>{password, string()}</c></tag>
+ <item><p>String containing the user's password. Only used if the
+ private keyfile is password-protected.</p></item>
+
+ <tag><c>{ciphers, ciphers()}</c></tag>
+ <item><p>Supported cipher suites. The function
<c>cipher_suites/0</c> can be used to find all ciphers that are
- supported by default. <c>cipher_suites(all)</c> may be called
- to find all available cipher suites.
- Pre-Shared Key (<url href="http://www.ietf.org/rfc/rfc4279.txt">RFC 4279</url> and
+ supported by default. <c>cipher_suites(all)</c> can be called
+ to find all available cipher suites. Pre-Shared Key
+ (<url href="http://www.ietf.org/rfc/rfc4279.txt">RFC 4279</url> and
<url href="http://www.ietf.org/rfc/rfc5487.txt">RFC 5487</url>),
- Secure Remote Password (<url href="http://www.ietf.org/rfc/rfc5054.txt">RFC 5054</url>)
+ Secure Remote Password
+ (<url href="http://www.ietf.org/rfc/rfc5054.txt">RFC 5054</url>), RC4 cipher suites,
and anonymous cipher suites only work if explicitly enabled by
- this option and they are supported/enabled by the peer also.
- Note that anonymous cipher suites are supported for testing purposes
- only and should not be used when security matters.
+      this option and are also supported/enabled by the peer.
+      Anonymous cipher suites are supported for testing purposes
+      only and are not to be used when security matters.</p></item>
+
+ <tag><c>{secure_renegotiate, boolean()}</c></tag>
+      <item><p>Specifies whether to reject a renegotiation attempt that does
+      not live up to
+      <url href="http://www.ietf.org/rfc/rfc5746.txt">RFC 5746</url>.
+      By default <c>secure_renegotiate</c> is set to <c>false</c>,
+      that is, secure renegotiation is used if possible,
+      but the connection falls back to insecure renegotiation if the peer
+      does not support
+      <url href="http://www.ietf.org/rfc/rfc5746.txt">RFC 5746</url>.</p>
</item>
- <tag>{ssl_imp, new | old}</tag>
- <item>No longer has any meaning as the old implementation has
- been removed, it will be ignored.
- </item>
+ <tag><c>{depth, integer()}</c></tag>
+ <item><p>Maximum number of non-self-issued
+ intermediate certificates that can follow the peer certificate
+ in a valid certification path. So, if depth is 0 the PEER must
+ be signed by the trusted ROOT-CA directly; if 1 the path can
+ be PEER, CA, ROOT-CA; if 2 the path can be PEER, CA, CA,
+ ROOT-CA, and so on. The default value is 1.</p></item>
- <tag>{secure_renegotiate, boolean()}</tag>
- <item>Specifies if to reject renegotiation attempt that does
- not live up to <url href="http://www.ietf.org/rfc/rfc5746.txt">RFC 5746</url>. By default secure_renegotiate is
- set to false i.e. secure renegotiation will be used if possible
- but it will fallback to unsecure renegotiation if the peer
- does not support <url href="http://www.ietf.org/rfc/rfc5746.txt">RFC 5746</url>.
- </item>
-
- <tag>{depth, integer()}</tag>
- <item>
- The depth is the maximum number of non-self-issued
- intermediate certificates that may follow the peer certificate
- in a valid certification path. So if depth is 0 the PEER must
- be signed by the trusted ROOT-CA directly, if 1 the path can
- be PEER, CA, ROOT-CA, if it is 2 PEER, CA, CA, ROOT-CA and so
- on. The default value is 1.
- </item>
-
- <tag>{verify_fun, {Verifyfun :: fun(), InitialUserState :: term()}}</tag>
- <item>
- <p>The verification fun should be defined as:</p>
+ <tag><c>{verify_fun, {Verifyfun :: fun(), InitialUserState ::
+ term()}}</c></tag>
+ <item><p>The verification fun is to be defined as follows:</p>
<code>
-fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revoked, atom()}} |
+fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revoked,
+atom()}} |
{extension, #'Extension'{}}, InitialUserState :: term()) ->
{valid, UserState :: term()} | {valid_peer, UserState :: term()} |
{fail, Reason :: term()} | {unknown, UserState :: term()}.
</code>
- <p>The verify fun will be called during the X509-path
- validation when an error or an extension unknown to the ssl
- application is encountered. Additionally it will be called
+ <p>The verification fun is called during the X509-path
+ validation when an error or an extension unknown to the SSL
+ application is encountered. It is also called
when a certificate is considered valid by the path validation
to allow access to each certificate in the path to the user
- application. Note that it will differentiate between the
- peer certificate and CA certificates by using valid_peer or
- valid as the second argument to the verify fun. See <seealso
- marker="public_key:cert_records">the public_key User's
- Guide</seealso> for definition of #'OTPCertificate'{} and
- #'Extension'{}.</p>
-
- <p>If the verify callback fun returns {fail, Reason}, the
- verification process is immediately stopped and an alert is
- sent to the peer and the TLS/SSL handshake is terminated. If
- the verify callback fun returns {valid, UserState}, the
- verification process is continued. If the verify callback fun
- always returns {valid, UserState}, the TLS/SSL handshake will
- not be terminated with respect to verification failures and
- the connection will be established. If called with an
- extension unknown to the user application, the return value
- {unknown, UserState} should be used.</p>
-
- <p>The default verify_fun option in verify_peer mode:</p>
+ application. It differentiates between the peer
+ certificate and the CA certificates by using <c>valid_peer</c> or
+ <c>valid</c> as second argument to the verification fun. See the
+ <seealso marker="public_key:public_key_records">public_key User's
+ Guide</seealso> for definition of <c>#'OTPCertificate'{}</c> and
+ <c>#'Extension'{}</c>.</p>
+
+ <list type="bulleted">
+ <item><p>If the verify callback fun returns <c>{fail, Reason}</c>,
+ the verification process is immediately stopped, an alert is
+ sent to the peer, and the TLS/SSL handshake terminates.</p></item>
+ <item><p>If the verify callback fun returns <c>{valid, UserState}</c>,
+ the verification process continues.</p></item>
+ <item><p>If the verify callback fun always returns
+ <c>{valid, UserState}</c>, the TLS/SSL handshake does not
+ terminate regarding verification failures and the connection is
+ established.</p></item>
+ <item><p>If called with an extension unknown to the user application,
+ return value <c>{unknown, UserState}</c> is to be used.</p></item>
+ </list>
+
+      <p>Default option <c>verify_fun</c> in <c>verify_peer</c> mode:</p>
<code>
{fun(_,{bad_cert, _} = Reason, _) ->
@@ -269,7 +308,7 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo
end, []}
</code>
- <p>The default verify_fun option in verify_none mode:</p>
+ <p>Default option <c>verify_fun</c> in mode <c>verify_none</c>:</p>
<code>
{fun(_,{bad_cert, _}, UserState) ->
@@ -283,49 +322,88 @@ fun(OtpCert :: #'OTPCertificate'{}, Event :: {bad_cert, Reason :: atom() | {revo
end, []}
</code>
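+
+      <p>For illustration, a minimal sketch of a custom verification fun that
+      accepts expired certificates but rejects all other errors; the host and
+      file names are examples only:</p>
+      <code>
+VerifyFun =
+    {fun(_OtpCert, {bad_cert, cert_expired}, UserState) ->
+             %% Accept expired certificates (for illustration only)
+             {valid, UserState};
+        (_OtpCert, {bad_cert, _} = Reason, _UserState) ->
+             {fail, Reason};
+        (_OtpCert, {extension, _}, UserState) ->
+             {unknown, UserState};
+        (_OtpCert, valid, UserState) ->
+             {valid, UserState};
+        (_OtpCert, valid_peer, UserState) ->
+             {valid_peer, UserState}
+     end, []},
+
+ssl:connect("example.com", 443, [{verify, verify_peer},
+                                 {cacertfile, "cacerts.pem"},
+                                 {verify_fun, VerifyFun}]).
+      </code>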
- <p>Possible path validation errors are given on the form {bad_cert, Reason} where Reason is:</p>
+      <p>The possible path validation errors are given on the form
+      <c>{bad_cert, Reason}</c>, where <c>Reason</c> is:</p>
<taglist>
- <tag>unknown_ca</tag>
- <item>No trusted CA was found in the trusted store. The trusted CA is
- normally a so called ROOT CA that is a self-signed cert. Trust may
- be claimed for an intermediat CA (trusted anchor does not have to be self signed
- according to X-509) by using the option <c>partial_chain</c></item>
-
- <tag>selfsigned_peer</tag>
- <item>The chain consisted only of one self-signed certificate.</item>
-
- <tag>PKIX X-509-path validation error</tag>
- <item> Possible such reasons see <seealso
- marker="public_key:public_key#pkix_path_validation-3"> public_key:pkix_path_validation/3 </seealso></item>
+ <tag><c>unknown_ca</c></tag>
+      <item><p>No trusted CA was found in the trusted store. The trusted CA is
+      normally a so-called ROOT CA, which is a self-signed certificate. Trust can
+      be claimed for an intermediate CA (the trusted anchor does not have to be
+      self-signed according to X-509) by using option <c>partial_chain</c>.</p>
+ </item>
+
+ <tag><c>selfsigned_peer</c></tag>
+ <item><p>The chain consisted only of one self-signed certificate.</p></item>
+
+ <tag><c>PKIX X-509-path validation error</c></tag>
+ <item><p>For possible reasons, see <seealso
+marker="public_key:public_key#pkix_path_validation-3">public_key:pkix_path_validation/3</seealso>
+ </p></item>
</taglist>
-
</item>
- <tag>{partial_chain, fun(Chain::[DerCert]) -> {trusted_ca, DerCert} | unknown_ca </tag>
+ <tag><c>{crl_check, boolean() | peer | best_effort }</c></tag>
<item>
- Claim an intermediat CA in the chain as trusted. TLS will then perform the public_key:pkix_path_validation/3
- with the selected CA as trusted anchor and the rest of the chain.
- </item>
+       <p>Perform CRL (Certificate Revocation List) verification
+       (<seealso marker="public_key:public_key#pkix_crls_validate-3">public_key:pkix_crls_validate/3</seealso>)
+       on all the certificates during the path validation
+       (<seealso marker="public_key:public_key#pkix_path_validation-3">public_key:pkix_path_validation/3</seealso>)
+       of the certificate chain. Defaults to <c>false</c>;
+       see also the example at the end of this section.</p>
+
+       <p><c>peer</c> - the check is performed only on
+       the peer certificate.</p>
- <tag>{versions, [protocol()]}</tag>
- <item>TLS protocol versions that will be supported by started clients and servers.
- This option overrides the application environment option <c>protocol_version</c>. If the
- environment option is not set it defaults to all versions supported by the SSL application. See also
- <seealso marker="ssl:ssl_app">ssl(6)</seealso>
- </item>
+       <p><c>best_effort</c> - if the certificate revocation status cannot be
+       determined, it is accepted as valid.</p>
- <tag>{hibernate_after, integer()|undefined}</tag>
- <item>When an integer-value is specified, the <c>ssl_connection</c>
- will go into hibernation after the specified number of milliseconds
- of inactivity, thus reducing its memory footprint. When
- <c>undefined</c> is specified (this is the default), the process
- will never go into hibernation.
+       <p>The CA certificates specified for the connection are used to
+       construct the certificate chain that validates the CRLs.</p>
+
+       <p>The CRLs are fetched from a local or external cache; see
+       <seealso marker="ssl:ssl_crl_cache_api">ssl_crl_cache_api(3)</seealso>.</p>
</item>
- <tag>{user_lookup_fun, {Lookupfun :: fun(), UserState :: term()}}</tag>
+ <tag><c>{crl_cache, {Module :: atom(), {DbHandle :: internal | term(), Args :: list()}}}</c></tag>
<item>
- <p>The lookup fun should be defined as:</p>
+      <p><c>Module</c> defaults to <c>ssl_crl_cache</c>, with <c>DbHandle</c> being
+      <c>internal</c> and an empty argument list. The following arguments can be
+      specified for the internal cache:</p>
+ <taglist>
+ <tag><c>{http, timeout()}</c></tag>
+	 <item><p>
+	 Enables fetching of CRLs specified as http URIs in
+	 <seealso marker="public_key:public_key_records">X509 certificate
+	 extensions</seealso>. Requires the OTP inets application.</p>
+ </item>
+ </taglist>
+ </item>
+
+ <tag><c>{partial_chain, fun(Chain::[DerCert]) -> {trusted_ca, DerCert} |
+ unknown_ca }</c></tag>
+ <item><p>Claim an intermediate CA in the chain as trusted. TLS then
+ performs <seealso
+ marker="public_key:public_key#pkix_path_validation-3">public_key:pkix_path_validation/3</seealso>
+ with the selected CA as trusted anchor and the rest of the chain.</p></item>
+
+ <tag><c>{versions, [protocol()]}</c></tag>
+ <item><p>TLS protocol versions supported by started clients and servers.
+ This option overrides the application environment option
+ <c>protocol_version</c>. If the environment option is not set, it defaults
+ to all versions, except SSL-3.0, supported by the SSL application.
+ See also <seealso marker="ssl:ssl_app">ssl(6).</seealso></p></item>
+
+ <tag><c>{hibernate_after, integer()|undefined}</c></tag>
+ <item><p>When an integer-value is specified, <c>ssl_connection</c>
+ goes into hibernation after the specified number of milliseconds
+ of inactivity, thus reducing its memory footprint. When
+ <c>undefined</c> is specified (this is the default), the process
+ never goes into hibernation.</p></item>
+
+ <tag><c>{user_lookup_fun, {Lookupfun :: fun(), UserState :: term()}}</c></tag>
+      <item><p>The lookup fun is to be defined as follows:</p>
+
<code>
fun(psk, PSKIdentity ::string(), UserState :: term()) ->
{ok, SharedSecret :: binary()} | error;
@@ -333,227 +411,311 @@ fun(srp, Username :: string(), UserState :: term()) ->
{ok, {SRPParams :: srp_param_type(), Salt :: binary(), DerivedKey :: binary()}} | error.
</code>
- <p>For Pre-Shared Key (PSK) cipher suites, the lookup fun will
- be called by the client and server to determine the shared
- secret. When called by the client, PSKIdentity will be set to the
- hint presented by the server or undefined. When called by the
- server, PSKIdentity is the identity presented by the client.
- </p>
-
- <p>For Secure Remote Password (SRP), the fun will only be used by the server to obtain
- parameters that it will use to generate its session keys. <c>DerivedKey</c> should be
- derived according to <url href="http://tools.ietf.org/html/rfc2945#section-3"> RFC 2945</url> and
- <url href="http://tools.ietf.org/html/rfc5054#section-2.4"> RFC 5054</url>:
- <c>crypto:sha([Salt, crypto:sha([Username, &lt;&lt;$:&gt;&gt;, Password])]) </c>
+ <p>For Pre-Shared Key (PSK) cipher suites, the lookup fun is
+ called by the client and server to determine the shared
+ secret. When called by the client, <c>PSKIdentity</c> is set to the
+ hint presented by the server or to undefined. When called by the
+ server, <c>PSKIdentity</c> is the identity presented by the client.</p>
+
+ <p>For Secure Remote Password (SRP), the fun is only used by the server to
+ obtain parameters that it uses to generate its session keys.
+ <c>DerivedKey</c> is to be derived according to
+ <url href="http://tools.ietf.org/html/rfc2945#section-3"> RFC 2945</url> and
+ <url href="http://tools.ietf.org/html/rfc5054#section-2.4"> RFC 5054</url>:
+ <c>crypto:sha([Salt, crypto:sha([Username, &lt;&lt;$:&gt;&gt;, Password])])</c>
</p>
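+
+      <p>For illustration, a minimal sketch of a PSK lookup fun; the identity,
+      cipher suite, and secret below are examples only:</p>
+      <code>
+LookupFun =
+    {fun(psk, _PSKIdentity, UserState) ->
+             %% Return the pre-shared key agreed upon out of band
+             {ok, UserState};
+        (srp, _Username, _UserState) ->
+             error
+     end, list_to_binary("example-shared-secret")},
+
+ssl:connect("example.com", 8443,
+            [{psk_identity, "client-1"},
+             {ciphers, [{psk, aes_128_cbc, sha}]},
+             {user_lookup_fun, LookupFun}]).
+      </code>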
</item>
+ <tag><c>{padding_check, boolean()}</c></tag>
+ <item><p>Affects TLS-1.0 connections only.
+ If set to <c>false</c>, it disables the block cipher padding check
+ to be able to interoperate with legacy software.</p></item>
+
</taglist>
- </section>
+    <warning><p>Disabling the block cipher padding check by setting
+    <c>{padding_check, false}</c> makes TLS vulnerable to the POODLE attack.</p></warning>
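+
+    <p>For illustration, a minimal sketch of a client connection with CRL
+    checking enabled, using the internal CRL cache with HTTP fetching; the
+    host and file names are examples only:</p>
+    <code>
+ssl:connect("example.com", 443,
+            [{verify, verify_peer},
+             {cacertfile, "cacerts.pem"},
+             {crl_check, best_effort},
+             {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}}]).
+    </code>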
- <section>
+ </section>
+
+ <section>
<title>SSL OPTION DESCRIPTIONS - CLIENT SIDE</title>
- <p>Options described here are client specific or has a slightly different
- meaning in the client than in the server.</p>
+ <p>The following options are client-specific or have a slightly different
+ meaning in the client than in the server:</p>
<taglist>
- <tag>{verify, verify_type()}</tag>
- <item> In verify_none mode the default behavior will be to
- allow all x509-path validation errors. See also the verify_fun
- option.
- </item>
- <tag>{reuse_sessions, boolean()}</tag>
- <item>Specifies if client should try to reuse sessions
- when possible.
+
+ <tag><c>{verify, verify_type()}</c></tag>
+ <item><p>In mode <c>verify_none</c> the default behavior is to allow
+ all x509-path validation errors. See also option <c>verify_fun</c>.</p>
</item>
+
+ <tag><c>{reuse_sessions, boolean()}</c></tag>
+ <item><p>Specifies if the client is to try to reuse sessions
+ when possible.</p></item>
+
+ <tag><c>{cacerts, [public_key:der_encoded()]}</c></tag>
+ <item><p>The DER-encoded trusted certificates. If this option
+ is supplied it overrides option <c>cacertfile</c>.</p></item>
- <tag>{cacertfile, path()}</tag>
- <item>The path to a file containing PEM encoded CA certificates. The CA
+ <tag><c>{cacertfile, path()}</c></tag>
+ <item><p>Path to a file containing PEM-encoded CA certificates. The CA
certificates are used during server authentication and when building the
- client certificate chain.
- </item>
-
- <tag>{client_preferred_next_protocols, {Precedence :: server | client, ClientPrefs :: [binary()]}}</tag>
- <tag>{client_preferred_next_protocols, {Precedence :: server | client, ClientPrefs :: [binary()], Default :: binary()}}</tag>
+ client certificate chain.</p>
+ </item>
+
+ <tag><c>{alpn_advertised_protocols, [binary()]}</c></tag>
+ <item>
+ <p>The list of protocols supported by the client to be sent to the
+ server to be used for an Application-Layer Protocol Negotiation (ALPN).
+ If the server supports ALPN then it will choose a protocol from this
+ list; otherwise it will fail the connection with a "no_application_protocol"
+ alert. A server that does not support ALPN will ignore this value.</p>
+
+ <p>The list of protocols must not contain an empty binary.</p>
+
+      <p>The negotiated protocol can be retrieved using the <c>negotiated_protocol/1</c>
+      function; see also the example at the end of this section.</p>
+ </item>
+
+ <tag><c>{client_preferred_next_protocols, {Precedence :: server | client, ClientPrefs :: [binary()]}}</c></tag>
+ <tag><c>{client_preferred_next_protocols, {Precedence :: server | client, ClientPrefs :: [binary()], Default :: binary()}}</c></tag>
<item>
- <p>Indicates the client will try to perform Next Protocol
+ <p>Indicates that the client is to try to perform Next Protocol
Negotiation.</p>
- <p>If precedence is server the negotiated protocol will be the
- first protocol that appears on the server advertised list that is
+      <p>If precedence is server, the negotiated protocol is the
+      first protocol that appears on the server advertised list and is
also on the client preference list.</p>
- <p>If precedence is client the negotiated protocol will be the
- first protocol that appears on the client preference list that is
+      <p>If precedence is client, the negotiated protocol is the
+      first protocol that appears on the client preference list and is
also on the server advertised list.</p>
<p>If the client does not support any of the server advertised
- protocols or the server does not advertise any protocols the
- client will fallback to the first protocol in its list or if a
- default is supplied it will fallback to that instead. If the
- server does not support Next Protocol Negotiation the
- connection will be aborted if no default protocol is supplied.</p>
+ protocols or the server does not advertise any protocols, the
+ client falls back to the first protocol in its list or to the
+ default protocol (if a default is supplied). If the
+ server does not support Next Protocol Negotiation, the
+ connection terminates if no default protocol is supplied.</p>
</item>
- <tag>{psk_identity, string()}</tag>
- <item>Specifies the identity the client presents to the server. The matching secret is
- found by calling the user_look_fun.
- </item>
- <tag>{srp_identity, {Username :: string(), Password :: string()}</tag>
- <item>Specifies the Username and Password to use to authenticate to the server.
+ <tag><c>{psk_identity, string()}</c></tag>
+ <item><p>Specifies the identity the client presents to the server.
+ The matching secret is found by calling <c>user_lookup_fun</c>.</p>
</item>
- <tag>{server_name_indication, hostname()}</tag>
- <tag>{server_name_indication, disable}</tag>
+
+      <tag><c>{srp_identity, {Username :: string(), Password :: string()}}
+      </c></tag>
+ <item><p>Specifies the username and password to use to authenticate
+ to the server.</p></item>
+
+ <tag><c>{server_name_indication, hostname()}</c></tag>
+ <item><p>Can be specified when upgrading a TCP socket to a TLS
+ socket to use the TLS Server Name Indication extension.</p></item>
+
+ <tag><c>{server_name_indication, disable}</c></tag>
<item>
- <p>This option can be specified when upgrading a TCP socket to a TLS
- socket to use the TLS Server Name Indication extension.</p>
- <p>When starting a TLS connection without upgrade the Server Name
- Indication extension will be sent if possible, this option may also be
+ <p>When starting a TLS connection without upgrade, the Server Name
+ Indication extension is sent if possible. This option can be
used to disable that behavior.</p>
</item>
+ <tag><c>{fallback, boolean()}</c></tag>
+ <item>
+      <p>Send the special cipher suite TLS_FALLBACK_SCSV to avoid an undesired TLS version downgrade.
+      Defaults to <c>false</c>.</p>
+      <warning><p>This option is not needed in normal TLS usage and is not to be used
+      to implement new clients. However, legacy clients that retry connections in the following manner</p>
+
+      <p><c> ssl:connect(Host, Port, [...{versions, ['tlsv1.2', 'tlsv1.1', 'tlsv1', 'sslv3']}])</c></p>
+      <p><c> ssl:connect(Host, Port, [...{versions, ['tlsv1.1', 'tlsv1', 'sslv3']}, {fallback, true}])</c></p>
+      <p><c> ssl:connect(Host, Port, [...{versions, ['tlsv1', 'sslv3']}, {fallback, true}]) </c></p>
+      <p><c> ssl:connect(Host, Port, [...{versions, ['sslv3']}, {fallback, true}]) </c></p>
+
+      <p>can use it to avoid an undesired TLS version downgrade. Note that TLS_FALLBACK_SCSV must also
+      be supported by the server for the prevention to work.
+      </p></warning>
+ </item>
+
</taglist>
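+
+    <p>For illustration, a minimal sketch of a client that advertises ALPN
+    protocols and reads back the negotiated protocol; the host, protocol
+    names, and file names are examples only:</p>
+    <code>
+{ok, Socket} =
+    ssl:connect("example.com", 443,
+                [{alpn_advertised_protocols,
+                  [&lt;&lt;"http/1.1"&gt;&gt;, &lt;&lt;"spdy/3"&gt;&gt;]},
+                 {cacertfile, "cacerts.pem"},
+                 {verify, verify_peer}]),
+{ok, Protocol} = ssl:negotiated_protocol(Socket).
+    </code>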
</section>
<section>
<title>SSL OPTION DESCRIPTIONS - SERVER SIDE</title>
- <p>Options described here are server specific or has a slightly different
- meaning in the server than in the client.</p>
+ <p>The following options are server-specific or have a slightly different
+ meaning in the server than in the client:</p>
<taglist>
+
+ <tag><c>{cacerts, [public_key:der_encoded()]}</c></tag>
+ <item><p>The DER-encoded trusted certificates. If this option
+ is supplied it overrides option <c>cacertfile</c>.</p></item>
- <tag>{cacertfile, path()}</tag>
- <item>The path to a file containing PEM encoded CA
+ <tag><c>{cacertfile, path()}</c></tag>
+ <item><p>Path to a file containing PEM-encoded CA
certificates. The CA certificates are used to build the server
- certificate chain, and for client authentication. Also the CAs
- are used in the list of acceptable client CAs passed to the
- client when a certificate is requested. May be omitted if there
- is no need to verify the client and if there are not any
- intermediate CAs for the server certificate.
- </item>
+ certificate chain and for client authentication. The CAs are
+ also used in the list of acceptable client CAs passed to the
+ client when a certificate is requested. Can be omitted if there
+ is no need to verify the client and if there are no
+ intermediate CAs for the server certificate.</p></item>
- <tag>{dh, der_encoded()}</tag>
- <item>The DER encoded Diffie Hellman parameters. If this option
- is supplied it will override the dhfile option.
+ <tag><c>{dh, public_key:der_encoded()}</c></tag>
+ <item><p>The DER-encoded Diffie-Hellman parameters. If specified,
+ it overrides option <c>dhfile</c>.</p></item>
+
+ <tag><c>{dhfile, path()}</c></tag>
+      <item><p>Path to a file containing PEM-encoded Diffie-Hellman parameters
+      to be used by the server if a cipher suite using Diffie-Hellman key
+      exchange is negotiated. If not specified, default parameters are used.
+ </p></item>
+
+ <tag><c>{verify, verify_type()}</c></tag>
+      <item><p>A server only does x509-path validation in mode <c>verify_peer</c>,
+      as it then sends a certificate request to the client
+      (this message is not sent if the verify option is <c>verify_none</c>).
+      You may then also want to specify option <c>fail_if_no_peer_cert</c>.
+ </p></item>
+
+ <tag><c>{fail_if_no_peer_cert, boolean()}</c></tag>
+ <item><p>Used together with <c>{verify, verify_peer}</c> by an SSL server.
+ If set to <c>true</c>, the server fails if the client does not have
+ a certificate to send, that is, sends an empty certificate. If set to
+ <c>false</c>, it fails only if the client sends an invalid
+ certificate (an empty certificate is considered valid). Defaults to false.</p>
</item>
- <tag>{dhfile, path()}</tag>
- <item>Path to file containing PEM encoded Diffie Hellman parameters,
- for the server to use if a cipher suite using Diffie Hellman key exchange
- is negotiated. If not specified default parameters will be used.
- </item>
+ <tag><c>{reuse_sessions, boolean()}</c></tag>
+ <item><p>Specifies if the server is to agree to reuse sessions
+ when requested by the clients. See also option <c>reuse_session</c>.
+ </p></item>
+
+ <tag><c>{reuse_session, fun(SuggestedSessionId,
+ PeerCert, Compression, CipherSuite) -> boolean()}</c></tag>
+ <item><p>Enables the SSL server to have a local policy
+ for deciding if a session is to be reused or not.
+ Meaningful only if <c>reuse_sessions</c> is set to <c>true</c>.
+ <c>SuggestedSessionId</c> is a <c>binary()</c>, <c>PeerCert</c> is
+ a DER-encoded certificate, <c>Compression</c> is an enumeration integer,
+ and <c>CipherSuite</c> is of type <c>ciphersuite()</c>.</p></item>
+
+ <tag><c>{alpn_preferred_protocols, [binary()]}</c></tag>
+ <item>
+ <p>Indicates the server will try to perform Application-Layer
+ Protocol Negotiation (ALPN).</p>
- <tag>{verify, verify_type()}</tag>
- <item>Servers only do the x509-path validation in verify_peer
- mode, as it then will send a certificate request to the client
- (this message is not sent if the verify option is verify_none)
- and you may then also want to specify the option
- fail_if_no_peer_cert.
- </item>
+ <p>The list of protocols is in order of preference. The protocol
+ negotiated will be the first in the list that matches one of the
+ protocols advertised by the client. If no protocol matches, the
+ server will fail the connection with a "no_application_protocol" alert.</p>
- <tag>{fail_if_no_peer_cert, boolean()}</tag>
- <item>Used together with {verify, verify_peer} by an ssl server.
- If set to true, the server will fail if the client does not have
- a certificate to send, i.e. sends a empty certificate, if set to
- false it will only fail if the client sends an invalid
- certificate (an empty certificate is considered valid).
+ <p>The negotiated protocol can be retrieved using the <c>negotiated_protocol/1</c> function.</p>
</item>
- <tag>{reuse_sessions, boolean()}</tag>
- <item>Specifies if the server should agree to reuse sessions
- when the clients request to do so. See also the reuse_session
- option.
- </item>
+ <tag><c>{next_protocols_advertised, Protocols :: [binary()]}</c></tag>
+ <item><p>List of protocols to send to the client if the client indicates that
+ it supports the Next Protocol extension. The client can select a protocol
+ that is not on this list. The list of protocols must not contain an empty
+      binary. If the server negotiates a Next Protocol, it can be accessed
+      using the <c>negotiated_next_protocol/1</c> function.</p></item>
- <tag>{reuse_session, fun(SuggestedSessionId,
- PeerCert, Compression, CipherSuite) -> boolean()}</tag>
- <item>Enables the ssl server to have a local policy
- for deciding if a session should be reused or not,
- only meaningful if <c>reuse_sessions</c> is set to true.
- SuggestedSessionId is a binary(), PeerCert is a DER encoded
- certificate, Compression is an enumeration integer
- and CipherSuite is of type ciphersuite().
- </item>
+ <tag><c>{psk_identity, string()}</c></tag>
+ <item><p>Specifies the server identity hint, which the server presents to
+ the client.</p></item>
- <tag>{next_protocols_advertised, Protocols :: [binary()]}</tag>
- <item>The list of protocols to send to the client if the client indicates
- it supports the Next Protocol extension. The client may select a protocol
- that is not on this list. The list of protocols must not contain an empty
- binary. If the server negotiates a Next Protocol it can be accessed
- using <c>negotiated_next_protocol/1</c> method.
- </item>
+ <tag><c>{log_alert, boolean()}</c></tag>
+ <item><p>If set to <c>false</c>, error reports are not displayed.</p></item>
+
+ <tag><c>{honor_cipher_order, boolean()}</c></tag>
+ <item><p>If set to <c>true</c>, use the server preference for cipher
+ selection. If set to <c>false</c> (the default), use the client
+ preference.</p></item>
+
+ <tag><c>{sni_hosts, [{hostname(), ssloptions()}]}</c></tag>
+      <item><p>If the server receives a SNI (Server Name Indication) from the client
+      matching a host listed in the <c>sni_hosts</c> option, the specific options for
+      that host override the previously specified options.
+
+      The options <c>sni_fun</c> and <c>sni_hosts</c> are mutually exclusive.</p></item>
+
+ <tag><c>{sni_fun, SNIfun::fun()}</c></tag>
+      <item><p>If the server receives a SNI (Server Name Indication) from the client,
+      the given function is called to retrieve <c>ssloptions()</c> for the indicated server.
+      These options are merged into the predefined <c>ssloptions()</c>.
+
+      The function is to be defined as
+      <c>fun(ServerName :: string()) -> ssloptions()</c>
+      and can be specified as a fun or as a named <c>fun module:function/1</c>.
+
+      The options <c>sni_fun</c> and <c>sni_hosts</c> are mutually exclusive;
+      see also the example at the end of this section.</p></item>
- <tag>{psk_identity, string()}</tag>
- <item>Specifies the server identity hint the server presents to the client.
- </item>
- <tag>{log_alert, boolean()}</tag>
- <item>If false, error reports will not be displayed.</item>
- <tag>{honor_cipher_order, boolean()}</tag>
- <item>If true, use the server's preference for cipher selection. If false
- (the default), use the client's preference.
- </item>
</taglist>
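+
+    <p>For illustration, a minimal sketch of a server that selects certificates
+    based on SNI via <c>sni_fun</c>; the host and file names are examples only:</p>
+    <code>
+SniFun =
+    fun("a.example.com") ->
+            [{certfile, "a_cert.pem"}, {keyfile, "a_key.pem"}];
+       (_OtherHost) ->
+            []   %% fall back to the predefined options
+    end,
+
+{ok, ListenSocket} =
+    ssl:listen(8443, [{certfile, "default_cert.pem"},
+                      {keyfile, "default_key.pem"},
+                      {sni_fun, SniFun}]).
+    </code>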
</section>
<section>
<title>General</title>
- <p>When an ssl socket is in active mode (the default), data from the
+ <p>When an SSL socket is in active mode (the default), data from the
socket is delivered to the owner of the socket in the form of
- messages:
- </p>
+ messages:</p>
+
<list type="bulleted">
- <item>{ssl, Socket, Data}
- </item>
- <item>{ssl_closed, Socket}
- </item>
- <item>
- {ssl_error, Socket, Reason}
- </item>
+ <item><p><c>{ssl, Socket, Data}</c></p></item>
+ <item><p><c>{ssl_closed, Socket}</c></p></item>
+ <item><p><c>{ssl_error, Socket, Reason}</c></p></item>
</list>
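+
+    <p>For illustration, a minimal sketch of a receive loop for an SSL socket
+    in active mode:</p>
+    <code>
+loop(Socket) ->
+    receive
+        {ssl, Socket, Data} ->
+            io:format("Got data: ~p~n", [Data]),
+            loop(Socket);
+        {ssl_closed, Socket} ->
+            closed;
+        {ssl_error, Socket, Reason} ->
+            {error, Reason}
+    end.
+    </code>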
-
- <p>A <c>Timeout</c> argument specifies a timeout in milliseconds. The
- default value for a <c>Timeout</c> argument is <c>infinity</c>.
- </p>
+
+ <p>A <c>Timeout</c> argument specifies a time-out in milliseconds. The
+ default value for argument <c>Timeout</c> is <c>infinity</c>.</p>
</section>
<funcs>
<func>
<name>cipher_suites() -></name>
<name>cipher_suites(Type) -> ciphers()</name>
- <fsummary> Returns a list of supported cipher suites</fsummary>
+ <fsummary>Returns a list of supported cipher suites.</fsummary>
<type>
<v>Type = erlang | openssl | all</v>
-
</type>
<desc><p>Returns a list of supported cipher suites.
- cipher_suites() is equivalent to cipher_suites(erlang).
- Type openssl is provided for backwards compatibility with
- old ssl that used openssl. cipher_suites(all) returns
+ <c>cipher_suites()</c> is equivalent to <c>cipher_suites(erlang).</c>
+ Type <c>openssl</c> is provided for backwards compatibility with the
+ old SSL, which used OpenSSL. <c>cipher_suites(all)</c> returns
all available cipher suites. The cipher suites not present
- in cipher_suites(erlang) but in included in cipher_suites(all)
- will not be used unless explicitly configured by the user.
- </p>
+ in <c>cipher_suites(erlang)</c> but included in
+ <c>cipher_suites(all)</c> are not used unless explicitly configured
+ by the user.</p>
</desc>
</func>
-
+
+ <func>
+    <name>clear_pem_cache() -> ok</name>
+    <fsummary>Clears the PEM cache.</fsummary>
+
+    <desc><p>PEM files, used by SSL API-functions, are cached. The
+      cache is regularly checked to see if any cache entries should be
+      invalidated; however, this function provides a way to
+      unconditionally clear the whole cache.
+      </p>
+ </desc>
+ </func>
+
<func>
<name>connect(Socket, SslOptions) -> </name>
<name>connect(Socket, SslOptions, Timeout) -> {ok, SslSocket}
| {error, Reason}</name>
- <fsummary> Upgrades a gen_tcp, or
- equivalent, connected socket to an ssl socket. </fsummary>
+ <fsummary>Upgrades a <c>gen_tcp</c>, or
+ equivalent, connected socket to an SSL socket.</fsummary>
<type>
- <v>Socket = socket()</v>
- <v>SslOptions = [ssloption()]</v>
+ <v>Socket = socket()</v>
+ <v>SslOptions = [ssloption()]</v>
<v>Timeout = integer() | infinity</v>
<v>SslSocket = sslsocket()</v>
<v>Reason = term()</v>
</type>
- <desc> <p>Upgrades a gen_tcp, or equivalent,
- connected socket to an ssl socket i.e. performs the
+ <desc><p>Upgrades a <c>gen_tcp</c>, or equivalent,
+ connected socket to an SSL socket, that is, performs the
client-side ssl handshake.</p>
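+      <p>For illustration, a minimal sketch of upgrading a connected
+      <c>gen_tcp</c> socket; the host and file name are examples only:</p>
+      <code>
+{ok, TCPSocket} =
+    gen_tcp:connect("example.com", 443, [binary, {active, false}]),
+%% Exchange any plaintext protocol data here, then upgrade:
+{ok, TLSSocket} =
+    ssl:connect(TCPSocket, [{cacertfile, "cacerts.pem"},
+                            {verify, verify_peer}]).
+      </code>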
</desc>
</func>
@@ -562,7 +724,7 @@ fun(srp, Username :: string(), UserState :: term()) ->
<name>connect(Host, Port, Options) -></name>
<name>connect(Host, Port, Options, Timeout) ->
{ok, SslSocket} | {error, Reason}</name>
- <fsummary>Opens an ssl connection to Host, Port. </fsummary>
+ <fsummary>Opens an SSL connection to <c>Host</c>, <c>Port</c>.</fsummary>
<type>
<v>Host = host()</v>
<v>Port = integer()</v>
@@ -571,72 +733,109 @@ fun(srp, Username :: string(), UserState :: term()) ->
<v>SslSocket = sslsocket()</v>
<v>Reason = term()</v>
</type>
- <desc> <p>Opens an ssl connection to Host, Port.</p> </desc>
+ <desc><p>Opens an SSL connection to <c>Host</c>, <c>Port</c>.</p></desc>
</func>
<func>
<name>close(SslSocket) -> ok | {error, Reason}</name>
- <fsummary>Close an ssl connection</fsummary>
+ <fsummary>Closes an SSL connection.</fsummary>
<type>
<v>SslSocket = sslsocket()</v>
<v>Reason = term()</v>
</type>
- <desc><p>Close an ssl connection.</p>
+ <desc><p>Closes an SSL connection.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name>connection_info(SslSocket) ->
+ {ok, {ProtocolVersion, CipherSuite}} | {error, Reason}</name>
+      <fsummary>Returns the negotiated protocol version and cipher suite.
+ </fsummary>
+ <type>
+ <v>CipherSuite = ciphersuite()</v>
+ <v>ProtocolVersion = protocol()</v>
+ </type>
+      <desc><p>Returns the negotiated protocol version and cipher suite.</p>
</desc>
</func>
<func>
<name>controlling_process(SslSocket, NewOwner) ->
ok | {error, Reason}</name>
-
<fsummary>Assigns a new controlling process to the
- ssl-socket.</fsummary>
-
+ SSL socket.</fsummary>
<type>
<v>SslSocket = sslsocket()</v>
<v>NewOwner = pid()</v>
<v>Reason = term()</v>
</type>
- <desc><p>Assigns a new controlling process to the ssl-socket. A
- controlling process is the owner of an ssl-socket, and receives
- all messages from the socket.</p>
+ <desc><p>Assigns a new controlling process to the SSL socket. A
+ controlling process is the owner of an SSL socket, and receives
+ all messages from the socket.</p>
</desc>
</func>
<func>
- <name>connection_info(SslSocket) ->
- {ok, {ProtocolVersion, CipherSuite}} | {error, Reason} </name>
- <fsummary>Returns the negotiated protocol version and cipher suite.
+ <name>connection_information(SslSocket) ->
+ {ok, Info} | {error, Reason} </name>
+ <fsummary>Returns all the connection information.
</fsummary>
<type>
+ <v>Info = [InfoTuple]</v>
+	<v>InfoTuple = {protocol, ProtocolVersion} | {cipher_suite, CipherSuite} | {sni_hostname, SNIHostname}</v>
<v>CipherSuite = ciphersuite()</v>
<v>ProtocolVersion = protocol()</v>
+ <v>SNIHostname = string()</v>
+ <v>Reason = term()</v>
</type>
- <desc><p>Returns the negotiated protocol version and cipher suite.</p>
+      <desc><p>Returns all the connection information, containing the negotiated protocol version, cipher suite, and the hostname of the SNI extension.
+      On success, <c>Info</c> is a proplist containing all the connection information; otherwise <c>{error, Reason}</c> is returned.</p>
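+      <p>For illustration, a minimal sketch of reading the negotiated values:</p>
+      <code>
+%% SSLSocket is an established connection, for example from ssl:connect/3
+{ok, Info} = ssl:connection_information(SSLSocket),
+Protocol = proplists:get_value(protocol, Info),
+CipherSuite = proplists:get_value(cipher_suite, Info).
+      </code>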
</desc>
</func>
- <func>
+ <func>
+ <name>connection_information(SslSocket, Items) ->
+ {ok, Info} | {error, Reason} </name>
+ <fsummary>Returns the requested connection information.
+ </fsummary>
+ <type>
+ <v>Items = [Item]</v>
+ <v>Item = protocol | cipher_suite | sni_hostname</v>
+ <v>Info = [InfoTuple]</v>
+	<v>InfoTuple = {protocol, ProtocolVersion} | {cipher_suite, CipherSuite} | {sni_hostname, SNIHostname}</v>
+ <v>CipherSuite = ciphersuite()</v>
+ <v>ProtocolVersion = protocol()</v>
+ <v>SNIHostname = string()</v>
+ <v>Reason = term()</v>
+ </type>
+      <desc><p>Returns the requested connection information. The items that can be requested are <c>protocol</c>, <c>cipher_suite</c>, and <c>sni_hostname</c>.
+      On success, <c>{ok, Info}</c> is returned, where <c>Info</c> is a proplist containing the requested items;
+      otherwise, <c>{error, Reason}</c> is returned.</p>
+ </desc>
+ </func>
+
+ <func>
<name>format_error(Reason) -> string()</name>
- <fsummary>Return an error string.</fsummary>
+ <fsummary>Returns an error string.</fsummary>
<type>
<v>Reason = term()</v>
</type>
<desc>
- <p>Presents the error returned by an ssl function as a printable string.</p>
+ <p>Presents the error returned by an SSL function as a printable string.</p>
</desc>
</func>
<func>
<name>getopts(Socket, OptionNames) ->
{ok, [socketoption()]} | {error, Reason}</name>
- <fsummary>Get the value of the specified options.</fsummary>
+ <fsummary>Gets the values of the specified options.</fsummary>
<type>
<v>Socket = sslsocket()</v>
<v>OptionNames = [atom()]</v>
</type>
<desc>
- <p>Get the value of the specified socket options.
+ <p>Gets the values of the specified socket options.
</p>
</desc>
</func>
@@ -644,34 +843,49 @@ fun(srp, Username :: string(), UserState :: term()) ->
<func>
<name>listen(Port, Options) ->
{ok, ListenSocket} | {error, Reason}</name>
- <fsummary>Creates an ssl listen socket.</fsummary>
+ <fsummary>Creates an SSL listen socket.</fsummary>
<type>
<v>Port = integer()</v>
<v>Options = options()</v>
<v>ListenSocket = sslsocket()</v>
</type>
<desc>
- <p>Creates an ssl listen socket.</p>
+ <p>Creates an SSL listen socket.</p>
</desc>
</func>
<func>
+ <name>negotiated_protocol(Socket) -> {ok, Protocol} | {error, protocol_not_negotiated}</name>
+ <fsummary>Returns the protocol negotiated through ALPN or NPN extensions.</fsummary>
+ <type>
+ <v>Socket = sslsocket()</v>
+ <v>Protocol = binary()</v>
+ </type>
+ <desc>
+ <p>
+ Returns the protocol negotiated through ALPN or NPN extensions.
+ </p>
+ </desc>
+ </func>
+
+ <func>
<name>peercert(Socket) -> {ok, Cert} | {error, Reason}</name>
- <fsummary>Return the peer certificate.</fsummary>
+ <fsummary>Returns the peer certificate.</fsummary>
<type>
<v>Socket = sslsocket()</v>
<v>Cert = binary()</v>
</type>
<desc>
- <p>The peer certificate is returned as a DER encoded binary.
- The certificate can be decoded with <c>public_key:pkix_decode_cert/2</c>.
- </p>
+ <p>The peer certificate is returned as a DER-encoded binary.
+ The certificate can be decoded with
+ <c>public_key:pkix_decode_cert/2</c>.</p>
</desc>
</func>
+
<func>
<name>peername(Socket) -> {ok, {Address, Port}} |
{error, Reason}</name>
- <fsummary>Return peer address and port.</fsummary>
+ <fsummary>Returns the peer address and port.</fsummary>
<type>
<v>Socket = sslsocket()</v>
<v>Address = ipaddress()</v>
@@ -681,12 +895,32 @@ fun(srp, Username :: string(), UserState :: term()) ->
<p>Returns the address and port number of the peer.</p>
</desc>
</func>
+
+ <func>
+ <name>prf(Socket, Secret, Label, Seed, WantedLength) -> {ok, binary()} | {error, reason()}</name>
+ <fsummary>Uses a session Pseudo-Random Function to generate key material.</fsummary>
+ <type>
+ <v>Socket = sslsocket()</v>
+ <v>Secret = binary() | master_secret</v>
+ <v>Label = binary()</v>
+ <v>Seed = [binary() | prf_random()]</v>
+ <v>WantedLength = non_neg_integer()</v>
+ </type>
+ <desc>
+ <p>Uses the Pseudo-Random Function (PRF) of a TLS session to generate
+ extra key material. It either takes user-generated values for
+ <c>Secret</c> and <c>Seed</c> or atoms directing it to use a specific
+ value from the session security parameters.</p>
+ <p>Can only be used with TLS connections; <c>{error, undefined}</c>
+ is returned for SSLv3 connections.</p>
+ </desc>
+ </func>
<func>
<name>recv(Socket, Length) -> </name>
<name>recv(Socket, Length, Timeout) -> {ok, Data} | {error,
Reason}</name>
- <fsummary>Receive data on a socket.</fsummary>
+ <fsummary>Receives data on a socket.</fsummary>
<type>
<v>Socket = sslsocket()</v>
<v>Length = integer()</v>
@@ -694,63 +928,43 @@ fun(srp, Username :: string(), UserState :: term()) ->
<v>Data = [char()] | binary()</v>
</type>
<desc>
- <p>This function receives a packet from a socket in passive
- mode. A closed socket is indicated by a return value
+ <p>Receives a packet from a socket in passive
+ mode. A closed socket is indicated by return value
<c>{error, closed}</c>.</p>
- <p>The <c>Length</c> argument is only meaningful when
- the socket is in <c>raw</c> mode and denotes the number of
+ <p>Argument <c>Length</c> is meaningful only when
+ the socket is in mode <c>raw</c> and denotes the number of
bytes to read. If <c>Length</c> = 0, all available bytes are
returned. If <c>Length</c> &gt; 0, exactly <c>Length</c>
bytes are returned, or an error; possibly discarding less
than <c>Length</c> bytes of data when the socket gets closed
from the other side.</p>
- <p>The optional <c>Timeout</c> parameter specifies a timeout in
+ <p>Optional argument <c>Timeout</c> specifies a time-out in
milliseconds. The default value is <c>infinity</c>.</p>
</desc>
</func>
<func>
- <name>prf(Socket, Secret, Label, Seed, WantedLength) -> {ok, binary()} | {error, reason()}</name>
- <fsummary>Use a sessions pseudo random function to generate key material.</fsummary>
- <type>
- <v>Socket = sslsocket()</v>
- <v>Secret = binary() | master_secret</v>
- <v>Label = binary()</v>
- <v>Seed = [binary() | prf_random()]</v>
- <v>WantedLength = non_neg_integer()</v>
- </type>
- <desc>
- <p>Use the pseudo random function (PRF) of a TLS session to generate
- additional key material. It either takes user generated values for
- <c>Secret</c> and <c>Seed</c> or atoms directing it use a specific
- value from the session security parameters.</p>
- <p>This function can only be used with TLS connections, <c>{error, undefined}</c>
- is returned for SSLv3 connections.</p>
- </desc>
- </func>
-
- <func>
<name>renegotiate(Socket) -> ok | {error, Reason}</name>
- <fsummary> Initiates a new handshake.</fsummary>
+ <fsummary>Initiates a new handshake.</fsummary>
<type>
<v>Socket = sslsocket()</v>
</type>
<desc><p>Initiates a new handshake. A notable return value is
<c>{error, renegotiation_rejected}</c> indicating that the peer
- refused to go through with the renegotiation but the connection
+ refused to go through with the renegotiation, but the connection
is still active using the previously negotiated session.</p>
</desc>
</func>
<func>
<name>send(Socket, Data) -> ok | {error, Reason}</name>
- <fsummary>Write data to a socket.</fsummary>
+ <fsummary>Writes data to a socket.</fsummary>
<type>
<v>Socket = sslsocket()</v>
<v>Data = iodata()</v>
</type>
<desc>
- <p>Writes <c>Data</c> to <c>Socket</c>. </p>
+ <p>Writes <c>Data</c> to <c>Socket</c>.</p>
<p>A notable return value is <c>{error, closed}</c> indicating that
the socket is closed.</p>
</desc>
@@ -758,31 +972,31 @@ fun(srp, Username :: string(), UserState :: term()) ->
<func>
<name>setopts(Socket, Options) -> ok | {error, Reason}</name>
- <fsummary>Set socket options.</fsummary>
+ <fsummary>Sets socket options.</fsummary>
<type>
<v>Socket = sslsocket()</v>
<v>Options = [socketoption]()</v>
</type>
<desc>
- <p>Sets options according to <c>Options</c> for the socket
- <c>Socket</c>. </p>
+ <p>Sets options according to <c>Options</c> for socket
+ <c>Socket</c>.</p>
</desc>
</func>
<func>
<name>shutdown(Socket, How) -> ok | {error, Reason}</name>
- <fsummary>Immediately close a socket</fsummary>
+ <fsummary>Immediately closes a socket.</fsummary>
<type>
<v>Socket = sslsocket()</v>
<v>How = read | write | read_write</v>
<v>Reason = reason()</v>
</type>
<desc>
- <p>Immediately close a socket in one or two directions.</p>
+ <p>Immediately closes a socket in one or two directions.</p>
<p><c>How == write</c> means closing the socket for writing,
reading from it is still possible.</p>
<p>To be able to handle that the peer has done a shutdown on
- the write side, the <c>{exit_on_close, false}</c> option
+ the write side, option <c>{exit_on_close, false}</c>
is useful.</p>
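+      <p>The following is a minimal sketch of closing only the write direction
+      after sending a request, while still reading the reply; <c>Socket</c> and
+      <c>Request</c> are assumed to be bound already:</p>
+      <code type="erl">
+ok = ssl:send(Socket, Request),
+ok = ssl:shutdown(Socket, write),   %% no more writes from this side
+{ok, Reply} = ssl:recv(Socket, 0).  %% reading is still possible</code>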
</desc>
</func>
@@ -790,16 +1004,16 @@ fun(srp, Username :: string(), UserState :: term()) ->
<func>
<name>ssl_accept(Socket) -> </name>
<name>ssl_accept(Socket, Timeout) -> ok | {error, Reason}</name>
- <fsummary>Perform server-side SSL/TLS handshake</fsummary>
+ <fsummary>Performs server-side SSL/TLS handshake.</fsummary>
<type>
<v>Socket = sslsocket()</v>
<v>Timeout = integer()</v>
<v>Reason = term()</v>
</type>
<desc>
- <p> Performs the SSL/TLS server-side handshake <c>Socket</c> is a socket as returned
- by <seealso
- marker="#transport_accept-2">ssl:transport_accept/[1,2]</seealso>
+ <p>Performs the SSL/TLS server-side handshake.</p>
+ <p><c>Socket</c> is a socket as returned by
+	  <seealso marker="#transport_accept-2">ssl:transport_accept/[1,2]</seealso>.
</p>
</desc>
</func>
@@ -807,7 +1021,7 @@ fun(srp, Username :: string(), UserState :: term()) ->
<func>
<name>ssl_accept(Socket, SslOptions) -> </name>
<name>ssl_accept(Socket, SslOptions, Timeout) -> {ok, Socket} | ok | {error, Reason}</name>
- <fsummary>Perform server-side SSL/TLS handshake</fsummary>
+ <fsummary>Performs server-side SSL/TLS handshake.</fsummary>
<type>
<v>Socket = socket() | sslsocket() </v>
<v>SslOptions = ssloptions()</v>
@@ -815,17 +1029,19 @@ fun(srp, Username :: string(), UserState :: term()) ->
<v>Reason = term()</v>
</type>
<desc>
- <p> If <c>Socket</c> is a socket() - upgrades a gen_tcp, or equivalent, socket to an ssl socket
- i.e. performs the SSL/TLS server-side handshake and returns the ssl socket.
- </p>
+ <p>If <c>Socket</c> is a <c>socket()</c>: upgrades a <c>gen_tcp</c>,
+ or equivalent, socket to an SSL socket, that is, performs
+ the SSL/TLS server-side handshake and returns the SSL socket.</p>
- <warning><p>Note that the listen socket should be in {active, false} mode
+ <warning><p>The listen socket is to be in mode <c>{active, false}</c>
before telling the client that the server is ready to upgrade
- by calling this function, otherwise the upgrade may
- or may not succeed depending on timing.</p></warning>
+	  by calling this function; otherwise, the upgrade can succeed or
+	  fail depending on timing.</p></warning>
- <p> If <c>Socket</c> is an sslsocket() - provides additional SSL/TLS options to those specified in <seealso
- marker="#listen-2">ssl:listen/2 </seealso> and then performs the SSL/TLS handshake.
+ <p>If <c>Socket</c> is an <c>sslsocket()</c>: provides extra SSL/TLS
+ options to those specified in
+ <seealso marker="#listen-2">ssl:listen/2 </seealso> and then performs
+ the SSL/TLS handshake.
</p>
</desc>
</func>
@@ -833,14 +1049,14 @@ fun(srp, Username :: string(), UserState :: term()) ->
<func>
<name>sockname(Socket) -> {ok, {Address, Port}} |
{error, Reason}</name>
- <fsummary>Return the local address and port.</fsummary>
+ <fsummary>Returns the local address and port.</fsummary>
<type>
<v>Socket = sslsocket()</v>
<v>Address = ipaddress()</v>
<v>Port = integer()</v>
</type>
<desc>
- <p>Returns the local address and port number of the socket
+ <p>Returns the local address and port number of socket
<c>Socket</c>.</p>
</desc>
</func>
@@ -848,22 +1064,21 @@ fun(srp, Username :: string(), UserState :: term()) ->
<func>
<name>start() -> </name>
<name>start(Type) -> ok | {error, Reason}</name>
- <fsummary>Starts the Ssl application. </fsummary>
+ <fsummary>Starts the SSL application.</fsummary>
<type>
- <v>Type = permanent | transient | temporary</v>
+ <v>Type = permanent | transient | temporary</v>
</type>
<desc>
- <p>Starts the Ssl application. Default type
- is temporary.
- <seealso marker="kernel:application">application(3)</seealso></p>
+ <p>Starts the SSL application. Default type
+ is <c>temporary</c>.</p>
</desc>
</func>
+
<func>
<name>stop() -> ok </name>
- <fsummary>Stops the Ssl application.</fsummary>
+ <fsummary>Stops the SSL application.</fsummary>
<desc>
- <p>Stops the Ssl application.
- <seealso marker="kernel:application">application(3)</seealso></p>
+ <p>Stops the SSL application.</p>
</desc>
</func>
@@ -871,8 +1086,8 @@ fun(srp, Username :: string(), UserState :: term()) ->
<name>transport_accept(ListenSocket) -></name>
<name>transport_accept(ListenSocket, Timeout) ->
{ok, NewSocket} | {error, Reason}</name>
- <fsummary>Accept an incoming connection and
- prepare for <c>ssl_accept</c></fsummary>
+ <fsummary>Accepts an incoming connection and
+ prepares for <c>ssl_accept</c>.</fsummary>
<type>
<v>ListenSocket = NewSocket = sslsocket()</v>
<v>Timeout = integer()</v>
@@ -881,66 +1096,66 @@ fun(srp, Username :: string(), UserState :: term()) ->
<desc>
<p>Accepts an incoming connection request on a listen socket.
<c>ListenSocket</c> must be a socket returned from
- <seealso
- marker="#listen-2"> ssl:listen/2</seealso>.
- The socket returned should be passed to
+ <seealso marker="#listen-2"> ssl:listen/2</seealso>.
+ The socket returned is to be passed to
<seealso marker="#ssl_accept-2"> ssl:ssl_accept[2,3]</seealso>
- to complete handshaking i.e
+ to complete handshaking, that is,
establishing the SSL/TLS connection.</p>
<warning>
<p>The socket returned can only be used with
- <seealso marker="#ssl_accept-2"> ssl:ssl_accept[2,3]</seealso>
- no traffic can be sent or received before that call.</p>
+ <seealso marker="#ssl_accept-2"> ssl:ssl_accept[2,3]</seealso>.
+ No traffic can be sent or received before that call.</p>
</warning>
<p>The accepted socket inherits the options set for
- <c>ListenSocket</c> in <seealso
- marker="#listen-2"> ssl:listen/2</seealso>.</p>
+ <c>ListenSocket</c> in
+ <seealso marker="#listen-2"> ssl:listen/2</seealso>.</p>
<p>The default
value for <c>Timeout</c> is <c>infinity</c>. If
- <c>Timeout</c> is specified, and no connection is accepted
+ <c>Timeout</c> is specified and no connection is accepted
within the given time, <c>{error, timeout}</c> is
returned.</p>
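+      <p>The following is a minimal sketch of a typical accept loop combining
+      <c>ssl:transport_accept/1</c> and <c>ssl:ssl_accept/1</c>;
+      <c>handle_connection/1</c> is a placeholder for application code:</p>
+      <code type="erl">
+accept_loop(ListenSocket) ->
+    {ok, Socket} = ssl:transport_accept(ListenSocket),
+    %% Complete the handshake; no traffic is possible before this call.
+    ok = ssl:ssl_accept(Socket),
+    Pid = spawn(fun() -> handle_connection(Socket) end),
+    %% Hand the connection over to the worker process.
+    ok = ssl:controlling_process(Socket, Pid),
+    accept_loop(ListenSocket).</code>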
</desc>
</func>
<func>
- <name>versions() ->
- [{SslAppVer, SupportedSslVer, AvailableSslVsn}]</name>
+ <name>versions() -> [versions_info()]</name>
<fsummary>Returns version information relevant for the
- ssl application.</fsummary>
- <type>
- <v>SslAppVer = string()</v>
- <v>SupportedSslVer = [protocol()]</v>
- <v>AvailableSslVsn = [protocol()]</v>
- </type>
- <desc>
- <p>
- Returns version information relevant for the
- ssl application.</p>
- </desc>
- </func>
- <func>
- <name>negotiated_next_protocol(Socket) -> {ok, Protocol} | {error, next_protocol_not_negotiated}</name>
- <fsummary>Returns the Next Protocol negotiated.</fsummary>
+ SSL application.</fsummary>
<type>
- <v>Socket = sslsocket()</v>
- <v>Protocol = binary()</v>
+	<v>versions_info() = {app_vsn, string()} | {supported | available, [protocol()]}</v>
</type>
<desc>
- <p>
- Returns the Next Protocol negotiated.
- </p>
+ <p>Returns version information relevant for the SSL
+ application.</p>
+ <taglist>
+ <tag><c>app_vsn</c></tag>
+ <item>The application version of the SSL application.</item>
+
+ <tag><c>supported</c></tag>
+      <item>TLS/SSL versions supported by default.
+      Can be overridden by a version option to
+ <seealso marker="#connect-2"> connect/[2,3,4]</seealso>,
+ <seealso marker="#listen-2"> listen/2</seealso>, and <seealso
+ marker="#ssl_accept-2">ssl_accept/[1,2,3]</seealso>.
+ For the negotiated TLS/SSL version, see <seealso
+ marker="#connection_info-1">ssl:connection_info/1
+ </seealso>.</item>
+
+ <tag><c>available</c></tag>
+ <item>All TLS/SSL versions supported by the SSL application.
+ TLS 1.2 requires sufficient support from the Crypto
+ application.</item>
+ </taglist>
</desc>
</func>
-
+
</funcs>
<section>
<title>SEE ALSO</title>
- <p><seealso marker="kernel:inet">inet(3) </seealso> and
- <seealso marker="kernel:gen_tcp">gen_tcp(3) </seealso>
+ <p><seealso marker="kernel:inet">inet(3)</seealso> and
+ <seealso marker="kernel:gen_tcp">gen_tcp(3)</seealso>
</p>
</section>
</erlref>
-
diff --git a/lib/ssl/doc/src/ssl_app.xml b/lib/ssl/doc/src/ssl_app.xml
index c8024548b5..f17f5cb9fe 100644
--- a/lib/ssl/doc/src/ssl_app.xml
+++ b/lib/ssl/doc/src/ssl_app.xml
@@ -4,7 +4,7 @@
<appref>
<header>
<copyright>
- <year>1999</year><year>2014</year>
+ <year>1999</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -22,71 +22,79 @@
</legalnotice>
<title>ssl</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
<file>ssl_app.sgml</file>
</header>
<app>ssl</app>
- <appsummary>The SSL application provides secure communication over
+ <appsummary>The ssl application provides secure communication over
sockets.</appsummary>
+ <description></description>
<section>
<title>DEPENDENCIES</title>
- <p>The ssl application uses the Erlang applications public_key and
- crypto to handle public keys and encryption, hence these
- applications needs to be loaded for the ssl application to work. In
- an embedded environment that means they need to be started with
- application:start/[1,2] before the ssl application is started.
- </p>
+ <p>The SSL application uses the <c>public_key</c> and
+    Crypto applications to handle public keys and encryption, hence
+ these applications must be loaded for the SSL application to work.
+ In an embedded environment this means they must be started with
+ <c>application:start/[1,2]</c> before the SSL application is
+ started.</p>
</section>
<section>
- <title>ENVIRONMENT</title>
- <p>The following application environment configuration parameters
- are defined for the SSL application. See <seealso
- marker="kernel:application">application(3)</seealso>for more
- information about configuration parameters.
- </p>
- <p>Note that the environment parameters can be set on the command line,
- for instance,</p>
- <p><c>erl ... -ssl protocol_version '[sslv3, tlsv1]' ...</c>.
- </p>
+ <title>CONFIGURATION</title>
+ <p>The application environment configuration parameters in this section
+ are defined for the SSL application. For more information
+ about configuration parameters, see the
+ <seealso marker="kernel:application">application(3)</seealso>
+ manual page in Kernel.</p>
+
+ <p>The environment parameters can be set on the command line,
+ for example:</p>
+
+ <p><c>erl -ssl protocol_version "['tlsv1.2', 'tlsv1.1']"</c></p>
+
<taglist>
- <tag><c><![CDATA[protocol_version = [sslv3|tlsv1] <optional>]]></c>.</tag>
- <item>
- <p>Protocol that will be supported by started clients and
- servers. If this option is not set it will default to all
- protocols currently supported by the erlang ssl application.
- Note that this option may be overridden by the version option
- to ssl:connect/[2,3] and ssl:listen/2.
- </p>
- </item>
+ <tag><c><![CDATA[protocol_version = <seealso marker="kernel:error_logger">ssl:protocol()</seealso> <optional>]]></c>.</tag>
+ <item><p>Protocol supported by started clients and
+ servers. If this option is not set, it defaults to all
+ protocols currently supported by the SSL application.
+ This option can be overridden by the version option
+ to <c>ssl:connect/[2,3]</c> and <c>ssl:listen/2</c>.</p></item>
<tag><c><![CDATA[session_lifetime = integer() <optional>]]></c></tag>
- <item>
- <p>The lifetime of session data in seconds.
- </p>
- </item>
+ <item><p>Lifetime of the session data in seconds.</p></item>
- <tag><c><![CDATA[session_cb = atom() <optional>]]></c></tag>
- <item>
- <p>
- Name of session cache callback module that implements
- the ssl_session_cache_api behavior, defaults to
- ssl_session_cache.erl.
- </p>
- </item>
+ <tag><c><![CDATA[session_cb = atom() <optional>]]></c></tag>
+ <item><p>Name of the session cache callback module that implements
+ the <c>ssl_session_cache_api</c> behavior. Defaults to
+ <c>ssl_session_cache.erl</c>.</p></item>
<tag><c><![CDATA[session_cb_init_args = proplist:proplist() <optional>]]></c></tag>
+
+ <item><p>List of extra user-defined arguments to the <c>init</c> function
+ in the session cache callback module. Defaults to <c>[]</c>.</p></item>
+
+ <tag><c><![CDATA[ssl_pem_cache_clean = integer() <optional>]]></c></tag>
<item>
<p>
- List of additional user defined arguments to the init function in session cache
- callback module, defaults to [].
+ Number of milliseconds between PEM cache validations.
</p>
+	<p>See also
+	<seealso marker="ssl#clear_pem_cache-0">ssl:clear_pem_cache/0</seealso>.</p>
</item>
-
</taglist>
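+
+    <p>The same parameters can also be set in a system configuration file that
+    is given to <c>erl</c> with command-line flag <c>-config</c>. The following
+    is a minimal sketch with illustrative values only:</p>
+
+    <code type="none">
+%% sys.config
+[{ssl, [{protocol_version, ['tlsv1.2', 'tlsv1.1']},
+        {session_lifetime, 600}]}].</code>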
</section>
<section>
+ <title>ERROR LOGGER AND EVENT HANDLERS</title>
+    <p>The SSL application uses the default <seealso marker="kernel:error_logger">OTP error logger</seealso> to log unexpected errors and TLS alerts. The logging of TLS alerts can be turned off with option <c>log_alert</c>.</p>
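+    <p>For example, alert logging can be turned off for a single connection by
+    including the option when connecting; the host, port, and remaining options
+    in this sketch are illustrative only:</p>
+    <code type="erl">
+{ok, Socket} = ssl:connect("example.com", 443, [{log_alert, false}]).</code>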
+ </section>
+
+ <section>
<title>SEE ALSO</title>
<p><seealso marker="kernel:application">application(3)</seealso></p>
</section>
diff --git a/lib/ssl/doc/src/ssl_crl_cache.xml b/lib/ssl/doc/src/ssl_crl_cache.xml
new file mode 100644
index 0000000000..83b03375b1
--- /dev/null
+++ b/lib/ssl/doc/src/ssl_crl_cache.xml
@@ -0,0 +1,65 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2015</year><year>2015</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+ </legalnotice>
+ <title>ssl_crl_cache</title>
+ <file>ssl_crl_cache.xml</file>
+ </header>
+
+ <module>ssl_crl_cache</module>
+ <modulesummary>CRL cache </modulesummary>
+ <description>
+ <p>
+ Implements an internal CRL (Certificate Revocation List) cache.
+ In addition to implementing the <seealso
+    marker="ssl_crl_cache_api">ssl_crl_cache_api</seealso> behaviour,
+    the following functions are available.
+ </p>
+ </description>
+
+ <funcs>
+ <func>
+ <name>delete(Entries) -> ok | {error, Reason} </name>
+      <fsummary>Deletes CRLs from the ssl application's local cache.</fsummary>
+ <type>
+ <v> Entries = <seealso marker="inets:http_uri">http_uri:uri() </seealso> | {file, string()} | {der, [<seealso
+ marker="public_key:public_key"> public_key:der_encoded() </seealso>]}</v>
+ <v> Reason = term()</v>
+ </type>
+ <desc>
+	<p>Deletes CRLs from the ssl application's local cache.</p>
+ </desc>
+ </func>
+ <func>
+ <name>insert(CRLSrc) -> ok | {error, Reason}</name>
+ <name>insert(URI, CRLSrc) -> ok | {error, Reason}</name>
+      <fsummary>Inserts CRLs into the ssl application's local cache.</fsummary>
+ <type>
+ <v> CRLSrc = {file, string()} | {der, [ <seealso
+ marker="public_key:public_key"> public_key:der_encoded() </seealso> ]}</v>
+ <v> URI = <seealso marker="inets:http_uri">http_uri:uri() </seealso> </v>
+ <v> Reason = term()</v>
+ </type>
+ <desc>
+	<p>Inserts CRLs into the ssl application's local cache.</p>
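+	<p>The following is a minimal sketch of loading a CRL into the cache
+	from a local file; the file path is illustrative only:</p>
+	<code type="erl">
+ok = ssl_crl_cache:insert({file, "/etc/ssl/crls/intermediate.crl"}).</code>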
+ </desc>
+ </func>
+ </funcs>
+</erlref> \ No newline at end of file
diff --git a/lib/ssl/doc/src/ssl_crl_cache_api.xml b/lib/ssl/doc/src/ssl_crl_cache_api.xml
new file mode 100644
index 0000000000..9230442ae0
--- /dev/null
+++ b/lib/ssl/doc/src/ssl_crl_cache_api.xml
@@ -0,0 +1,105 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2015</year><year>2015</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+ </legalnotice>
+ <title>ssl_crl_cache_api</title>
+ <file>ssl_crl_cache_api.xml</file>
+ </header>
+
+ <module>ssl_crl_cache_api</module>
+  <modulesummary>API for an SSL/TLS CRL (Certificate Revocation List) cache.</modulesummary>
+ <description>
+ <p>
+ When SSL/TLS performs certificate path validation according to
+      <url href="http://www.ietf.org/rfc/rfc5280.txt">RFC 5280</url>,
+      it should also perform CRL validation checks. To enable the CRL
+      checks, the application needs access to CRLs. A database of CRLs
+      can be set up in many different ways. This module provides the
+      behavior of the API needed to integrate an arbitrary CRL cache
+      with the Erlang SSL application. It is also used by the
+ application itself to provide a simple default implementation of
+ a CRL cache.
+ </p>
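+    <p>
+      The following is a minimal sketch of a callback module implementing this
+      behaviour, backed by a map from distribution point to DER-encoded CRLs;
+      the module name and storage strategy are illustrative only.
+    </p>
+    <code type="erl">
+-module(my_crl_cache).
+-behaviour(ssl_crl_cache_api).
+
+-export([lookup/2, select/2, fresh_crl/2]).
+
+%% DbHandle is assumed to be a map: DistributionPoint => [DerCRL].
+lookup(DistributionPoint, DbHandle) ->
+    case maps:find(DistributionPoint, DbHandle) of
+        {ok, CRLs} -> CRLs;
+        error      -> not_available
+    end.
+
+%% Return the cached CRLs issued by Issuer; for brevity this sketch
+%% returns everything in the cache.
+select(_Issuer, DbHandle) ->
+    lists:append(maps:values(DbHandle)).
+
+%% Called through the update_crl option of public_key:pkix_crls_validate/3;
+%% this sketch simply returns the CRL it was given.
+fresh_crl(_DistributionPoint, CRL) ->
+    CRL.</code>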
+ </description>
+
+ <section>
+ <title>DATA TYPES</title>
+
+ <p>The following data types are used in the functions below:
+ </p>
+
+ <taglist>
+
+ <tag><c>cache_ref() =</c></tag>
+    <item><p><c>opaque()</c></p></item>
+    <tag><c>dist_point() =</c></tag>
+    <item><p><c>#'DistributionPoint'{}</c>, see
+    <seealso marker="public_key:public_key_records">X.509 certificate records</seealso></p></item>
+
+ </taglist>
+
+ </section>
+ <funcs>
+ <func>
+ <name>fresh_crl(DistributionPoint, CRL) -> FreshCRL</name>
+      <fsummary><c>fun fresh_crl/2</c> is used as input option <c>update_crl</c> to
+      <c>public_key:pkix_crls_validate/3</c>.</fsummary>
+ <type>
+ <v> DistributionPoint = dist_point() </v>
+ <v> CRL = [<seealso
+ marker="public_key:public_key">public_key:der_encoded()</seealso>] </v>
+ <v> FreshCRL = [<seealso
+ marker="public_key:public_key">public_key:der_encoded()</seealso>] </v>
+ </type>
+ <desc>
+	<p><c>fun fresh_crl/2</c> is used as input option <c>update_crl</c> to
+	<seealso marker="public_key:public_key#pkix_crls_validate-3">public_key:pkix_crls_validate/3</seealso>.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name>lookup(DistributionPoint, DbHandle) -> not_available | CRLs </name>
+      <fsummary>Looks up the CRLs belonging to a distribution point.</fsummary>
+ <type>
+ <v> DistributionPoint = dist_point() </v>
+ <v> DbHandle = cache_ref() </v>
+ <v> CRLs = [<seealso
+ marker="public_key:public_key">public_key:der_encoded()</seealso>] </v>
+ </type>
+      <desc>
+	<p>Looks up the CRLs belonging to distribution point <c>DistributionPoint</c>.</p>
+	<p>This function can choose either to look only in the cache or to follow
+	distribution point links, depending on how the cache is administered.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name>select(Issuer, DbHandle) -> CRLs </name>
+      <fsummary>Selects the CRLs in the cache that are issued by <c>Issuer</c>.</fsummary>
+ <type>
+ <v> Issuer = <seealso
+ marker="public_key:public_key">public_key:issuer_name()</seealso></v>
+ <v> DbHandle = cache_ref() </v>
+ </type>
+ <desc>
+	<p>Selects the CRLs in the cache that are issued by <c>Issuer</c>.</p>
+ </desc>
+ </func>
+ </funcs>
+</erlref> \ No newline at end of file
diff --git a/lib/ssl/doc/src/ssl_distribution.xml b/lib/ssl/doc/src/ssl_distribution.xml
index 4b4d042f70..effb304938 100644
--- a/lib/ssl/doc/src/ssl_distribution.xml
+++ b/lib/ssl/doc/src/ssl_distribution.xml
@@ -31,23 +31,20 @@
<rev>B</rev>
<file>ssl_distribution.xml</file>
</header>
- <p>This chapter describes how the Erlang distribution can use
- SSL to get additional verification and security.
- </p>
+ <p>This section describes how the Erlang distribution can use
+ SSL to get extra verification and security.</p>
- <section>
- <title>Introduction</title>
- <p>The Erlang distribution can in theory use almost any connection
- based protocol as bearer. A module that implements the protocol
- specific parts of the connection setup is however needed. The
- default distribution module is <c>inet_tcp_dist</c> which is
- included in the Kernel application. When starting an
+ <p>The Erlang distribution can in theory use almost any
+ connection-based protocol as bearer. However, a module that
+ implements the protocol-specific parts of the connection setup is
+ needed. The default distribution module is <c>inet_tcp_dist</c>
+ in the Kernel application. When starting an
Erlang node distributed, <c>net_kernel</c> uses this module to
- setup listen ports and connections. </p>
+ set up listen ports and connections.</p>
- <p>In the SSL application there is an additional distribution
- module, <c>inet_tls_dist</c> which can be used as an
- alternative. All distribution connections will be using SSL and
+  <p>In the SSL application, an extra distribution
+ module, <c>inet_tls_dist</c>, can be used as an
+ alternative. All distribution connections will use SSL and
all participating Erlang nodes in a distributed system must use
this distribution module.</p>
@@ -55,35 +52,45 @@
SSL connection setup. Erlang node cookies are however always
used, as they can be used to differentiate between two different
Erlang networks.</p>
- <p>Setting up Erlang distribution over SSL involves some simple but
- necessary steps:</p>
+
+ <p>To set up Erlang distribution over SSL:</p>
<list type="bulleted">
- <item>Building boot scripts including the SSL application</item>
- <item>Specifying the distribution module for net_kernel</item>
- <item>Specifying security options and other SSL options</item>
+ <item><em>Step 1:</em> Build boot scripts including the
+ SSL application.</item>
+ <item><em>Step 2:</em> Specify the distribution module for
+ <c>net_kernel</c>.</item>
+ <item><em>Step 3:</em> Specify the security options and other
+ SSL options.</item>
+ <item><em>Step 4:</em> Set up the environment to always use SSL.</item>
</list>
- <p>The rest of this chapter describes the above mentioned steps in
- more detail.</p>
- </section>
+
+ <p>The following sections describe these steps.</p>
<section>
- <title>Building boot scripts including the SSL application</title>
+ <title>Building Boot Scripts Including the ssl Application</title>
<p>Boot scripts are built using the <c>systools</c> utility in the
- SASL application. Refer to the SASL documentations
- for more information on systools. This is only an example of
+ <c>sasl</c> application. For more information on <c>systools</c>,
+ see the <c>sasl</c> documentation. This is only an example of
what can be done.</p>
- <p>The simplest boot script possible includes only the Kernel
- and STDLIB applications. Such a script is located in the
- Erlang distributions bin directory. The source for the script
- can be found under the Erlang installation top directory under
- <c><![CDATA[releases/<OTP version>/start_clean.rel]]></c>. Copy that
- script to another location (and preferably another name)
- and add the applications crypto, public_key and SSL with their current version numbers
- after the STDLIB application.</p>
- <p>An example .rel file with SSL added may look like this:</p>
+ <p>The simplest boot script possible includes only the Kernel
+ and STDLIB applications. Such a script is located in the
+ <c>bin</c> directory of the Erlang distribution. The source for the
+ script is found under the Erlang installation top directory under
+ <c><![CDATA[releases/<OTP version>/start_clean.rel]]></c>.</p>
+
+ <p>Do the following:</p>
+ <list type="bulleted">
+ <item><p>Copy that script to another location (and preferably another
+ name).</p></item>
+ <item><p>Add the applications Crypto, Public Key, and
+ SSL with their current version numbers after the
+ STDLIB application.</p></item>
+ </list>
+ <p>The following shows an example <c>.rel</c> file with SSL
+ added:</p>
<code type="none">
{release, {"OTP APN 181 01","R15A"}, {erts, "5.9"},
[{kernel,"2.15"},
@@ -94,23 +101,29 @@
]}.
</code>
- <p>Note that the version numbers surely will differ in your system.
- Whenever one of the applications included in the script is
- upgraded, the script has to be changed.</p>
- <p>Assuming the above .rel file is stored in a file
- <c>start_ssl.rel</c> in the current directory, a boot script
- can be built like this:</p>
+ <p>The version numbers differ in your system. Whenever one of the
+ applications included in the script is upgraded, change the script.</p>
+ <p>Do the following:</p>
+ <list type="bulleted">
+ <item><p>Build the boot script.</p>
+      <p>Assuming the <c>.rel</c> file is stored in a file
+ <c>start_ssl.rel</c> in the current directory, a boot script
+ can be built as follows:</p></item>
+ </list>
<code type="none">
1> systools:make_script("start_ssl",[]). </code>
- <p>There will now be a file <c>start_ssl.boot</c> in the current
- directory. To test the boot script, start Erlang with the
- <c>-boot</c> command line parameter specifying this boot script
- (with its full path but without the <c>.boot</c> suffix), in
- Unix it could look like this:</p>
- <p></p>
+ <p>There is now a <c>start_ssl.boot</c> file in the current
+ directory.</p>
+ <p>Do the following:</p>
+ <list type="bulleted">
+ <item><p>Test the boot script. To do this, start Erlang with the
+ <c>-boot</c> command-line parameter specifying this boot script
+ (with its full path, but without the <c>.boot</c> suffix). In
+ UNIX it can look as follows:</p></item>
+ </list>
<code type="none"><![CDATA[
$ erl -boot /home/me/ssl/start_ssl
Erlang (BEAM) emulator version 5.0
@@ -118,86 +131,99 @@ Erlang (BEAM) emulator version 5.0
Eshell V5.0 (abort with ^G)
1> whereis(ssl_manager).
<0.41.0> ]]></code>
- <p>The <c>whereis</c> function call verifies that the SSL
- application is really started.</p>
- <p>As an alternative to building a bootscript, one can explicitly
+    <p>The <c>whereis</c> function call verifies that the SSL
+ application is started.</p>
+
+ <p>As an alternative to building a bootscript, you can explicitly
add the path to the SSL <c>ebin</c> directory on the command
- line. This is done with the command line option <c>-pa</c>. This
+ line. This is done with command-line option <c>-pa</c>. This
works as the SSL application does not need to be started for the
distribution to come up, as a clone of the SSL application is
- hooked into the kernel application, so as long as the
- SSL applications code can be reached, the distribution will
- start. The <c>-pa</c> method is only recommended for testing
- purposes.</p>
+ hooked into the Kernel application. So, as long as the
+ SSL application code can be reached, the distribution starts.
+ The <c>-pa</c> method is only recommended for testing purposes.</p>
- <note><p>Note that the clone of the SSL application is necessary to
+    <note><p>The clone of the SSL application is required to
enable the use of the SSL code in such an early bootstage as
- needed to setup the distribution, however this will make it
+ needed to set up the distribution. However, this makes it
impossible to soft upgrade the SSL application.</p></note>
</section>
<section>
- <title>Specifying distribution module for net_kernel</title>
+ <title>Specifying Distribution Module for net_kernel</title>
<p>The distribution module for SSL is named <c>inet_tls_dist</c>
- and is specified on the command line with the <c>-proto_dist</c>
- option. The argument to <c>-proto_dist</c> should be the module
- name without the <c>_dist</c> suffix, so this distribution
+ and is specified on the command line with option <c>-proto_dist</c>.
+ The argument to <c>-proto_dist</c> is to be the module
+ name without suffix <c>_dist</c>. So, this distribution
module is specified with <c>-proto_dist inet_tls</c> on the
command line.</p>
- <p></p>
- <p>Extending the command line from above gives us the following:</p>
+ <p>Extending the command line gives the following:</p>
<code type="none">
$ erl -boot /home/me/ssl/start_ssl -proto_dist inet_tls </code>
-<p>For the distribution to actually be started, we need to give
-the emulator a name as well:</p>
+<p>For the distribution to be started, give the emulator a name as well:</p>
<code type="none">
$ erl -boot /home/me/ssl/start_ssl -proto_dist inet_tls -sname ssl_test
Erlang (BEAM) emulator version 5.0 [source]
Eshell V5.0 (abort with ^G)
(ssl_test@myhost)1> </code>
- <p>Note however that a node started in this way will refuse to talk
- to other nodes, as no ssl parameters are supplied
- (see below).</p>
+
+ <p>However, a node started in this way refuses to talk
+ to other nodes, as no SSL parameters are supplied
+ (see the next section).</p>
</section>
<section>
- <title>Specifying SSL options</title> <p>For SSL to work, at least
- a public key and certificate needs to be specified for the server
- side. In the following example the PEM-files consists of two
- entries the servers certificate and its private key.</p>
-
- <p>On the <c>erl</c> command line one can specify options that the
- SSL distribution will add when creating a socket.</p>
-
- <p>One can specify the simpler SSL options certfile, keyfile,
- password, cacertfile, verify, reuse_sessions,
- secure_renegotiate, depth, hibernate_after and ciphers (use old
- string format) by adding the prefix server_ or client_ to the
- option name. The server can also take the options dhfile and
- fail_if_no_peer_cert (also prefixed).
- <c>client_</c>-prfixed options are used when the distribution initiates a
- connection to another node and the <c>server_</c>-prefixed options are used
- when accepting a connection from a remote node. </p>
-
- <p> More complex options such as verify_fun are not available at
- the moment but a mechanism to handle such options may be added in
- a future release. </p>
-
- <p> Raw socket options such as packet and size must not be specified on
- the command line</p>.
-
- <p>The command line argument for specifying the SSL options is named
- <c>-ssl_dist_opt</c> and should be followed by pairs of
- SSL options and their values. The <c>-ssl_dist_opt</c> argument can
+ <title>Specifying SSL Options</title>
+ <p>For SSL to work, at least
+ a public key and a certificate must be specified for the server
+ side. In the following example, the PEM-files consist of two
+ entries, the server certificate and its private key.</p>
+
+ <p>On the <c>erl</c> command line you can specify options that the
+ SSL distribution adds when creating a socket.</p>
+
+ <p>The simplest SSL options in the following list can be specified
+ by adding the
+ prefix <c>server_</c> or <c>client_</c> to the option name:</p>
+ <list type="bulleted">
+ <item><c>certfile</c></item>
+ <item><c>keyfile</c></item>
+ <item><c>password</c></item>
+ <item><c>cacertfile</c></item>
+ <item><c>verify</c></item>
+ <item><c>reuse_sessions</c></item>
+ <item><c>secure_renegotiate</c></item>
+ <item><c>depth</c></item>
+ <item><c>hibernate_after</c></item>
+ <item><c>ciphers</c> (use old string format)</item>
+ </list>
+
+ <p>The server can also take the options <c>dhfile</c> and
+ <c>fail_if_no_peer_cert</c> (also prefixed).</p>
+
+ <p><c>client_</c>-prefixed options are used when the distribution
+ initiates a connection to another node. <c>server_</c>-prefixed
+ options are used when accepting a connection from a remote node.</p>
+
+ <p>More complex options, such as <c>verify_fun</c>, are currently not
+ available, but a mechanism to handle such options may be added in
+ a future release.</p>
+
+    <p>Raw socket options, such as <c>packet</c> and <c>size</c>, must not
+ be specified on the command line.</p>
+
+ <p>The command-line argument for specifying the SSL options is named
+ <c>-ssl_dist_opt</c> and is to be followed by pairs of
+ SSL options and their values. Argument <c>-ssl_dist_opt</c> can
be repeated any number of times.</p>
- <p>An example command line would now look something like this
+ <p>An example command line can now look as follows
(line breaks in the command are for readability,
- they should not be there when typed):</p>
+    and are not to be there when typed):</p>
<code type="none">
$ erl -boot /home/me/ssl/start_ssl -proto_dist inet_tls
-ssl_dist_opt server_certfile "/home/me/ssl/erlserver.pem"
@@ -207,20 +233,20 @@ Erlang (BEAM) emulator version 5.0 [source]
Eshell V5.0 (abort with ^G)
(ssl_test@myhost)1> </code>
- <p>A node started in this way will be fully functional, using SSL
+ <p>A node started in this way is fully functional, using SSL
as the distribution protocol.</p>
</section>
<section>
- <title>Setting up environment to always use SSL</title>
- <p>A convenient way to specify arguments to Erlang is to use the
- <c>ERL_FLAGS</c> environment variable. All the flags needed to
- use SSL distribution can be specified in that variable and will
- then be interpreted as command line arguments for all
+ <title>Setting up Environment to Always Use SSL</title>
+ <p>A convenient way to specify arguments to Erlang is to use environment
+ variable <c>ERL_FLAGS</c>. All the flags needed to
+ use the SSL distribution can be specified in that variable and are
+ then interpreted as command-line arguments for all
subsequent invocations of Erlang.</p>
- <p></p>
- <p>In a Unix (Bourne) shell it could look like this (line breaks for
- readability, they should not be there when typed):</p>
+
+ <p>In a Unix (Bourne) shell, it can look as follows (line breaks are for
+ readability, they are not to be there when typed):</p>
<code type="none">
$ ERL_FLAGS="-boot /home/me/ssl/start_ssl -proto_dist inet_tls
-ssl_dist_opt server_certfile /home/me/ssl/erlserver.pem
@@ -240,7 +266,8 @@ Eshell V5.0 (abort with ^G)
{ssl_dist_opt,["server_secure_renegotiate","true",
"client_secure_renegotiate","true"]
{home,["/home/me"]}] </code>
+
<p>The <c>init:get_arguments()</c> call verifies that the correct
- arguments are supplied to the emulator. </p>
+ arguments are supplied to the emulator.</p>
</section>
</chapter>
diff --git a/lib/ssl/doc/src/ssl_introduction.xml b/lib/ssl/doc/src/ssl_introduction.xml
new file mode 100644
index 0000000000..64607a393a
--- /dev/null
+++ b/lib/ssl/doc/src/ssl_introduction.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2015</year>
+ <year>2015</year>
+ <holder>Ericsson AB, All Rights Reserved</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ The Initial Developer of the Original Code is Ericsson AB.
+ </legalnotice>
+
+ <title>Introduction</title>
+ <prepared>OTP team</prepared>
+ <docno></docno>
+ <date>2015-03-05</date>
+ <rev>A</rev>
+ <file>ssl_introduction.xml</file>
+ </header>
+
+ <section>
+ <title>Purpose</title>
+ <p>Transport Layer Security (TLS) and its predecessor, the Secure
+ Sockets Layer (SSL), are cryptographic protocols designed to
+ provide communications security over a computer network. The protocols use
+    X.509 certificates and hence public key (asymmetric) cryptography to
+ authenticate the counterpart with whom they communicate,
+    and to exchange a symmetric key for payload encryption. The protocols provide
+    data/message confidentiality (encryption), integrity (through message authentication code checks),
+    and host verification (through certificate path validation).</p>
+ </section>
+
+ <section>
+ <title>Prerequisites</title>
+ <p>It is assumed that the reader is familiar with the Erlang
+ programming language, the concepts of OTP, and has a basic
+ understanding of SSL/TLS.</p>
+ </section>
+
+</chapter>
diff --git a/lib/ssl/doc/src/ssl_protocol.xml b/lib/ssl/doc/src/ssl_protocol.xml
index 80d9cc4ee8..cc49515066 100644
--- a/lib/ssl/doc/src/ssl_protocol.xml
+++ b/lib/ssl/doc/src/ssl_protocol.xml
@@ -4,7 +4,7 @@
<chapter>
<header>
<copyright>
- <year>2003</year><year>2013</year>
+ <year>2003</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -21,33 +21,42 @@
</legalnotice>
- <title>Transport Layer Security (TLS) and its predecessor, Secure Socket Layer (SSL)</title>
+ <title>TLS and its Predecessor, SSL</title>
+ <prepared></prepared>
+ <responsible></responsible>
+ <docno></docno>
+ <approved></approved>
+ <checked></checked>
+ <date></date>
+ <rev></rev>
<file>ssl_protocol.xml</file>
</header>
- <p>The erlang SSL application currently implements the protocol SSL/TLS
- for currently supported versions see <seealso marker="ssl">ssl(3)</seealso>
+ <p>The Erlang SSL application implements the SSL/TLS protocol
+ for the currently supported versions, see the
+ <seealso marker="ssl">ssl(3)</seealso> manual page.
</p>
- <p>By default erlang SSL is run over the TCP/IP protocol even
- though you could plug in any other reliable transport protocol
- with the same API as gen_tcp.</p>
+ <p>By default SSL/TLS is run over the TCP/IP protocol even
+ though you can plug in any other reliable transport protocol
+ with the same Application Programming Interface (API) as the
+ <c>gen_tcp</c> module in Kernel.</p>
- <p>If a client and server wants to use an upgrade mechanism, such as
- defined by RFC2817, to upgrade a regular TCP/IP connection to an SSL
- connection the erlang SSL API supports this. This can be useful for
- things such as supporting HTTP and HTTPS on the same port and
+  <p>If a client and a server want to use an upgrade mechanism, such as
+ defined by RFC 2817, to upgrade a regular TCP/IP connection to an SSL
+ connection, this is supported by the Erlang SSL application API. This can be
+ useful for, for example, supporting HTTP and HTTPS on the same port and
implementing virtual hosting.
</p>
<section>
- <title>Security overview</title>
+ <title>Security Overview</title>
- <p>To achieve authentication and privacy the client and server will
- perform a TLS Handshake procedure before transmitting or receiving
- any data. During the handshake they agree on a protocol version and
- cryptographic algorithms, they generate shared secrets using public
- key cryptographics and optionally authenticate each other with
+ <p>To achieve authentication and privacy, the client and server
+ perform a TLS handshake procedure before transmitting or receiving
+ any data. During the handshake, they agree on a protocol version and
+ cryptographic algorithms, generate shared secrets using public
+    key cryptography, and optionally authenticate each other with
digital certificates.</p>
</section>
@@ -55,20 +64,21 @@
<title>Data Privacy and Integrity</title>
<p>A <em>symmetric key</em> algorithm has one key only. The key is
- used for both encryption and decryption. These algorithms are fast
- compared to public key algorithms (using two keys, a public and a
- private one) and are therefore typically used for encrypting bulk
+ used for both encryption and decryption. These algorithms are fast,
+ compared to public key algorithms (using two keys, one public and one
+ private) and are therefore typically used for encrypting bulk
data.
</p>
<p>The keys for the symmetric encryption are generated uniquely
for each connection and are based on a secret negotiated
- in the TLS handshake. </p>
+ in the TLS handshake.</p>
- <p>The TLS handshake protocol and data transfer is run on top of
- the TLS Record Protocol that uses a keyed-hash MAC (Message
- Authenticity Code), or HMAC, to protect the message's data
- integrity. From the TLS RFC "A Message Authentication Code is a
+ <p>The TLS handshake protocol and data transfer is run on top of
+ the TLS Record Protocol, which uses a keyed-hash Message
+ Authenticity Code (MAC), or a Hash-based MAC (HMAC),
+ to protect the message data
+ integrity. From the TLS RFC: "A Message Authentication Code is a
one-way hash computed from a message and some secret data. It is
difficult to forge without knowing the secret data. Its purpose is
to detect if the message has been altered."
@@ -82,40 +92,43 @@
passport. The holder of the certificate is called the
<em>subject</em>. The certificate is signed
with the private key of the issuer of the certificate. A chain
- of trust is build by having the issuer in its turn being
- certified by another certificate and so on until you reach the
- so called root certificate that is self signed i.e. issued
+ of trust is built by having the issuer in its turn being
+ certified by another certificate, and so on, until you reach the
+      so-called root certificate, which is self-signed, that is, issued
by itself.</p>
- <p>Certificates are issued by <em>certification
- authorities</em> (<em>CA</em>s) only. There are a handful of
- top CAs in the world that issue root certificates. You can
- examine the certificates of several of them by clicking
+ <p>Certificates are issued by Certification Authorities (CAs) only.
+ A handful of top CAs in the world issue root certificates. You can
+ examine several of these certificates by clicking
through the menus of your web browser.
</p>
</section>
<section>
- <title>Authentication of Sender</title>
+ <title>Peer Authentication</title>
- <p>Authentication of the sender is done by public key path
- validation as defined in RFC 3280. Simplified that means that
- each certificate in the certificate chain is issued by the one
- before, the certificates attributes are valid ones, and the
- root cert is a trusted cert that is present in the trusted
- certs database kept by the peer.</p>
+ <p>Authentication of the peer is done by public key path
+ validation as defined in RFC 3280. This means basically
+ the following:</p>
+ <list type="bulleted">
+ <item>Each certificate in the certificate chain is issued by the
+ previous one.</item>
+      <item>The certificate attributes are valid.</item>
+ <item>The root certificate is a trusted certificate that is present
+ in the trusted certificate database kept by the peer.</item>
+ </list>
- <p>The server will always send a certificate chain as part of
- the TLS handshake, but the client will only send one if
- the server requests it. If the client does not have
- an appropriate certificate it may send an "empty" certificate
+ <p>The server always sends a certificate chain as part of
+ the TLS handshake, but the client only sends one if requested
+ by the server. If the client does not have
+ an appropriate certificate, it can send an "empty" certificate
to the server.</p>
- <p>The client may choose to accept some path evaluation errors
- for instance a web browser may ask the user if they want to
- accept an unknown CA root certificate. The server, if it request
- a certificate, will on the other hand not accept any path validation
- errors. It is configurable if the server should accept
+ <p>The client can choose to accept some path evaluation errors,
+ for example, a web browser can ask the user whether to
+ accept an unknown CA root certificate. The server, if it requests
+      a certificate, however, does not accept any path validation
+      errors. It is configurable whether the server is to accept
or reject an "empty" certificate as response to
a certificate request.</p>
</section>
@@ -123,25 +136,24 @@
<section>
<title>TLS Sessions</title>
- <p>From the TLS RFC "A TLS session is an association between a
- client and a server. Sessions are created by the handshake
+ <p>From the TLS RFC: "A TLS session is an association between a
+ client and a server. Sessions are created by the handshake
protocol. Sessions define a set of cryptographic security
parameters, which can be shared among multiple
connections. Sessions are used to avoid the expensive negotiation
of new security parameters for each connection."</p>
<p>Session data is by default kept by the SSL application in a
- memory storage hence session data will be lost at application
- restart or takeover. Users may define their own callback module
+ memory storage, hence session data is lost at application
+ restart or takeover. Users can define their own callback module
to handle session data storage if persistent data storage is
- required. Session data will also be invalidated after 24 hours
- from it was saved, for security reasons. It is of course
- possible to configure the amount of time the session data should be
- saved.</p>
+ required. Session data is also invalidated after 24 hours
+      from when it was saved, for security reasons. The amount of time the
+ session data is to be saved can be configured.</p>
- <p>SSL clients will by default try to reuse an available session,
- SSL servers will by default agree to reuse sessions when clients
- ask to do so.</p>
+ <p>By default the SSL clients try to reuse an available session and
+ by default the SSL servers agree to reuse sessions when clients
+ ask for it.</p>
</section>
</chapter>
diff --git a/lib/ssl/doc/src/ssl_session_cache_api.xml b/lib/ssl/doc/src/ssl_session_cache_api.xml
index cb97bbfbb2..28b5f4ce23 100644
--- a/lib/ssl/doc/src/ssl_session_cache_api.xml
+++ b/lib/ssl/doc/src/ssl_session_cache_api.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1999</year><year>2014</year>
+ <year>1999</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -21,42 +21,54 @@
</legalnotice>
<title>ssl</title>
+ <prepared></prepared>
+ <docno></docno>
+ <date></date>
+ <rev></rev>
<file>ssl_session_cache_api.xml</file>
</header>
<module>ssl_session_cache_api</module>
- <modulesummary>Defines the API for the TLS session cache so
- that the data storage scheme can be replaced by
- defining a new callback module implementing this API.</modulesummary>
+ <modulesummary>TLS session cache API</modulesummary>
+ <description>Defines the API for the TLS session cache so
+ that the data storage scheme can be replaced by
+ defining a new callback module implementing this API.</description>
<section>
- <title>Common Data Types</title>
+ <title>DATA TYPES</title>
- <p>The following data types are used in the functions below:
- </p>
+ <p>The following data types are used in the functions for
+ <c>ssl_session_cache_api</c>:</p>
- <p><c>cache_ref() = opaque()</c></p>
-
- <p><c>key() = {partialkey(), session_id()}</c></p>
-
- <p><c>partialkey() = opaque()</c></p>
-
- <p><c>session_id() = binary()</c></p>
+ <taglist>
+ <tag><c>cache_ref() =</c></tag>
+ <item><p><c>opaque()</c></p></item>
+
+ <tag><c>key() =</c></tag>
+ <item><p><c>{partialkey(), session_id()}</c></p></item>
+
+ <tag><c>partialkey() =</c></tag>
+ <item><p><c>opaque()</c></p></item>
+
+ <tag><c>session_id() =</c></tag>
+ <item><p><c>binary()</c></p></item>
+
+ <tag><c>session()</c> =</tag>
+ <item><p><c>opaque()</c></p></item>
+ </taglist>
- <p><c>session() = opaque()</c></p>
-
</section>
<funcs>
<func>
<name>delete(Cache, Key) -> _</name>
- <fsummary></fsummary>
+ <fsummary>Deletes a cache entry.</fsummary>
<type>
- <v> Cache = cache_ref()</v>
- <v> Key = key()</v>
+ <v>Cache = cache_ref()</v>
+ <v>Key = key()</v>
</type>
<desc>
- <p> Deletes a cache entry. Will only be called from the cache
+ <p>Deletes a cache entry. Is only called from the cache
handling process.
</p>
</desc>
@@ -69,49 +81,50 @@
<v></v>
</type>
<desc>
- <p>Calls Fun(Elem, AccIn) on successive elements of the
- cache, starting with AccIn == Acc0. Fun/2 must return a new
- accumulator which is passed to the next call. The function returns
- the final value of the accumulator. Acc0 is returned if the cache is
- empty.
+ <p>Calls <c>Fun(Elem, AccIn)</c> on successive elements of the
+ cache, starting with <c>AccIn == Acc0</c>. <c>Fun/2</c> must
+ return a new accumulator, which is passed to the next call.
+ The function returns the final value of the accumulator.
+ <c>Acc0</c> is returned if the cache is empty.
</p>
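+	<p>For a cache backed by an ETS table, this callback can map directly
+	onto <c>ets:foldl/3</c>; the following is a minimal sketch, assuming
+	<c>cache_ref()</c> is the ETS table identifier:</p>
+	<code type="erl">
+foldl(Fun, Acc0, Cache) ->
+    %% Fun(Elem, AccIn) is applied to every cached entry.
+    ets:foldl(Fun, Acc0, Cache).</code>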
</desc>
</func>
<func>
<name>init(Args) -> opaque() </name>
- <fsummary>Return cache reference</fsummary>
+ <fsummary>Returns cache reference.</fsummary>
<type>
<v>Args = proplists:proplist()</v>
- <d>Will always include the property {role, client | server}. Currently this
- is the only predefined property, there may also be user defined properties.
- <seealso marker="ssl_app"> See also application environment variable
- session_cb_init_args</seealso>
- </d>
</type>
<desc>
+ <p>Includes property <c>{role, client | server}</c>.
+	Currently this is the only predefined property;
+ there can also be user-defined properties. See also
+ application environment variable
+ <seealso marker="ssl_app">session_cb_init_args</seealso>.
+ </p>
<p>Performs possible initializations of the cache and returns
- a reference to it that will be used as parameter to the other
- API functions. Will be called by the cache handling processes
- init function, hence putting the same requirements on it as a
- normal process init function. Note that this function will be
- called twice when starting the ssl application, once with the
- role client and once with the role server, as the ssl application
- must be prepared to take on both roles.
+ a reference to it that is used as parameter to the other
+ API functions. Is called by the cache handling processes
+ <c>init</c> function, hence putting the same requirements on it
+ as a normal process <c>init</c> function. This function is
+ called twice when starting the SSL application, once with
+ the role client and once with the role server, as the SSL
+ application must be prepared to take on both roles.
</p>
</desc>
</func>
<func>
<name>lookup(Cache, Key) -> Entry</name>
- <fsummary> Looks up a cache entry.</fsummary>
+ <fsummary>Looks up a cache entry.</fsummary>
<type>
- <v> Cache = cache_ref()</v>
- <v> Key = key()</v>
- <v> Entry = session() | undefined </v>
+ <v>Cache = cache_ref()</v>
+ <v>Key = key()</v>
+ <v>Entry = session() | undefined</v>
</type>
<desc>
- <p>Looks up a cache entry. Should be callable from any
+ <p>Looks up a cache entry. Is to be callable from any
process.
</p>
</desc>
@@ -119,14 +132,14 @@
<func>
<name>select_session(Cache, PartialKey) -> [session()]</name>
- <fsummary>>Selects sessions that could be reused.</fsummary>
+ <fsummary>Selects sessions that can be reused.</fsummary>
<type>
- <v> Cache = cache_ref()</v>
- <v> PartialKey = partialkey()</v>
- <v> Session = session()</v>
+ <v>Cache = cache_ref()</v>
+ <v>PartialKey = partialkey()</v>
+ <v>Session = session()</v>
</type>
<desc>
- <p>Selects sessions that could be reused. Should be callable
+ <p>Selects sessions that can be reused. Is to be callable
from any process.
</p>
</desc>
@@ -137,7 +150,7 @@
<fsummary>Called by the process that handles the cache when it
is about to terminate.</fsummary>
<type>
- <v>Cache = term() - as returned by init/0</v>
+ <v>Cache = term() - as returned by init/0</v>
</type>
<desc>
<p>Takes care of possible cleanup that is needed when the
@@ -148,15 +161,15 @@
<func>
<name>update(Cache, Key, Session) -> _</name>
- <fsummary> Caches a new session or updates a already cached one.</fsummary>
+ <fsummary>Caches a new session or updates an already cached one.</fsummary>
<type>
- <v> Cache = cache_ref()</v>
- <v> Key = key()</v>
- <v> Session = session()</v>
+ <v>Cache = cache_ref()</v>
+ <v>Key = key()</v>
+ <v>Session = session()</v>
</type>
<desc>
- <p> Caches a new session or updates a already cached one. Will
- only be called from the cache handling process.
+ <p>Caches a new session or updates an already cached one. Is
+ only called from the cache handling process.
</p>
</desc>
</func>
diff --git a/lib/ssl/doc/src/usersguide.xml b/lib/ssl/doc/src/usersguide.xml
index b1c7190085..6fce022507 100644
--- a/lib/ssl/doc/src/usersguide.xml
+++ b/lib/ssl/doc/src/usersguide.xml
@@ -23,14 +23,17 @@
<title>SSL User's Guide</title>
<prepared>OTP Team</prepared>
+ <docno></docno>
<date>2003-05-26</date>
+ <rev></rev>
<file>usersguide.sgml</file>
</header>
<description>
- <p>The <em>SSL</em> application provides secure communication over
+ <p>The Secure Socket Layer (SSL) application provides secure communication over
sockets.
</p>
</description>
+ <xi:include href="ssl_introduction.xml"/>
<xi:include href="ssl_protocol.xml"/>
<xi:include href="using_ssl.xml"/>
<xi:include href="ssl_distribution.xml"/>
diff --git a/lib/ssl/doc/src/using_ssl.xml b/lib/ssl/doc/src/using_ssl.xml
index cce388d02a..dbbc1aa9d3 100644
--- a/lib/ssl/doc/src/using_ssl.xml
+++ b/lib/ssl/doc/src/using_ssl.xml
@@ -21,126 +21,131 @@
</legalnotice>
- <title>Using the SSL API</title>
+ <title>Using SSL API</title>
+ <prepared></prepared>
+ <responsible></responsible>
+ <docno></docno>
+ <approved></approved>
+ <checked></checked>
+ <date></date>
+ <rev></rev>
<file>using_ssl.xml</file>
</header>
-
- <section>
- <title>General information</title>
- <p>To see relevant version information for ssl you can
- call ssl:versions/0</p>
+ <p>To see relevant version information for ssl, call
+  <seealso marker="ssl:ssl#versions-0"><c>ssl:versions/0</c></seealso>.</p>
- <p>To see all supported cipher suites
- call ssl:cipher_suites/0. Note that available cipher suites
- for a connection will depend on your certificate. It is also
- possible to specify a specific cipher suite(s) that you
- want your connection to use. Default is to use the strongest
- available.</p>
-
- </section>
+ <p>To see all supported cipher suites, call <seealso marker="ssl:ssl#cipher_suites-1"><c>ssl:cipher_suites(all)</c> </seealso>.
+ The available cipher suites for a connection depend on your certificate.
+ Specific cipher suites that you want your connection to use can also be
+ specified. Default is to use the strongest available.</p>
<section>
- <title>Setting up connections</title>
+ <title>Setting up Connections</title>
- <p>Here follows some small example of how to set up client/server connections
- using the erlang shell. The returned value of the sslsocket has been abbreviated with
- <c>[...]</c> as it can be fairly large and is opaque.</p>
+ <p>This section shows a small example of how to set up client/server connections
+ using the Erlang shell. The returned value of the <c>sslsocket</c> is abbreviated
+ with <c>[...]</c> as it can be fairly large and is opaque.</p>
<section>
- <title>Minmal example</title>
+ <title>Minimal Example</title>
- <note><p> The minimal setup is not the most secure setup of ssl.</p>
+ <note><p> The minimal setup is not the most secure setup of SSL.</p>
</note>
-
- <p> Start server side</p>
+
+ <p>To set up client/server connections:</p>
+
+ <p><em>Step 1:</em> Start the server side:</p>
<code type="erl">1 server> ssl:start().
ok</code>
- <p>Create an ssl listen socket</p>
+ <p><em>Step 2:</em> Create an SSL listen socket:</p>
<code type="erl">2 server> {ok, ListenSocket} =
ssl:listen(9999, [{certfile, "cert.pem"}, {keyfile, "key.pem"},{reuseaddr, true}]).
{ok,{sslsocket, [...]}}</code>
- <p>Do a transport accept on the ssl listen socket</p>
+ <p><em>Step 3:</em> Do a transport accept on the SSL listen socket:</p>
<code type="erl">3 server> {ok, Socket} = ssl:transport_accept(ListenSocket).
{ok,{sslsocket, [...]}}</code>
- <p>Start client side</p>
+ <p><em>Step 4:</em> Start the client side:</p>
<code type="erl">1 client> ssl:start().
ok</code>
<code type="erl">2 client> {ok, Socket} = ssl:connect("localhost", 9999, [], infinity).
{ok,{sslsocket, [...]}}</code>
- <p>Do the ssl handshake</p>
+ <p><em>Step 5:</em> Do the SSL handshake:</p>
<code type="erl">4 server> ok = ssl:ssl_accept(Socket).
ok</code>
- <p>Send a messag over ssl</p>
+ <p><em>Step 6:</em> Send a message over SSL:</p>
<code type="erl">5 server> ssl:send(Socket, "foo").
ok</code>
- <p>Flush the shell message queue to see that we got the message
- sent on the server side</p>
+ <p><em>Step 7:</em> Flush the shell message queue to see that the message
+ was sent on the server side:</p>
<code type="erl">3 client> flush().
Shell got {ssl,{sslsocket,[...]},"foo"}
ok</code>
</section>
<section>
- <title>Upgrade example</title>
+ <title>Upgrade Example</title>
- <note><p> To upgrade a TCP/IP connection to an ssl connection the
- client and server have to aggre to do so. Agreement
- may be accompliced by using a protocol such the one used by HTTP
- specified in RFC 2817.</p> </note>
+ <note><p>To upgrade a TCP/IP connection to an SSL connection, the
+ client and server must agree to do so. The agreement
+ can be accomplished by using a protocol such as the one defined for
+ HTTP in RFC 2817.</p></note>
+
+ <p>To upgrade to an SSL connection:</p>
- <p>Start server side</p>
+ <p><em>Step 1:</em> Start the server side:</p>
<code type="erl">1 server> ssl:start().
ok</code>
- <p>Create a normal tcp listen socket</p>
+ <p><em>Step 2:</em> Create a normal TCP listen socket:</p>
<code type="erl">2 server> {ok, ListenSocket} = gen_tcp:listen(9999, [{reuseaddr, true}]).
{ok, #Port&lt;0.475&gt;}</code>
- <p>Accept client connection</p>
+ <p><em>Step 3:</em> Accept client connection:</p>
<code type="erl">3 server> {ok, Socket} = gen_tcp:accept(ListenSocket).
{ok, #Port&lt;0.476&gt;}</code>
- <p>Start client side</p>
+ <p><em>Step 4:</em> Start the client side:</p>
<code type="erl">1 client> ssl:start().
ok</code>
<code type="erl">2 client> {ok, Socket} = gen_tcp:connect("localhost", 9999, [], infinity).</code>
- <p>Make sure active is set to false before trying
- to upgrade a connection to an ssl connection, otherwhise
- ssl handshake messages may be deliverd to the wrong process.</p>
+ <p><em>Step 5:</em> Ensure <c>active</c> is set to <c>false</c> before trying
+ to upgrade the connection to an SSL connection; otherwise,
+ SSL handshake messages can be delivered to the wrong process:</p>
<code type="erl">4 server> inet:setopts(Socket, [{active, false}]).
ok</code>
- <p>Do the ssl handshake.</p>
+ <p><em>Step 6:</em> Do the SSL handshake:</p>
<code type="erl">5 server> {ok, SSLSocket} = ssl:ssl_accept(Socket, [{cacertfile, "cacerts.pem"},
{certfile, "cert.pem"}, {keyfile, "key.pem"}]).
{ok,{sslsocket,[...]}}</code>
- <p> Upgrade to an ssl connection. Note that the client and server
- must agree upon the upgrade and the server must call
- ssl:accept/2 before the client calls ssl:connect/3.</p>
+ <p><em>Step 7:</em> Upgrade to an SSL connection. The client and server
+ must agree upon the upgrade. The server must call
+ <c>ssl:ssl_accept/2</c> before the client calls <c>ssl:connect/3</c>.</p>
<code type="erl">3 client>{ok, SSLSocket} = ssl:connect(Socket, [{cacertfile, "cacerts.pem"},
{certfile, "cert.pem"}, {keyfile, "key.pem"}], infinity).
{ok,{sslsocket,[...]}}</code>
- <p>Send a messag over ssl</p>
+ <p><em>Step 8:</em> Send a message over SSL:</p>
<code type="erl">4 client> ssl:send(SSLSocket, "foo").
ok</code>
- <p>Set active true on the ssl socket</p>
+ <p><em>Step 9:</em> Set the <c>active</c> option to <c>true</c> on the SSL socket:</p>
<code type="erl">4 server> ssl:setopts(SSLSocket, [{active, true}]).
ok</code>
- <p>Flush the shell message queue to see that we got the message
- sent on the client side</p>
+ <p><em>Step 10:</em> Flush the shell message queue to see the message
+ sent from the client side:</p>
<code type="erl">5 server> flush().
Shell got {ssl,{sslsocket,[...]},"foo"}
ok</code>
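+ <p>The negotiated protocol and cipher suite of the upgraded connection can
+ then be inspected with <c>ssl:connection_information/1</c> (the values in
+ the result below are only illustrative):</p>
+ <code type="erl">5 client> ssl:connection_information(SSLSocket).
+ {ok,[{protocol,'tlsv1.2'},
+      {cipher_suite,{ecdhe_rsa,aes_128_cbc,sha}},
+      {sni_hostname,undefined}]}</code>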
diff --git a/lib/ssl/src/Makefile b/lib/ssl/src/Makefile
index 0c00a650b9..d71d3fc445 100644
--- a/lib/ssl/src/Makefile
+++ b/lib/ssl/src/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1999-2014. All Rights Reserved.
+# Copyright Ericsson AB 1999-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -38,7 +38,8 @@ RELSYSDIR = $(RELEASE_PATH)/lib/ssl-$(VSN)
# ----------------------------------------------------
BEHAVIOUR_MODULES= \
- ssl_session_cache_api
+ ssl_session_cache_api \
+ ssl_crl_cache_api
MODULES= \
ssl \
@@ -65,6 +66,8 @@ MODULES= \
ssl_manager \
ssl_session \
ssl_session_cache \
+ ssl_crl\
+ ssl_crl_cache \
ssl_socket \
ssl_listen_tracker_sup \
tls_record \
@@ -164,5 +167,5 @@ $(EBIN)/ssl_session_cache.$(EMULATOR): ssl_internal.hrl ssl_handshake.hrl
$(EBIN)/ssl_session_cache_api.$(EMULATOR): ssl_internal.hrl ssl_handshake.hrl
$(EBIN)/ssl_ssl3.$(EMULATOR): ssl_internal.hrl ssl_record.hrl ssl_cipher.hrl
$(EBIN)/ssl_tls1.$(EMULATOR): ssl_internal.hrl ssl_record.hrl ssl_cipher.hrl
-
+$(EBIN)/ssl_cache.$(EMULATOR): ssl_cache.erl ssl_internal.hrl ../../public_key/include/public_key.hrl
diff --git a/lib/ssl/src/dtls_connection.erl b/lib/ssl/src/dtls_connection.erl
index 508983ddac..610e2c4e41 100644
--- a/lib/ssl/src/dtls_connection.erl
+++ b/lib/ssl/src/dtls_connection.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -146,7 +146,7 @@ init([Role, Host, Port, Socket, {SSLOpts0, _} = Options, User, CbInfo]) ->
Handshake = ssl_handshake:init_handshake_history(),
TimeStamp = calendar:datetime_to_gregorian_seconds({date(), time()}),
try ssl_config:init(SSLOpts0, Role) of
- {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, OwnCert, Key, DHParams} ->
+ {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbInfo, OwnCert, Key, DHParams} ->
Session = State0#state.session,
State = State0#state{
tls_handshake_history = Handshake,
@@ -155,6 +155,7 @@ init([Role, Host, Port, Socket, {SSLOpts0, _} = Options, User, CbInfo]) ->
file_ref_db = FileRefHandle,
cert_db_ref = Ref,
cert_db = CertDbHandle,
+ crl_db = CRLDbInfo,
session_cache = CacheHandle,
private_key = Key,
diffie_hellman_params = DHParams},
@@ -227,9 +228,9 @@ hello(Hello,
case dtls_handshake:hello(Hello, SslOptions, ConnectionStates0, Renegotiation) of
#alert{} = Alert ->
handle_own_alert(Alert, ReqVersion, hello, State);
- {Version, NewId, ConnectionStates, NextProtocol} ->
+ {Version, NewId, ConnectionStates, ProtoExt, Protocol} ->
ssl_connection:handle_session(Hello,
- Version, NewId, ConnectionStates, NextProtocol, State)
+ Version, NewId, ConnectionStates, ProtoExt, Protocol, State)
end;
hello(Msg, State) ->
diff --git a/lib/ssl/src/dtls_handshake.erl b/lib/ssl/src/dtls_handshake.erl
index 31d525b295..30381df050 100644
--- a/lib/ssl/src/dtls_handshake.erl
+++ b/lib/ssl/src/dtls_handshake.erl
@@ -181,8 +181,8 @@ handle_server_hello_extensions(Version, SessionId, Random, CipherSuite,
SslOpt, ConnectionStates0, Renegotiation) of
#alert{} = Alert ->
Alert;
- {ConnectionStates, Protocol} ->
- {Version, SessionId, ConnectionStates, Protocol}
+ {ConnectionStates, ProtoExt, Protocol} ->
+ {Version, SessionId, ConnectionStates, ProtoExt, Protocol}
end.
dtls_fragment(Mss, MsgType, Len, MsgSeq, Bin, Offset, Acc)
diff --git a/lib/ssl/src/dtls_record.erl b/lib/ssl/src/dtls_record.erl
index c0776e822b..59b3ddec5c 100644
--- a/lib/ssl/src/dtls_record.erl
+++ b/lib/ssl/src/dtls_record.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -194,7 +194,7 @@ decode_cipher_text(#ssl_tls{type = Type, version = Version,
compression_algorithm=CompAlg}
} = ReadState0}= ConnnectionStates0) ->
{PlainFragment, Mac, ReadState1} = ssl_record:decipher(dtls_v1:corresponding_tls_version(Version),
- CipherFragment, ReadState0),
+ CipherFragment, ReadState0, true),
MacHash = calc_mac_hash(ReadState1, Type, Version, Epoch, Seq, PlainFragment),
case ssl_record:is_correct_mac(Mac, MacHash) of
true ->
diff --git a/lib/ssl/src/ssl.app.src b/lib/ssl/src/ssl.app.src
index 36681e2897..be8ef6f85f 100644
--- a/lib/ssl/src/ssl.app.src
+++ b/lib/ssl/src/ssl.app.src
@@ -39,6 +39,10 @@
ssl_manager,
ssl_pkix_db,
ssl_certificate,
+ %% CRL handling
+ ssl_crl,
+ ssl_crl_cache,
+ ssl_crl_cache_api,
%% App structure
ssl_app,
ssl_sup,
@@ -49,7 +53,7 @@
{applications, [crypto, public_key, kernel, stdlib]},
{env, []},
{mod, {ssl_app, []}},
- {runtime_dependencies, ["stdlib-2.0","public_key-0.22","kernel-3.0",
- "erts-6.0","crypto-3.3"]}]}.
+ {runtime_dependencies, ["stdlib-2.0","public_key-1.0","kernel-3.0",
+ "erts-6.0","crypto-3.3", "inets-5.10.7"]}]}.
diff --git a/lib/ssl/src/ssl.appup.src b/lib/ssl/src/ssl.appup.src
index 7986722094..d100e41930 100644
--- a/lib/ssl/src/ssl.appup.src
+++ b/lib/ssl/src/ssl.appup.src
@@ -1,12 +1,14 @@
%% -*- erlang -*-
{"%VSN%",
[
+ {<<"6.0">>, [{load_module, ssl_handshake, soft_purge, soft_purge, []}]},
{<<"5\\.3\\.[1-7]($|\\..*)">>, [{restart_application, ssl}]},
{<<"5\\.[0-2]($|\\..*)">>, [{restart_application, ssl}]},
{<<"4\\..*">>, [{restart_application, ssl}]},
{<<"3\\..*">>, [{restart_application, ssl}]}
],
[
+ {<<"6.0">>, [{load_module, ssl_handshake, soft_purge, soft_purge, []}]},
{<<"5\\.3\\.[1-7]($|\\..*)">>, [{restart_application, ssl}]},
{<<"5\\.[0-2]($|\\..*)">>, [{restart_application, ssl}]},
{<<"4\\..*">>, [{restart_application, ssl}]},
diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl
index dcba69a65e..225a9be66f 100644
--- a/lib/ssl/src/ssl.erl
+++ b/lib/ssl/src/ssl.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,9 +38,13 @@
%% SSL/TLS protocol handling
-export([cipher_suites/0, cipher_suites/1, suite_definition/1,
connection_info/1, versions/0, session_info/1, format_error/1,
- renegotiate/1, prf/5, negotiated_next_protocol/1]).
+ renegotiate/1, prf/5, negotiated_protocol/1, negotiated_next_protocol/1,
+ connection_information/1, connection_information/2]).
%% Misc
--export([random_bytes/1]).
+-export([random_bytes/1, handle_options/2]).
+
+-deprecated({negotiated_next_protocol, 1, next_major_release}).
+-deprecated({connection_info, 1, next_major_release}).
-include("ssl_api.hrl").
-include("ssl_internal.hrl").
@@ -284,16 +288,42 @@ controlling_process(#sslsocket{pid = {Listen,
is_pid(NewOwner) ->
Transport:controlling_process(Listen, NewOwner).
+
+%%--------------------------------------------------------------------
+-spec connection_information(#sslsocket{}) -> {ok, list()} | {error, reason()}.
+%%
+%% Description: Return SSL information for the connection
+%%--------------------------------------------------------------------
+connection_information(#sslsocket{pid = Pid}) when is_pid(Pid) -> ssl_connection:connection_information(Pid);
+connection_information(#sslsocket{pid = {Listen, _}}) when is_port(Listen) -> {error, enotconn}.
+
+
+%%--------------------------------------------------------------------
+-spec connection_information(#sslsocket{}, [atom()]) -> {ok, list()} | {error, reason()}.
+%%
+%% Description: Return SSL information for the connection
+%%--------------------------------------------------------------------
+connection_information(#sslsocket{} = SSLSocket, Items) ->
+ case connection_information(SSLSocket) of
+ {ok, I} ->
+ {ok, lists:filter(fun({K, _}) -> lists:foldl(fun(K1, Acc) when K1 =:= K -> Acc + 1; (_, Acc) -> Acc end, 0, Items) > 0 end, I)};
+ E ->
+ E
+ end.
+
%%--------------------------------------------------------------------
-spec connection_info(#sslsocket{}) -> {ok, {tls_record:tls_atom_version(), ssl_cipher:erl_cipher_suite()}} |
{error, reason()}.
%%
%% Description: Returns ssl protocol and cipher used for the connection
%%--------------------------------------------------------------------
-connection_info(#sslsocket{pid = Pid}) when is_pid(Pid) ->
- ssl_connection:info(Pid);
-connection_info(#sslsocket{pid = {Listen, _}}) when is_port(Listen) ->
- {error, enotconn}.
+connection_info(#sslsocket{} = SSLSocket) ->
+ case connection_information(SSLSocket) of
+ {ok, Result} ->
+ {ok, {proplists:get_value(protocol, Result), proplists:get_value(cipher_suite, Result)}};
+ Error ->
+ Error
+ end.
%%--------------------------------------------------------------------
-spec peername(#sslsocket{}) -> {ok, {inet:ip_address(), inet:port_number()}} | {error, reason()}.
@@ -330,13 +360,27 @@ suite_definition(S) ->
{KeyExchange, Cipher, Hash}.
%%--------------------------------------------------------------------
+-spec negotiated_protocol(#sslsocket{}) -> {ok, binary()} | {error, reason()}.
+%%
+%% Description: Returns the protocol that has been negotiated. If no
+%% protocol has been negotiated, returns {error, protocol_not_negotiated}
+%%--------------------------------------------------------------------
+negotiated_protocol(#sslsocket{pid = Pid}) ->
+ ssl_connection:negotiated_protocol(Pid).
+
+%%--------------------------------------------------------------------
-spec negotiated_next_protocol(#sslsocket{}) -> {ok, binary()} | {error, reason()}.
%%
%% Description: Returns the next protocol that has been negotiated. If no
%% protocol has been negotiated will return {error, next_protocol_not_negotiated}
%%--------------------------------------------------------------------
-negotiated_next_protocol(#sslsocket{pid = Pid}) ->
- ssl_connection:negotiated_next_protocol(Pid).
+negotiated_next_protocol(Socket) ->
+ case negotiated_protocol(Socket) of
+ {error, protocol_not_negotiated} ->
+ {error, next_protocol_not_negotiated};
+ Res ->
+ Res
+ end.
%%--------------------------------------------------------------------
-spec cipher_suites(erlang | openssl | all) -> [ssl_cipher:erl_cipher_suite()] |
@@ -353,12 +397,8 @@ cipher_suites(openssl) ->
|| S <- ssl_cipher:filter_suites(ssl_cipher:suites(Version))];
cipher_suites(all) ->
Version = tls_record:highest_protocol_version([]),
- Supported = ssl_cipher:all_suites(Version)
- ++ ssl_cipher:anonymous_suites(Version)
- ++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites(),
- ssl_cipher:filter_suites([suite_definition(S) || S <- Supported]).
-
+ ssl_cipher:filter_suites([suite_definition(S)
+ || S <-ssl_cipher:all_suites(Version)]).
cipher_suites() ->
cipher_suites(erlang).
@@ -454,7 +494,7 @@ session_info(#sslsocket{pid = {Listen,_}}) when is_port(Listen) ->
versions() ->
Vsns = tls_record:supported_protocol_versions(),
SupportedVsns = [tls_record:protocol_version(Vsn) || Vsn <- Vsns],
- AvailableVsns = ?ALL_SUPPORTED_VERSIONS,
+ AvailableVsns = ?ALL_AVAILABLE_VERSIONS,
%% TODO Add DTLS versions when supported
[{ssl_app, ?VSN}, {supported, SupportedVsns}, {available, AvailableVsns}].
@@ -648,6 +688,10 @@ handle_options(Opts0) ->
renegotiate_at = handle_option(renegotiate_at, Opts, ?DEFAULT_RENEGOTIATE_AT),
hibernate_after = handle_option(hibernate_after, Opts, undefined),
erl_dist = handle_option(erl_dist, Opts, false),
+ alpn_advertised_protocols =
+ handle_option(alpn_advertised_protocols, Opts, undefined),
+ alpn_preferred_protocols =
+ handle_option(alpn_preferred_protocols, Opts, undefined),
next_protocols_advertised =
handle_option(next_protocols_advertised, Opts, undefined),
next_protocol_selector =
@@ -655,8 +699,14 @@ handle_options(Opts0) ->
handle_option(client_preferred_next_protocols, Opts, undefined)),
log_alert = handle_option(log_alert, Opts, true),
server_name_indication = handle_option(server_name_indication, Opts, undefined),
+ sni_hosts = handle_option(sni_hosts, Opts, []),
+ sni_fun = handle_option(sni_fun, Opts, undefined),
honor_cipher_order = handle_option(honor_cipher_order, Opts, false),
- protocol = proplists:get_value(protocol, Opts, tls)
+ protocol = proplists:get_value(protocol, Opts, tls),
+ padding_check = proplists:get_value(padding_check, Opts, true),
+ fallback = proplists:get_value(fallback, Opts, false),
+ crl_check = handle_option(crl_check, Opts, false),
+ crl_cache = handle_option(crl_cache, Opts, {ssl_crl_cache, {internal, []}})
},
CbInfo = proplists:get_value(cb_info, Opts, {gen_tcp, tcp, tcp_closed, tcp_error}),
@@ -667,9 +717,11 @@ handle_options(Opts0) ->
user_lookup_fun, psk_identity, srp_identity, ciphers,
reuse_session, reuse_sessions, ssl_imp,
cb_info, renegotiate_at, secure_renegotiate, hibernate_after,
- erl_dist, next_protocols_advertised,
+ erl_dist, alpn_advertised_protocols, sni_hosts, sni_fun,
+ alpn_preferred_protocols, next_protocols_advertised,
client_preferred_next_protocols, log_alert,
- server_name_indication, honor_cipher_order],
+ server_name_indication, honor_cipher_order, padding_check, crl_check, crl_cache,
+ fallback],
SockOpts = lists:foldl(fun(Key, PropList) ->
proplists:delete(Key, PropList)
@@ -682,6 +734,18 @@ handle_options(Opts0) ->
inet_user = SockOpts, transport_info = CbInfo, connection_cb = ConnetionCb
}}.
+handle_option(sni_fun, Opts, Default) ->
+ OptFun = validate_option(sni_fun,
+ proplists:get_value(sni_fun, Opts, Default)),
+ OptHosts = proplists:get_value(sni_hosts, Opts, undefined),
+ case {OptFun, OptHosts} of
+ {Default, _} ->
+ Default;
+ {_, undefined} ->
+ OptFun;
+ _ ->
+ throw({error, {conflict_options, [sni_fun, sni_hosts]}})
+ end;
handle_option(OptionName, Opts, Default) ->
validate_option(OptionName,
proplists:get_value(OptionName, Opts, Default)).
@@ -802,6 +866,20 @@ validate_option(hibernate_after, Value) when is_integer(Value), Value >= 0 ->
Value;
validate_option(erl_dist,Value) when is_boolean(Value) ->
Value;
+validate_option(Opt, Value)
+ when Opt =:= alpn_advertised_protocols orelse Opt =:= alpn_preferred_protocols,
+ is_list(Value) ->
+ case tls_record:highest_protocol_version([]) of
+ {3,0} ->
+ throw({error, {options, {not_supported_in_sslv3, {Opt, Value}}}});
+ _ ->
+ validate_binary_list(Opt, Value),
+ Value
+ end;
+validate_option(Opt, Value)
+ when Opt =:= alpn_advertised_protocols orelse Opt =:= alpn_preferred_protocols,
+ Value =:= undefined ->
+ undefined;
validate_option(client_preferred_next_protocols = Opt, {Precedence, PreferredProtocols} = Value)
when is_list(PreferredProtocols) ->
case tls_record:highest_protocol_version([]) of
@@ -845,11 +923,41 @@ validate_option(server_name_indication, disable) ->
disable;
validate_option(server_name_indication, undefined) ->
undefined;
+validate_option(sni_hosts, []) ->
+ [];
+validate_option(sni_hosts, [{Hostname, SSLOptions} | Tail]) when is_list(Hostname) ->
+ RecursiveSNIOptions = proplists:get_value(sni_hosts, SSLOptions, undefined),
+ case RecursiveSNIOptions of
+ undefined ->
+ [{Hostname, validate_options(SSLOptions)} | validate_option(sni_hosts, Tail)];
+ _ ->
+ throw({error, {options, {sni_hosts, RecursiveSNIOptions}}})
+ end;
+validate_option(sni_fun, undefined) ->
+ undefined;
+validate_option(sni_fun, Fun) when is_function(Fun) ->
+ Fun;
validate_option(honor_cipher_order, Value) when is_boolean(Value) ->
Value;
+validate_option(padding_check, Value) when is_boolean(Value) ->
+ Value;
+validate_option(fallback, Value) when is_boolean(Value) ->
+ Value;
+validate_option(crl_check, Value) when is_boolean(Value) ->
+ Value;
+validate_option(crl_check, Value) when (Value == best_effort) or (Value == peer) ->
+ Value;
+validate_option(crl_cache, {Cb, {_Handle, Options}} = Value) when is_atom(Cb) and is_list(Options) ->
+ Value;
validate_option(Opt, Value) ->
throw({error, {options, {Opt, Value}}}).
+
+validate_options([]) ->
+ [];
+validate_options([{Opt, Value} | Tail]) ->
+ [{Opt, validate_option(Opt, Value)} | validate_options(Tail)].
+
validate_npn_ordering(client) ->
ok;
validate_npn_ordering(server) ->
@@ -952,10 +1060,7 @@ binary_cipher_suites(Version, [{_,_,_}| _] = Ciphers0) ->
binary_cipher_suites(Version, Ciphers);
binary_cipher_suites(Version, [Cipher0 | _] = Ciphers0) when is_binary(Cipher0) ->
- All = ssl_cipher:suites(Version)
- ++ ssl_cipher:anonymous_suites(Version)
- ++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites(),
+ All = ssl_cipher:all_suites(Version),
case [Cipher || Cipher <- Ciphers0, lists:member(Cipher, All)] of
[] ->
%% Defaults to all supported suites that does
@@ -1123,6 +1228,10 @@ new_ssl_options([{secure_renegotiate, Value} | Rest], #ssl_options{} = Opts, Rec
new_ssl_options(Rest, Opts#ssl_options{secure_renegotiate = validate_option(secure_renegotiate, Value)}, RecordCB);
new_ssl_options([{hibernate_after, Value} | Rest], #ssl_options{} = Opts, RecordCB) ->
new_ssl_options(Rest, Opts#ssl_options{hibernate_after = validate_option(hibernate_after, Value)}, RecordCB);
+new_ssl_options([{alpn_advertised_protocols, Value} | Rest], #ssl_options{} = Opts, RecordCB) ->
+ new_ssl_options(Rest, Opts#ssl_options{alpn_advertised_protocols = validate_option(alpn_advertised_protocols, Value)}, RecordCB);
+new_ssl_options([{alpn_preferred_protocols, Value} | Rest], #ssl_options{} = Opts, RecordCB) ->
+ new_ssl_options(Rest, Opts#ssl_options{alpn_preferred_protocols = validate_option(alpn_preferred_protocols, Value)}, RecordCB);
new_ssl_options([{next_protocols_advertised, Value} | Rest], #ssl_options{} = Opts, RecordCB) ->
new_ssl_options(Rest, Opts#ssl_options{next_protocols_advertised = validate_option(next_protocols_advertised, Value)}, RecordCB);
new_ssl_options([{client_preferred_next_protocols, Value} | Rest], #ssl_options{} = Opts, RecordCB) ->
@@ -1182,3 +1291,4 @@ handle_verify_options(Opts, CaCerts) ->
Value ->
throw({error, {options, {verify, Value}}})
end.
+
diff --git a/lib/ssl/src/ssl_alert.erl b/lib/ssl/src/ssl_alert.erl
index 78dc98bc25..c46facb75d 100644
--- a/lib/ssl/src/ssl_alert.erl
+++ b/lib/ssl/src/ssl_alert.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -161,5 +161,9 @@ description_txt(?BAD_CERTIFICATE_HASH_VALUE) ->
"bad certificate hash value";
description_txt(?UNKNOWN_PSK_IDENTITY) ->
"unknown psk identity";
+description_txt(?INAPPROPRIATE_FALLBACK) ->
+ "inappropriate fallback";
+description_txt(?NO_APPLICATION_PROTOCOL) ->
+ "no application protocol";
description_txt(Enum) ->
lists:flatten(io_lib:format("unsupported/unknown alert: ~p", [Enum])).
diff --git a/lib/ssl/src/ssl_alert.hrl b/lib/ssl/src/ssl_alert.hrl
index f4f1d74264..70b7523975 100644
--- a/lib/ssl/src/ssl_alert.hrl
+++ b/lib/ssl/src/ssl_alert.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -58,6 +58,7 @@
%% protocol_version(70),
%% insufficient_security(71),
%% internal_error(80),
+%% inappropriate_fallback(86),
%% user_canceled(90),
%% no_renegotiation(100),
%% RFC 4366
@@ -68,6 +69,8 @@
%% bad_certificate_hash_value(114),
%% RFC 4366
%% unknown_psk_identity(115),
+%% RFC 7301
+%% no_application_protocol(120),
%% (255)
%% } AlertDescription;
@@ -93,6 +96,7 @@
-define(PROTOCOL_VERSION, 70).
-define(INSUFFICIENT_SECURITY, 71).
-define(INTERNAL_ERROR, 80).
+-define(INAPPROPRIATE_FALLBACK, 86).
-define(USER_CANCELED, 90).
-define(NO_RENEGOTIATION, 100).
-define(UNSUPPORTED_EXTENSION, 110).
@@ -101,6 +105,7 @@
-define(BAD_CERTIFICATE_STATUS_RESPONSE, 113).
-define(BAD_CERTIFICATE_HASH_VALUE, 114).
-define(UNKNOWN_PSK_IDENTITY, 115).
+-define(NO_APPLICATION_PROTOCOL, 120).
-define(ALERT_REC(Level,Desc), #alert{level=Level,description=Desc,where={?FILE, ?LINE}}).
diff --git a/lib/ssl/src/ssl_api.hrl b/lib/ssl/src/ssl_api.hrl
index 22185ff60a..78127eeafa 100644
--- a/lib/ssl/src/ssl_api.hrl
+++ b/lib/ssl/src/ssl_api.hrl
@@ -49,6 +49,8 @@
{srp_identity, {string(), string()}} |
{ciphers, ciphers()} | {ssl_imp, ssl_imp()} | {reuse_sessions, boolean()} |
{reuse_session, fun()} | {hibernate_after, integer()|undefined} |
+ {alpn_advertised_protocols, [binary()]} |
+ {alpn_preferred_protocols, [binary()]} |
{next_protocols_advertised, list(binary())} |
{client_preferred_next_protocols, binary(), client | server, list(binary())}.
diff --git a/lib/ssl/src/ssl_certificate.erl b/lib/ssl/src/ssl_certificate.erl
index 9c0ed181fe..34e4a8b447 100644
--- a/lib/ssl/src/ssl_certificate.erl
+++ b/lib/ssl/src/ssl_certificate.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014 All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015 All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -33,7 +33,8 @@
-export([trusted_cert_and_path/4,
certificate_chain/3,
file_to_certificats/2,
- validate_extension/3,
+ file_to_crls/2,
+ validate/3,
is_valid_extkey_usage/2,
is_valid_key_usage/2,
select_extension/2,
@@ -83,16 +84,19 @@ trusted_cert_and_path(CertChain, CertDbHandle, CertDbRef, PartialChainHandler) -
end.
%%--------------------------------------------------------------------
--spec certificate_chain(undefined | binary(), db_handle(), certdb_ref()) ->
- {error, no_cert} | {ok, [der_cert()]}.
+-spec certificate_chain(undefined | binary() | #'OTPCertificate'{} , db_handle(), certdb_ref()) ->
+ {error, no_cert} | {ok, #'OTPCertificate'{} | undefined, [der_cert()]}.
%%
%% Description: Return the certificate chain to send to peer.
%%--------------------------------------------------------------------
certificate_chain(undefined, _, _) ->
{error, no_cert};
-certificate_chain(OwnCert, CertDbHandle, CertsDbRef) ->
+certificate_chain(OwnCert, CertDbHandle, CertsDbRef) when is_binary(OwnCert) ->
ErlCert = public_key:pkix_decode_cert(OwnCert, otp),
- certificate_chain(ErlCert, OwnCert, CertDbHandle, CertsDbRef, [OwnCert]).
+ certificate_chain(ErlCert, OwnCert, CertDbHandle, CertsDbRef, [OwnCert]);
+certificate_chain(OwnCert, CertDbHandle, CertsDbRef) ->
+ DerCert = public_key:pkix_encode('OTPCertificate', OwnCert, otp),
+ certificate_chain(OwnCert, DerCert, CertDbHandle, CertsDbRef, [DerCert]).
%%--------------------------------------------------------------------
-spec file_to_certificats(binary(), term()) -> [der_cert()].
%%
@@ -101,29 +105,39 @@ certificate_chain(OwnCert, CertDbHandle, CertsDbRef) ->
file_to_certificats(File, DbHandle) ->
{ok, List} = ssl_manager:cache_pem_file(File, DbHandle),
[Bin || {'Certificate', Bin, not_encrypted} <- List].
+
%%--------------------------------------------------------------------
--spec validate_extension(term(), {extension, #'Extension'{}} | {bad_cert, atom()} | valid,
- term()) -> {valid, term()} |
- {fail, tuple()} |
- {unknown, term()}.
+-spec file_to_crls(binary(), term()) -> [der_cert()].
+%%
+%% Description: Return list of DER encoded certificates.
+%%--------------------------------------------------------------------
+file_to_crls(File, DbHandle) ->
+ {ok, List} = ssl_manager:cache_pem_file(File, DbHandle),
+ [Bin || {'CertificateList', Bin, not_encrypted} <- List].
+
+%%--------------------------------------------------------------------
+-spec validate(term(), {extension, #'Extension'{}} | {bad_cert, atom()} | valid,
+ term()) -> {valid, term()} |
+ {fail, tuple()} |
+ {unknown, term()}.
%%
%% Description: Validates ssl/tls specific extensions
%%--------------------------------------------------------------------
-validate_extension(_,{extension, #'Extension'{extnID = ?'id-ce-extKeyUsage',
- extnValue = KeyUse}}, Role) ->
+validate(_,{extension, #'Extension'{extnID = ?'id-ce-extKeyUsage',
+ extnValue = KeyUse}}, {Role, _,_, _, _}) ->
case is_valid_extkey_usage(KeyUse, Role) of
true ->
{valid, Role};
false ->
{fail, {bad_cert, invalid_ext_key_usage}}
end;
-validate_extension(_, {bad_cert, _} = Reason, _) ->
- {fail, Reason};
-validate_extension(_, {extension, _}, Role) ->
+validate(_, {extension, _}, Role) ->
{unknown, Role};
-validate_extension(_, valid, Role) ->
+validate(_, {bad_cert, _} = Reason, _) ->
+ {fail, Reason};
+validate(_, valid, Role) ->
{valid, Role};
-validate_extension(_, valid_peer, Role) ->
+validate(_, valid_peer, Role) ->
{valid, Role}.
%%--------------------------------------------------------------------
@@ -194,14 +208,14 @@ certificate_chain(OtpCert, _Cert, CertDbHandle, CertsDbRef, Chain) ->
%% certificate. The verification of the
%% cert chain will fail if guess is
%% incorrect.
- {ok, lists:reverse(Chain)}
+ {ok, undefined, lists:reverse(Chain)}
end;
{{ok, {SerialNr, Issuer}}, SelfSigned} ->
certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, SelfSigned)
end.
-certificate_chain(_,_, Chain, _SerialNr, _Issuer, true) ->
- {ok, lists:reverse(Chain)};
+certificate_chain(_, _, [RootCert | _] = Chain, _, _, true) ->
+ {ok, RootCert, lists:reverse(Chain)};
certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, _SelfSigned) ->
case ssl_manager:lookup_trusted_cert(CertDbHandle, CertsDbRef,
@@ -214,7 +228,7 @@ certificate_chain(CertDbHandle, CertsDbRef, Chain, SerialNr, Issuer, _SelfSigned
%% The trusted cert may be obmitted from the chain as the
%% counter part needs to have it anyway to be able to
%% verify it.
- {ok, lists:reverse(Chain)}
+ {ok, undefined, lists:reverse(Chain)}
end.
find_issuer(OtpCert, CertDbHandle) ->
@@ -282,7 +296,7 @@ other_issuer(OtpCert, CertDbHandle) ->
handle_path({BinCert, OTPCert}, Path, PartialChainHandler) ->
case public_key:pkix_is_self_signed(OTPCert) of
true ->
- {BinCert, Path};
+ {BinCert, lists:delete(BinCert, Path)};
false ->
handle_incomplete_chain(Path, PartialChainHandler)
end.
diff --git a/lib/ssl/src/ssl_cipher.erl b/lib/ssl/src/ssl_cipher.erl
index e1d89c149e..8584e56d6c 100644
--- a/lib/ssl/src/ssl_cipher.erl
+++ b/lib/ssl/src/ssl_cipher.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -33,11 +33,11 @@
-include_lib("public_key/include/public_key.hrl").
-export([security_parameters/2, security_parameters/3, suite_definition/1,
- cipher_init/3, decipher/5, cipher/5, decipher_aead/6, cipher_aead/6,
+ cipher_init/3, decipher/6, cipher/5, decipher_aead/6, cipher_aead/6,
suite/1, suites/1, all_suites/1,
ec_keyed_suites/0, anonymous_suites/1, psk_suites/1, srp_suites/0,
- openssl_suite/1, openssl_suite_name/1, filter/2, filter_suites/1,
- hash_algorithm/1, sign_algorithm/1, is_acceptable_hash/2]).
+ rc4_suites/1, openssl_suite/1, openssl_suite_name/1, filter/2, filter_suites/1,
+ hash_algorithm/1, sign_algorithm/1, is_acceptable_hash/2, is_fallback/1]).
-export_type([cipher_suite/0,
erl_cipher_suite/0, openssl_cipher_suite/0,
@@ -182,7 +182,8 @@ block_cipher(Fun, BlockSz, #cipher_state{key=Key, iv=IV} = CS0,
{T, CS0#cipher_state{iv=NextIV}}.
%%--------------------------------------------------------------------
--spec decipher(cipher_enum(), integer(), #cipher_state{}, binary(), ssl_record:ssl_version()) ->
+-spec decipher(cipher_enum(), integer(), #cipher_state{}, binary(),
+ ssl_record:ssl_version(), boolean()) ->
{binary(), binary(), #cipher_state{}} | #alert{}.
%%
%% Description: Decrypts the data and the MAC using cipher described
@@ -190,9 +191,9 @@ block_cipher(Fun, BlockSz, #cipher_state{key=Key, iv=IV} = CS0,
%% Used for "MAC then Cipher" suites where first the data is decrypted
%% and the an HMAC of the decrypted data is checked
%%-------------------------------------------------------------------
-decipher(?NULL, _HashSz, CipherState, Fragment, _) ->
+decipher(?NULL, _HashSz, CipherState, Fragment, _, _) ->
{Fragment, <<>>, CipherState};
-decipher(?RC4, HashSz, CipherState = #cipher_state{state = State0}, Fragment, _) ->
+decipher(?RC4, HashSz, CipherState = #cipher_state{state = State0}, Fragment, _, _) ->
try crypto:stream_decrypt(State0, Fragment) of
{State, Text} ->
GSC = generic_stream_cipher_from_bin(Text, HashSz),
@@ -208,20 +209,20 @@ decipher(?RC4, HashSz, CipherState = #cipher_state{state = State0}, Fragment, _)
?ALERT_REC(?FATAL, ?BAD_RECORD_MAC)
end;
-decipher(?DES, HashSz, CipherState, Fragment, Version) ->
+decipher(?DES, HashSz, CipherState, Fragment, Version, PaddingCheck) ->
block_decipher(fun(Key, IV, T) ->
crypto:block_decrypt(des_cbc, Key, IV, T)
- end, CipherState, HashSz, Fragment, Version);
-decipher(?'3DES', HashSz, CipherState, Fragment, Version) ->
+ end, CipherState, HashSz, Fragment, Version, PaddingCheck);
+decipher(?'3DES', HashSz, CipherState, Fragment, Version, PaddingCheck) ->
block_decipher(fun(<<K1:8/binary, K2:8/binary, K3:8/binary>>, IV, T) ->
crypto:block_decrypt(des3_cbc, [K1, K2, K3], IV, T)
- end, CipherState, HashSz, Fragment, Version);
-decipher(?AES_CBC, HashSz, CipherState, Fragment, Version) ->
+ end, CipherState, HashSz, Fragment, Version, PaddingCheck);
+decipher(?AES_CBC, HashSz, CipherState, Fragment, Version, PaddingCheck) ->
block_decipher(fun(Key, IV, T) when byte_size(Key) =:= 16 ->
crypto:block_decrypt(aes_cbc128, Key, IV, T);
(Key, IV, T) when byte_size(Key) =:= 32 ->
crypto:block_decrypt(aes_cbc256, Key, IV, T)
- end, CipherState, HashSz, Fragment, Version).
+ end, CipherState, HashSz, Fragment, Version, PaddingCheck).
%%--------------------------------------------------------------------
-spec decipher_aead(cipher_enum(), #cipher_state{}, integer(), binary(), binary(), ssl_record:ssl_version()) ->
@@ -237,7 +238,7 @@ decipher_aead(?CHACHA20_POLY1305, CipherState, SeqNo, AAD, Fragment, Version) ->
aead_decipher(chacha20_poly1305, CipherState, SeqNo, AAD, Fragment, Version).
block_decipher(Fun, #cipher_state{key=Key, iv=IV} = CipherState0,
- HashSz, Fragment, Version) ->
+ HashSz, Fragment, Version, PaddingCheck) ->
try
Text = Fun(Key, IV, Fragment),
NextIV = next_iv(Fragment, IV),
@@ -245,7 +246,7 @@ block_decipher(Fun, #cipher_state{key=Key, iv=IV} = CipherState0,
Content = GBC#generic_block_cipher.content,
Mac = GBC#generic_block_cipher.mac,
CipherState1 = CipherState0#cipher_state{iv=GBC#generic_block_cipher.next_iv},
- case is_correct_padding(GBC, Version) of
+ case is_correct_padding(GBC, Version, PaddingCheck) of
true ->
{Content, Mac, CipherState1};
false ->
@@ -306,9 +307,10 @@ suites({3, N}) ->
all_suites(Version) ->
suites(Version)
- ++ ssl_cipher:anonymous_suites(Version)
- ++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites().
+ ++ anonymous_suites(Version)
+ ++ psk_suites(Version)
+ ++ srp_suites()
+ ++ rc4_suites(Version).
%%--------------------------------------------------------------------
-spec anonymous_suites(ssl_record:ssl_version() | integer()) -> [cipher_suite()].
%%
@@ -394,6 +396,24 @@ srp_suites() ->
?TLS_SRP_SHA_WITH_AES_256_CBC_SHA,
?TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA,
?TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA].
+%%--------------------------------------------------------------------
+-spec rc4_suites(Version::ssl_record:ssl_version()) -> [cipher_suite()].
+%%
+%% Description: Returns a list of the RSA | ECDH/RSA | ECDH/ECDSA
+%% cipher suites that use RC4; they are only supported if explicitly
+%% enabled by the user, as RC4 is no longer considered secure. Other
+%% RC4 suites already belonged to the user-configured-only category.
+%%--------------------------------------------------------------------
+rc4_suites({3, 0}) ->
+ [?TLS_RSA_WITH_RC4_128_SHA,
+ ?TLS_RSA_WITH_RC4_128_MD5];
+rc4_suites({3, N}) when N =< 3 ->
+ [?TLS_ECDHE_ECDSA_WITH_RC4_128_SHA,
+ ?TLS_ECDHE_RSA_WITH_RC4_128_SHA,
+ ?TLS_RSA_WITH_RC4_128_SHA,
+ ?TLS_RSA_WITH_RC4_128_MD5,
+ ?TLS_ECDH_ECDSA_WITH_RC4_128_SHA,
+ ?TLS_ECDH_RSA_WITH_RC4_128_SHA].
%%--------------------------------------------------------------------
-spec suite_definition(cipher_suite()) -> int_cipher_suite().
@@ -1422,6 +1442,9 @@ is_acceptable_prf(default_prf, _) ->
is_acceptable_prf(Prf, Algos) ->
proplists:get_bool(Prf, Algos).
+is_fallback(CipherSuites)->
+ lists:member(?TLS_FALLBACK_SCSV, CipherSuites).
+
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
@@ -1632,16 +1655,18 @@ generic_stream_cipher_from_bin(T, HashSz) ->
#generic_stream_cipher{content=Content,
mac=Mac}.
-%% For interoperability reasons we do not check the padding content in
-%% SSL 3.0 and TLS 1.0 as it is not strictly required and breaks
-%% interopability with for instance Google.
is_correct_padding(#generic_block_cipher{padding_length = Len,
- padding = Padding}, {3, N})
- when N == 0; N == 1 ->
- Len == byte_size(Padding);
-%% Padding must be check in TLS 1.1 and after
+ padding = Padding}, {3, 0}, _) ->
+ Len == byte_size(Padding); %% Only length check is done in SSL 3.0 spec
+%% For interoperability reasons it is possible to disable
+%% the padding check when using TLS 1.0, as it is not strictly required
+%% in the spec (only recommended). However, this makes TLS 1.0 vulnerable
+%% to the POODLE attack, so by default this clause will not match.
+is_correct_padding(GenBlockCipher, {3, 1}, false) ->
+ is_correct_padding(GenBlockCipher, {3, 0}, false);
+%% Padding must be checked in TLS 1.1 and after
is_correct_padding(#generic_block_cipher{padding_length = Len,
- padding = Padding}, _) ->
+ padding = Padding}, _, _) ->
Len == byte_size(Padding) andalso
list_to_binary(lists:duplicate(Len, Len)) == Padding.
diff --git a/lib/ssl/src/ssl_cipher.hrl b/lib/ssl/src/ssl_cipher.hrl
index 448c2405aa..8689a3c68b 100644
--- a/lib/ssl/src/ssl_cipher.hrl
+++ b/lib/ssl/src/ssl_cipher.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -356,6 +356,10 @@
%% hello extension data as they should.
-define(TLS_EMPTY_RENEGOTIATION_INFO_SCSV, <<?BYTE(16#00), ?BYTE(16#FF)>>).
+%% TLS Fallback Signaling Cipher Suite Value (SCSV) for Preventing Protocol
+%% Downgrade Attacks
+-define(TLS_FALLBACK_SCSV, <<?BYTE(16#56), ?BYTE(16#00)>>).
+
%%% PSK Cipher Suites RFC 4279
%% TLS_PSK_WITH_RC4_128_SHA = { 0x00, 0x8A };
diff --git a/lib/ssl/src/ssl_config.erl b/lib/ssl/src/ssl_config.erl
index 545b8aa0f6..fc8b214a29 100644
--- a/lib/ssl/src/ssl_config.erl
+++ b/lib/ssl/src/ssl_config.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -31,13 +31,13 @@ init(SslOpts, Role) ->
init_manager_name(SslOpts#ssl_options.erl_dist),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, OwnCert}
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbHandle, OwnCert}
= init_certificates(SslOpts, Role),
PrivateKey =
init_private_key(PemCacheHandle, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile,
SslOpts#ssl_options.password, Role),
DHParams = init_diffie_hellman(PemCacheHandle, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, OwnCert, PrivateKey, DHParams}.
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, PrivateKey, DHParams}.
init_manager_name(false) ->
put(ssl_manager, ssl_manager:manager_name(normal));
@@ -46,9 +46,11 @@ init_manager_name(true) ->
init_certificates(#ssl_options{cacerts = CaCerts,
cacertfile = CACertFile,
- certfile = CertFile,
- cert = Cert}, Role) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle} =
+ certfile = CertFile,
+ cert = Cert,
+ crl_cache = CRLCache
+ }, Role) ->
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo} =
try
Certs = case CaCerts of
undefined ->
@@ -56,39 +58,40 @@ init_certificates(#ssl_options{cacerts = CaCerts,
_ ->
{der, CaCerts}
end,
- {ok, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role)
+ {ok, _, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role, CRLCache)
catch
_:Reason ->
file_error(CACertFile, {cacertfile, Reason})
end,
init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle,
- CacheHandle, CertFile, Role).
+ CacheHandle, CRLDbInfo, CertFile, Role).
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, <<>>, _) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, undefined};
+init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle,
+ CRLDbInfo, <<>>, _) ->
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined};
init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle,
- CacheHandle, CertFile, client) ->
+ CacheHandle, CRLDbInfo, CertFile, client) ->
try
%% Ignoring potential proxy-certificates see:
%% http://dev.globus.org/wiki/Security/ProxyFileFormat
[OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, OwnCert}
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, OwnCert}
catch _Error:_Reason ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, undefined}
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined}
end;
init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle,
- PemCacheHandle, CacheRef, CertFile, server) ->
+ PemCacheHandle, CacheRef, CRLDbInfo, CertFile, server) ->
try
[OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, OwnCert}
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, OwnCert}
catch
_:Reason ->
file_error(CertFile, {certfile, Reason})
end;
-init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, _, _) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, Cert}.
+init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, _, _) ->
+ {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, Cert}.
init_private_key(_, undefined, <<>>, _Password, _Client) ->
undefined;
diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl
index b6059eac58..64fa7bab0d 100644
--- a/lib/ssl/src/ssl_connection.erl
+++ b/lib/ssl/src/ssl_connection.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -41,11 +41,12 @@
%% User Events
-export([send/2, recv/3, close/1, shutdown/2,
- new_user/2, get_opts/2, set_opts/2, info/1, session_info/1,
- peer_certificate/1, renegotiation/1, negotiated_next_protocol/1, prf/5
+ new_user/2, get_opts/2, set_opts/2, session_info/1,
+ peer_certificate/1, renegotiation/1, negotiated_protocol/1, prf/5,
+ connection_information/1
]).
--export([handle_session/6]).
+-export([handle_session/7]).
%% SSL FSM state functions
-export([hello/3, abbreviated/3, certify/3, cipher/3, connection/3]).
@@ -161,6 +162,14 @@ recv(Pid, Length, Timeout) ->
sync_send_all_state_event(Pid, {recv, Length, Timeout}).
%%--------------------------------------------------------------------
+-spec connection_information(pid()) -> {ok, list()} | {error, reason()}.
+%%
+%% Description: Return connection information such as the negotiated
+%% protocol, cipher suite, and SNI hostname
+%%--------------------------------------------------------------------
+connection_information(Pid) when is_pid(Pid) ->
+ sync_send_all_state_event(Pid, connection_information).
+
+%%--------------------------------------------------------------------
-spec close(pid()) -> ok | {error, reason()}.
%%
%% Description: Close an ssl connection
@@ -191,12 +200,12 @@ new_user(ConnectionPid, User) ->
sync_send_all_state_event(ConnectionPid, {new_user, User}).
%%--------------------------------------------------------------------
--spec negotiated_next_protocol(pid()) -> {ok, binary()} | {error, reason()}.
+-spec negotiated_protocol(pid()) -> {ok, binary()} | {error, reason()}.
%%
%% Description: Returns the negotiated protocol
%%--------------------------------------------------------------------
-negotiated_next_protocol(ConnectionPid) ->
- sync_send_all_state_event(ConnectionPid, negotiated_next_protocol).
+negotiated_protocol(ConnectionPid) ->
+ sync_send_all_state_event(ConnectionPid, negotiated_protocol).
%%--------------------------------------------------------------------
-spec get_opts(pid(), list()) -> {ok, list()} | {error, reason()}.
@@ -214,14 +223,6 @@ set_opts(ConnectionPid, Options) ->
sync_send_all_state_event(ConnectionPid, {set_opts, Options}).
%%--------------------------------------------------------------------
--spec info(pid()) -> {ok, {atom(), tuple()}} | {error, reason()}.
-%%
-%% Description: Returns ssl protocol and cipher used for the connection
-%%--------------------------------------------------------------------
-info(ConnectionPid) ->
- sync_send_all_state_event(ConnectionPid, info).
-
-%%--------------------------------------------------------------------
-spec session_info(pid()) -> {ok, list()} | {error, reason()}.
%%
%% Description: Returns info about the ssl session
@@ -258,27 +259,26 @@ prf(ConnectionPid, Secret, Label, Seed, WantedLength) ->
handle_session(#server_hello{cipher_suite = CipherSuite,
compression_method = Compression},
- Version, NewId, ConnectionStates, NextProtocol,
+ Version, NewId, ConnectionStates, ProtoExt, Protocol0,
#state{session = #session{session_id = OldId},
- negotiated_version = ReqVersion} = State0) ->
+ negotiated_version = ReqVersion,
+ negotiated_protocol = CurrentProtocol} = State0) ->
{KeyAlgorithm, _, _, _} =
ssl_cipher:suite_definition(CipherSuite),
PremasterSecret = make_premaster_secret(ReqVersion, KeyAlgorithm),
-
- NewNextProtocol = case NextProtocol of
- undefined ->
- State0#state.next_protocol;
- _ ->
- NextProtocol
- end,
-
+
+ {ExpectNPN, Protocol} = case Protocol0 of
+ undefined -> {false, CurrentProtocol};
+ _ -> {ProtoExt =:= npn, Protocol0}
+ end,
+
State = State0#state{key_algorithm = KeyAlgorithm,
negotiated_version = Version,
connection_states = ConnectionStates,
premaster_secret = PremasterSecret,
- expecting_next_protocol_negotiation = NextProtocol =/= undefined,
- next_protocol = NewNextProtocol},
+ expecting_next_protocol_negotiation = ExpectNPN,
+ negotiated_protocol = Protocol},
case ssl_session:is_new(OldId, NewId) of
true ->
@@ -371,7 +371,7 @@ abbreviated(#finished{verify_data = Data} = Finished,
abbreviated(#next_protocol{selected_protocol = SelectedProtocol},
#state{role = server, expecting_next_protocol_negotiation = true} = State0,
Connection) ->
- {Record, State} = Connection:next_record(State0#state{next_protocol = SelectedProtocol}),
+ {Record, State} = Connection:next_record(State0#state{negotiated_protocol = SelectedProtocol}),
Connection:next_state(abbreviated, abbreviated, Record, State#state{expecting_next_protocol_negotiation = false});
abbreviated(timeout, State, _) ->
@@ -411,11 +411,15 @@ certify(#certificate{} = Cert,
role = Role,
cert_db = CertDbHandle,
cert_db_ref = CertDbRef,
+ crl_db = CRLDbInfo,
ssl_options = Opts} = State, Connection) ->
- case ssl_handshake:certify(Cert, CertDbHandle, CertDbRef, Opts#ssl_options.depth,
+ case ssl_handshake:certify(Cert, CertDbHandle, CertDbRef,
+ Opts#ssl_options.depth,
Opts#ssl_options.verify,
Opts#ssl_options.verify_fun,
Opts#ssl_options.partial_chain,
+ Opts#ssl_options.crl_check,
+ CRLDbInfo,
Role) of
{PeerCert, PublicKeyInfo} ->
handle_peer_cert(Role, PeerCert, PublicKeyInfo,
@@ -589,7 +593,7 @@ cipher(#certificate_verify{signature = Signature, hashsign_algorithm = CertHashS
%% client must send a next protocol message if we are expecting it
cipher(#finished{}, #state{role = server, expecting_next_protocol_negotiation = true,
- next_protocol = undefined, negotiated_version = Version} = State0,
+ negotiated_protocol = undefined, negotiated_version = Version} = State0,
Connection) ->
Connection:handle_own_alert(?ALERT_REC(?FATAL,?UNEXPECTED_MESSAGE), Version, cipher, State0);
@@ -619,7 +623,7 @@ cipher(#finished{verify_data = Data} = Finished,
cipher(#next_protocol{selected_protocol = SelectedProtocol},
#state{role = server, expecting_next_protocol_negotiation = true,
expecting_finished = true} = State0, Connection) ->
- {Record, State} = Connection:next_record(State0#state{next_protocol = SelectedProtocol}),
+ {Record, State} = Connection:next_record(State0#state{negotiated_protocol = SelectedProtocol}),
Connection:next_state(cipher, cipher, Record, State#state{expecting_next_protocol_negotiation = false});
cipher(timeout, State, _) ->
@@ -755,10 +759,10 @@ handle_sync_event({get_opts, OptTags}, _From, StateName,
socket_options = SockOpts} = State) ->
OptsReply = get_socket_opts(Transport, Socket, OptTags, SockOpts, []),
{reply, OptsReply, StateName, State, get_timeout(State)};
-handle_sync_event(negotiated_next_protocol, _From, StateName, #state{next_protocol = undefined} = State) ->
- {reply, {error, next_protocol_not_negotiated}, StateName, State, get_timeout(State)};
-handle_sync_event(negotiated_next_protocol, _From, StateName, #state{next_protocol = NextProtocol} = State) ->
- {reply, {ok, NextProtocol}, StateName, State, get_timeout(State)};
+handle_sync_event(negotiated_protocol, _From, StateName, #state{negotiated_protocol = undefined} = State) ->
+ {reply, {error, protocol_not_negotiated}, StateName, State, get_timeout(State)};
+handle_sync_event(negotiated_protocol, _From, StateName, #state{negotiated_protocol = SelectedProtocol} = State) ->
+ {reply, {ok, SelectedProtocol}, StateName, State, get_timeout(State)};
handle_sync_event({set_opts, Opts0}, _From, StateName0,
#state{socket_options = Opts1,
protocol_cb = Connection,
@@ -826,13 +830,6 @@ handle_sync_event({prf, Secret, Label, Seed, WantedLength}, _, StateName,
error:Reason -> {error, Reason}
end,
{reply, Reply, StateName, State, get_timeout(State)};
-handle_sync_event(info, _, StateName,
- #state{negotiated_version = Version,
- session = #session{cipher_suite = Suite}} = State) ->
-
- AtomVersion = tls_record:protocol_version(Version),
- {reply, {ok, {AtomVersion, ssl:suite_definition(Suite)}},
- StateName, State, get_timeout(State)};
handle_sync_event(session_info, _, StateName,
#state{session = #session{session_id = Id,
cipher_suite = Suite}} = State) ->
@@ -842,7 +839,10 @@ handle_sync_event(session_info, _, StateName,
handle_sync_event(peer_certificate, _, StateName,
#state{session = #session{peer_certificate = Cert}}
= State) ->
- {reply, {ok, Cert}, StateName, State, get_timeout(State)}.
+ {reply, {ok, Cert}, StateName, State, get_timeout(State)};
+handle_sync_event(connection_information, _, StateName, #state{sni_hostname = SNIHostname, session = #session{cipher_suite = CipherSuite}, negotiated_version = Version} = State) ->
+ {reply, {ok, [{protocol, tls_record:protocol_version(Version)}, {cipher_suite, ssl:suite_definition(CipherSuite)}, {sni_hostname, SNIHostname}]}, StateName, State, get_timeout(State)}.
+
handle_info({ErrorTag, Socket, econnaborted}, StateName,
#state{socket = Socket, transport_cb = Transport,
@@ -964,7 +964,7 @@ format_status(terminate, [_, State]) ->
%%% Internal functions
%%--------------------------------------------------------------------
ssl_config(Opts, Role, State) ->
- {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, OwnCert, Key, DHParams} =
+ {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbInfo, OwnCert, Key, DHParams} =
ssl_config:init(Opts, Role),
Handshake = ssl_handshake:init_handshake_history(),
TimeStamp = calendar:datetime_to_gregorian_seconds({date(), time()}),
@@ -975,6 +975,7 @@ ssl_config(Opts, Role, State) ->
file_ref_db = FileRefHandle,
cert_db_ref = Ref,
cert_db = CertDbHandle,
+ crl_db = CRLDbInfo,
session_cache = CacheHandle,
private_key = Key,
diffie_hellman_params = DHParams,
@@ -1479,11 +1480,11 @@ finalize_handshake(State0, StateName, Connection) ->
next_protocol(#state{role = server} = State, _) ->
State;
-next_protocol(#state{next_protocol = undefined} = State, _) ->
+next_protocol(#state{negotiated_protocol = undefined} = State, _) ->
State;
next_protocol(#state{expecting_next_protocol_negotiation = false} = State, _) ->
State;
-next_protocol(#state{next_protocol = NextProtocol} = State0, Connection) ->
+next_protocol(#state{negotiated_protocol = NextProtocol} = State0, Connection) ->
NextProtocolMessage = ssl_handshake:next_protocol(NextProtocol),
Connection:send_handshake(NextProtocolMessage, State0).
diff --git a/lib/ssl/src/ssl_connection.hrl b/lib/ssl/src/ssl_connection.hrl
index b9a1ef3a84..d95b51132a 100644
--- a/lib/ssl/src/ssl_connection.hrl
+++ b/lib/ssl/src/ssl_connection.hrl
@@ -1,8 +1,7 @@
-
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -53,6 +52,7 @@
session :: #session{} | secret_printout(),
session_cache :: db_handle(),
session_cache_cb :: atom(),
+ crl_db :: term(),
negotiated_version :: ssl_record:ssl_version(),
client_certificate_requested = false :: boolean(),
key_algorithm :: ssl_cipher:key_algo(),
@@ -78,9 +78,10 @@
allow_renegotiate = true ::boolean(),
expecting_next_protocol_negotiation = false ::boolean(),
expecting_finished = false ::boolean(),
- next_protocol = undefined :: undefined | binary(),
+ negotiated_protocol = undefined :: undefined | binary(),
client_ecc, % {Curves, PointFmt}
- tracker :: pid() %% Tracker process for listen socket
+ tracker :: pid(), %% Tracker process for listen socket
+ sni_hostname = undefined
}).
-define(DEFAULT_DIFFIE_HELLMAN_PARAMS,
diff --git a/lib/ssl/src/ssl_crl.erl b/lib/ssl/src/ssl_crl.erl
new file mode 100644
index 0000000000..1a08d3c80a
--- /dev/null
+++ b/lib/ssl/src/ssl_crl.erl
@@ -0,0 +1,80 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+
+%%----------------------------------------------------------------------
+%% Purpose: CRL handling
+%%----------------------------------------------------------------------
+
+-module(ssl_crl).
+
+-include("ssl_alert.hrl").
+-include("ssl_internal.hrl").
+-include_lib("public_key/include/public_key.hrl").
+
+-export([trusted_cert_and_path/3]).
+
+trusted_cert_and_path(CRL, {SerialNumber, Issuer},{Db, DbRef} = DbHandle) ->
+ case ssl_pkix_db:lookup_trusted_cert(Db, DbRef, SerialNumber, Issuer) of
+ undefined ->
+ trusted_cert_and_path(CRL, issuer_not_found, DbHandle);
+ {ok, {_, OtpCert}} ->
+ {ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef),
+ {ok, Root, lists:reverse(Chain)}
+ end;
+
+trusted_cert_and_path(CRL, issuer_not_found, {Db, DbRef} = DbHandle) ->
+ try find_issuer(CRL, DbHandle) of
+ OtpCert ->
+ {ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef),
+ {ok, Root, lists:reverse(Chain)}
+ catch
+ throw:_ ->
+ {error, issuer_not_found}
+ end.
+
+find_issuer(CRL, {Db,_}) ->
+ Issuer = public_key:pkix_normalize_name(public_key:pkix_crl_issuer(CRL)),
+ IsIssuerFun =
+ fun({_Key, {_Der,ErlCertCandidate}}, Acc) ->
+ verify_crl_issuer(CRL, ErlCertCandidate, Issuer, Acc);
+ (_, Acc) ->
+ Acc
+ end,
+
+ try ssl_pkix_db:foldl(IsIssuerFun, issuer_not_found, Db) of
+ issuer_not_found ->
+ {error, issuer_not_found}
+ catch
+ {ok, IssuerCert} ->
+ IssuerCert
+ end.
+
+
+verify_crl_issuer(CRL, ErlCertCandidate, Issuer, NotIssuer) ->
+ TBSCert = ErlCertCandidate#'OTPCertificate'.tbsCertificate,
+ case public_key:pkix_normalize_name(TBSCert#'OTPTBSCertificate'.subject) of
+ Issuer ->
+ case public_key:pkix_crl_verify(CRL, ErlCertCandidate) of
+ true ->
+ throw({ok, ErlCertCandidate});
+ false ->
+ NotIssuer
+ end;
+ _ ->
+ NotIssuer
+ end.
diff --git a/lib/ssl/src/ssl_crl_cache.erl b/lib/ssl/src/ssl_crl_cache.erl
new file mode 100644
index 0000000000..b9d6a61c3b
--- /dev/null
+++ b/lib/ssl/src/ssl_crl_cache.erl
@@ -0,0 +1,179 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+
+%%----------------------------------------------------------------------
+%% Purpose: Simple default CRL cache
+%%----------------------------------------------------------------------
+
+-module(ssl_crl_cache).
+
+-include("ssl_internal.hrl").
+-include_lib("public_key/include/public_key.hrl").
+
+-behaviour(ssl_crl_cache_api).
+
+-export([lookup/2, select/2, fresh_crl/2]).
+-export([insert/1, insert/2, delete/1]).
+
+%%====================================================================
+%% Cache callback API
+%%====================================================================
+
+lookup(#'DistributionPoint'{distributionPoint = {fullName, Names}},
+ CRLDbInfo) ->
+ get_crls(Names, CRLDbInfo);
+lookup(_,_) ->
+ not_available.
+
+select(Issuer, {{_Cache, Mapping},_}) ->
+ case ssl_pkix_db:lookup(Issuer, Mapping) of
+ undefined ->
+ [];
+ CRLs ->
+ CRLs
+ end.
+
+fresh_crl(#'DistributionPoint'{distributionPoint = {fullName, Names}}, CRL) ->
+ case get_crls(Names, undefined) of
+ not_available ->
+ CRL;
+ [NewCRL] ->
+ NewCRL
+ end.
+
+%%====================================================================
+%% API
+%%====================================================================
+
+insert(CRLs) ->
+ insert(?NO_DIST_POINT, CRLs).
+
+insert(URI, {file, File}) when is_list(URI) ->
+ case file:read_file(File) of
+ {ok, PemBin} ->
+ PemEntries = public_key:pem_decode(PemBin),
+ CRLs = [ CRL || {'CertificateList', CRL, not_encrypted}
+ <- PemEntries],
+ do_insert(URI, CRLs);
+ Error ->
+ Error
+ end;
+insert(URI, {der, CRLs}) ->
+ do_insert(URI, CRLs).
+
+delete({file, File}) ->
+ case file:read_file(File) of
+ {ok, PemBin} ->
+ PemEntries = public_key:pem_decode(PemBin),
+ CRLs = [ CRL || {'CertificateList', CRL, not_encrypted}
+ <- PemEntries],
+ ssl_manager:delete_crls({?NO_DIST_POINT, CRLs});
+ Error ->
+ Error
+ end;
+delete({der, CRLs}) ->
+ ssl_manager:delete_crls({?NO_DIST_POINT, CRLs});
+
+delete(URI) ->
+ case http_uri:parse(URI) of
+ {ok, {http, _, _ , _, Path,_}} ->
+ ssl_manager:delete_crls(string:strip(Path, left, $/));
+ _ ->
+ {error, {only_http_distribution_points_supported, URI}}
+ end.
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+do_insert(URI, CRLs) ->
+ case http_uri:parse(URI) of
+ {ok, {http, _, _ , _, Path,_}} ->
+ ssl_manager:insert_crls(string:strip(Path, left, $/), CRLs);
+ _ ->
+ {error, {only_http_distribution_points_supported, URI}}
+ end.
+
+get_crls([], _) ->
+ not_available;
+get_crls([{uniformResourceIdentifier, "http"++_ = URL} | Rest],
+ CRLDbInfo) ->
+ case cache_lookup(URL, CRLDbInfo) of
+ [] ->
+ handle_http(URL, Rest, CRLDbInfo);
+ CRLs ->
+ CRLs
+ end;
+get_crls([ _| Rest], CRLDbInfo) ->
+ %% unsupported CRL location
+ get_crls(Rest, CRLDbInfo).
+
+http_lookup(URL, Rest, CRLDbInfo, Timeout) ->
+ case application:ensure_started(inets) of
+ ok ->
+ http_get(URL, Rest, CRLDbInfo, Timeout);
+ _ ->
+ get_crls(Rest, CRLDbInfo)
+ end.
+
+http_get(URL, Rest, CRLDbInfo, Timeout) ->
+ case httpc:request(get, {URL, [{"connection", "close"}]},
+ [{timeout, Timeout}], [{body_format, binary}]) of
+ {ok, {_Status, _Headers, Body}} ->
+ case Body of
+ <<"-----BEGIN", _/binary>> ->
+ Pem = public_key:pem_decode(Body),
+ lists:filtermap(fun({'CertificateList',
+ CRL, not_encrypted}) ->
+ {true, CRL};
+ (_) ->
+ false
+ end, Pem);
+ _ ->
+ try public_key:der_decode('CertificateList', Body) of
+ _ ->
+ [Body]
+ catch
+ _:_ ->
+ get_crls(Rest, CRLDbInfo)
+ end
+ end;
+ {error, _Reason} ->
+ get_crls(Rest, CRLDbInfo)
+ end.
+
+cache_lookup(_, undefined) ->
+ [];
+cache_lookup(URL, {{Cache, _}, _}) ->
+ {ok, {_, _, _ , _, Path,_}} = http_uri:parse(URL),
+ case ssl_pkix_db:lookup(string:strip(Path, left, $/), Cache) of
+ undefined ->
+ [];
+ CRLs ->
+ CRLs
+ end.
+
+handle_http(URI, Rest, {_, [{http, Timeout}]} = CRLDbInfo) ->
+ CRLs = http_lookup(URI, Rest, CRLDbInfo, Timeout),
+    %% Uncomment the insert call below to improve performance, but a cache
+    %% size limit and/or periodic cleaning would have to be implemented first
+    %% to prevent denial-of-service attacks.
+ %%insert(URI, {der, CRLs}),
+ CRLs;
+handle_http(_, Rest, CRLDbInfo) ->
+ get_crls(Rest, CRLDbInfo).
+
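%% Illustrative sketch, not part of this diff: one way the insert/delete API
%% exported above could be used. The URI and file name are made-up examples;
%% both calls return ok on success (see the ssl_manager handle_call clauses
%% further down in this change).

cache_crls_example() ->
    %% Load CRLs from a local PEM file and key them on the HTTP
    %% distribution-point URI they would normally be fetched from.
    ok = ssl_crl_cache:insert("http://crl.example.com/ca.crl",
                              {file, "/etc/ssl/crls/ca.crl"}),
    %% Later, drop the cached CRLs for that distribution point again.
    ok = ssl_crl_cache:delete("http://crl.example.com/ca.crl").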
diff --git a/lib/ssl/src/ssl_crl_cache_api.erl b/lib/ssl/src/ssl_crl_cache_api.erl
new file mode 100644
index 0000000000..79db65104b
--- /dev/null
+++ b/lib/ssl/src/ssl_crl_cache_api.erl
@@ -0,0 +1,30 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_crl_cache_api).
+
+-include_lib("public_key/include/public_key.hrl").
+
+-type db_handle() :: term().
+
+-callback lookup(#'DistributionPoint'{}, db_handle()) -> not_available | [public_key:der_encoded()].
+-callback select(term(), db_handle()) -> [public_key:der_encoded()].
+-callback fresh_crl(#'DistributionPoint'{}, public_key:der_encoded()) -> public_key:der_encoded().
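%% Illustrative sketch, not part of this diff: the smallest module that could
%% satisfy the behaviour defined above. The module name is made up; it caches
%% nothing and simply reports that no CRLs are available.

-module(ssl_null_crl_cache).
-behaviour(ssl_crl_cache_api).

-export([lookup/2, select/2, fresh_crl/2]).

%% No CRL can be produced for any distribution point.
lookup(_DistributionPoint, _DbHandle) ->
    not_available.

%% No CRLs are known for any issuer.
select(_Issuer, _DbHandle) ->
    [].

%% Nothing fresher is available, so hand back the CRL we were given.
fresh_crl(_DistributionPoint, CurrentCRL) ->
    CurrentCRL.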
diff --git a/lib/ssl/src/ssl_handshake.erl b/lib/ssl/src/ssl_handshake.erl
index 07535e79b4..12a17cb6ac 100644
--- a/lib/ssl/src/ssl_handshake.erl
+++ b/lib/ssl/src/ssl_handshake.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -49,7 +49,7 @@
finished/5, next_protocol/1]).
%% Handle handshake messages
--export([certify/8, client_certificate_verify/6, certificate_verify/6, verify_signature/5,
+-export([certify/10, client_certificate_verify/6, certificate_verify/6, verify_signature/5,
master_secret/5, server_key_exchange_hash/2, verify_connection/6,
init_handshake_history/0, update_handshake_history/2, verify_server_key/5
]).
@@ -136,6 +136,7 @@ client_hello_extensions(Host, Version, CipherSuites, SslOpts, ConnectionStates,
hash_signs = advertised_hash_signs(Version),
ec_point_formats = EcPointFormats,
elliptic_curves = EllipticCurves,
+ alpn = encode_alpn(SslOpts#ssl_options.alpn_advertised_protocols, Renegotiation),
next_protocol_negotiation =
encode_client_protocol_negotiation(SslOpts#ssl_options.next_protocol_selector,
Renegotiation),
@@ -149,7 +150,7 @@ client_hello_extensions(Host, Version, CipherSuites, SslOpts, ConnectionStates,
certificate(OwnCert, CertDbHandle, CertDbRef, client) ->
Chain =
case ssl_certificate:certificate_chain(OwnCert, CertDbHandle, CertDbRef) of
- {ok, CertChain} ->
+ {ok, _, CertChain} ->
CertChain;
{error, _} ->
%% If no suitable certificate is available, the client
@@ -161,7 +162,7 @@ certificate(OwnCert, CertDbHandle, CertDbRef, client) ->
certificate(OwnCert, CertDbHandle, CertDbRef, server) ->
case ssl_certificate:certificate_chain(OwnCert, CertDbHandle, CertDbRef) of
- {ok, Chain} ->
+ {ok, _, Chain} ->
#certificate{asn1_certificates = Chain};
{error, _} ->
?ALERT_REC(?FATAL, ?INTERNAL_ERROR)
@@ -242,7 +243,7 @@ key_exchange(client, _Version, {dh, PublicKey}) ->
dh_public = PublicKey}
};
-key_exchange(client, _Version, {ecdh, #'ECPrivateKey'{publicKey = {0, ECPublicKey}}}) ->
+key_exchange(client, _Version, {ecdh, #'ECPrivateKey'{publicKey = ECPublicKey}}) ->
#client_key_exchange{
exchange_keys = #client_ec_diffie_hellman_public{
dh_public = ECPublicKey}
@@ -283,7 +284,7 @@ key_exchange(server, Version, {dh, {PublicKey, _},
enc_server_key_exchange(Version, ServerDHParams, HashSign,
ClientRandom, ServerRandom, PrivateKey);
-key_exchange(server, Version, {ecdh, #'ECPrivateKey'{publicKey = {0, ECPublicKey},
+key_exchange(server, Version, {ecdh, #'ECPrivateKey'{publicKey = ECPublicKey,
parameters = ECCurve}, HashSign,
ClientRandom, ServerRandom, PrivateKey}) ->
ServerECParams = #server_ecdh_params{curve = ECCurve, public = ECPublicKey},
@@ -383,49 +384,24 @@ verify_signature(_Version, Hash, {HashAlgo, ecdsa}, Signature,
%%--------------------------------------------------------------------
-spec certify(#certificate{}, db_handle(), certdb_ref(), integer() | nolimit,
- verify_peer | verify_none, {fun(), term}, fun(),
+ verify_peer | verify_none, {fun(), term}, fun(), term(), term(),
client | server) -> {der_cert(), public_key_info()} | #alert{}.
%%
%% Description: Handles a certificate handshake message
%%--------------------------------------------------------------------
certify(#certificate{asn1_certificates = ASN1Certs}, CertDbHandle, CertDbRef,
- MaxPathLen, _Verify, VerifyFunAndState, PartialChain, Role) ->
+ MaxPathLen, _Verify, ValidationFunAndState0, PartialChain, CRLCheck, CRLDbHandle, Role) ->
[PeerCert | _] = ASN1Certs,
-
- ValidationFunAndState =
- case VerifyFunAndState of
- undefined ->
- {fun(OtpCert, ExtensionOrVerifyResult, SslState) ->
- ssl_certificate:validate_extension(OtpCert,
- ExtensionOrVerifyResult, SslState)
- end, Role};
- {Fun, UserState0} ->
- {fun(OtpCert, {extension, _} = Extension, {SslState, UserState}) ->
- case ssl_certificate:validate_extension(OtpCert,
- Extension,
- SslState) of
- {valid, NewSslState} ->
- {valid, {NewSslState, UserState}};
- {fail, Reason} ->
- apply_user_fun(Fun, OtpCert, Reason, UserState,
- SslState);
- {unknown, _} ->
- apply_user_fun(Fun, OtpCert,
- Extension, UserState, SslState)
- end;
- (OtpCert, VerifyResult, {SslState, UserState}) ->
- apply_user_fun(Fun, OtpCert, VerifyResult, UserState,
- SslState)
- end, {Role, UserState0}}
- end,
+
+ ValidationFunAndState = validation_fun_and_state(ValidationFunAndState0, Role,
+ CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle),
try
- {TrustedErlCert, CertPath} =
+ {TrustedCert, CertPath} =
ssl_certificate:trusted_cert_and_path(ASN1Certs, CertDbHandle, CertDbRef, PartialChain),
- case public_key:pkix_path_validation(TrustedErlCert,
- CertPath,
- [{max_path_length,
- MaxPathLen},
+ case public_key:pkix_path_validation(TrustedCert,
+ CertPath,
+ [{max_path_length, MaxPathLen},
{verify_fun, ValidationFunAndState}]) of
{ok, {PublicKeyInfo,_}} ->
{PeerCert, PublicKeyInfo};
@@ -500,19 +476,27 @@ update_handshake_history({Handshake0, _Prev}, Data) ->
%% end.
premaster_secret(OtherPublicDhKey, MyPrivateKey, #'DHParameter'{} = Params) ->
- public_key:compute_key(OtherPublicDhKey, MyPrivateKey, Params);
-
+ try
+ public_key:compute_key(OtherPublicDhKey, MyPrivateKey, Params)
+ catch
+ error:computation_failed ->
+ throw(?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER))
+ end;
premaster_secret(PublicDhKey, PrivateDhKey, #server_dh_params{dh_p = Prime, dh_g = Base}) ->
- crypto:compute_key(dh, PublicDhKey, PrivateDhKey, [Prime, Base]);
+ try
+ crypto:compute_key(dh, PublicDhKey, PrivateDhKey, [Prime, Base])
+ catch
+ error:computation_failed ->
+ throw(?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER))
+ end;
premaster_secret(#client_srp_public{srp_a = ClientPublicKey}, ServerKey, #srp_user{prime = Prime,
verifier = Verifier}) ->
case crypto:compute_key(srp, ClientPublicKey, ServerKey, {host, [Verifier, Prime, '6a']}) of
error ->
- ?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER);
+ throw(?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER));
PremasterSecret ->
PremasterSecret
end;
-
premaster_secret(#server_srp_params{srp_n = Prime, srp_g = Generator, srp_s = Salt, srp_b = Public},
ClientKeys, {Username, Password}) ->
case ssl_srp_primes:check_srp_params(Generator, Prime) of
@@ -520,21 +504,19 @@ premaster_secret(#server_srp_params{srp_n = Prime, srp_g = Generator, srp_s = Sa
DerivedKey = crypto:hash(sha, [Salt, crypto:hash(sha, [Username, <<$:>>, Password])]),
case crypto:compute_key(srp, Public, ClientKeys, {user, [DerivedKey, Prime, Generator, '6a']}) of
error ->
- ?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER);
+ throw(?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER));
PremasterSecret ->
PremasterSecret
end;
_ ->
- ?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER)
+ throw(?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER))
end;
-
premaster_secret(#client_rsa_psk_identity{
identity = PSKIdentity,
exchange_keys = #encrypted_premaster_secret{premaster_secret = EncPMS}
}, #'RSAPrivateKey'{} = Key, PSKLookup) ->
PremasterSecret = premaster_secret(EncPMS, Key),
psk_secret(PSKIdentity, PSKLookup, PremasterSecret);
-
premaster_secret(#server_dhe_psk_params{
hint = IdentityHint,
dh_params = #server_dh_params{dh_y = PublicDhKey} = Params},
@@ -542,7 +524,6 @@ premaster_secret(#server_dhe_psk_params{
LookupFun) ->
PremasterSecret = premaster_secret(PublicDhKey, PrivateDhKey, Params),
psk_secret(IdentityHint, LookupFun, PremasterSecret);
-
premaster_secret({rsa_psk, PSKIdentity}, PSKLookup, RSAPremasterSecret) ->
psk_secret(PSKIdentity, PSKLookup, RSAPremasterSecret).
@@ -551,13 +532,10 @@ premaster_secret(#client_dhe_psk_identity{
dh_public = PublicDhKey}, PrivateKey, #'DHParameter'{} = Params, PSKLookup) ->
PremasterSecret = premaster_secret(PublicDhKey, PrivateKey, Params),
psk_secret(PSKIdentity, PSKLookup, PremasterSecret).
-
premaster_secret(#client_psk_identity{identity = PSKIdentity}, PSKLookup) ->
psk_secret(PSKIdentity, PSKLookup);
-
premaster_secret({psk, PSKIdentity}, PSKLookup) ->
psk_secret(PSKIdentity, PSKLookup);
-
premaster_secret(#'ECPoint'{} = ECPoint, #'ECPrivateKey'{} = ECDHKeys) ->
public_key:compute_key(ECPoint, ECDHKeys);
premaster_secret(EncSecret, #'RSAPrivateKey'{} = RSAPrivateKey) ->
@@ -602,11 +580,10 @@ prf({3,_N}, Secret, Label, Seed, WantedLength) ->
%%--------------------------------------------------------------------
select_hashsign(_, undefined, _Version) ->
{null, anon};
-select_hashsign(undefined, Cert, Version) ->
- #'OTPCertificate'{tbsCertificate = TBSCert} = public_key:pkix_decode_cert(Cert, otp),
- #'OTPSubjectPublicKeyInfo'{algorithm = {_,Algo, _}} = TBSCert#'OTPTBSCertificate'.subjectPublicKeyInfo,
- select_hashsign_algs(undefined, Algo, Version);
-select_hashsign(#hash_sign_algos{hash_sign_algos = HashSigns}, Cert, Version) ->
+%% The signature_algorithms extension was introduced with TLS 1.2. Ignore it if we have
+%% negotiated a lower version.
+select_hashsign(#hash_sign_algos{hash_sign_algos = HashSigns}, Cert, {Major, Minor} = Version)
+ when Major >= 3 andalso Minor >= 3 ->
#'OTPCertificate'{tbsCertificate = TBSCert} =public_key:pkix_decode_cert(Cert, otp),
#'OTPSubjectPublicKeyInfo'{algorithm = {_,Algo, _}} = TBSCert#'OTPTBSCertificate'.subjectPublicKeyInfo,
DefaultHashSign = {_, Sign} = select_hashsign_algs(undefined, Algo, Version),
@@ -624,7 +601,11 @@ select_hashsign(#hash_sign_algos{hash_sign_algos = HashSigns}, Cert, Version) ->
DefaultHashSign;
[HashSign| _] ->
HashSign
- end.
+ end;
+select_hashsign(_, Cert, Version) ->
+ #'OTPCertificate'{tbsCertificate = TBSCert} = public_key:pkix_decode_cert(Cert, otp),
+ #'OTPSubjectPublicKeyInfo'{algorithm = {_,Algo, _}} = TBSCert#'OTPTBSCertificate'.subjectPublicKeyInfo,
+ select_hashsign_algs(undefined, Algo, Version).
%%--------------------------------------------------------------------
-spec select_hashsign_algs(#hash_sign_algos{}| undefined, oid(), ssl_record:ssl_version()) ->
@@ -789,6 +770,11 @@ encode_hello_extensions([], Acc) ->
Size = byte_size(Acc),
<<?UINT16(Size), Acc/binary>>;
+encode_hello_extensions([#alpn{extension_data = ExtensionData} | Rest], Acc) ->
+ Len = byte_size(ExtensionData),
+ ExtLen = Len + 2,
+ encode_hello_extensions(Rest, <<?UINT16(?ALPN_EXT), ?UINT16(ExtLen), ?UINT16(Len),
+ ExtensionData/binary, Acc/binary>>);
encode_hello_extensions([#next_protocol_negotiation{extension_data = ExtensionData} | Rest], Acc) ->
Len = byte_size(ExtensionData),
encode_hello_extensions(Rest, <<?UINT16(?NEXTPROTONEG_EXT), ?UINT16(Len),
@@ -887,6 +873,25 @@ decode_client_key(ClientKey, Type, Version) ->
decode_server_key(ServerKey, Type, Version) ->
dec_server_key(ServerKey, key_exchange_alg(Type), Version).
+%%
+%% Description: Encode and decode functions for ALPN extension data.
+%%--------------------------------------------------------------------
+
+%% While the RFC opens the door to allow ALPN during renegotiation, in practice
+%% this does not work and it is recommended to ignore any ALPN extension during
+%% renegotiation, as done here.
+encode_alpn(_, true) ->
+ undefined;
+encode_alpn(undefined, _) ->
+ undefined;
+encode_alpn(Protocols, _) ->
+ #alpn{extension_data = lists:foldl(fun encode_protocol/2, <<>>, Protocols)}.
+
+decode_alpn(undefined) ->
+ undefined;
+decode_alpn(#alpn{extension_data=Data}) ->
+ decode_protocols(Data, []).
+
encode_client_protocol_negotiation(undefined, _) ->
undefined;
encode_client_protocol_negotiation(_, false) ->
@@ -1149,8 +1154,10 @@ handle_client_hello_extensions(RecordCB, Random, ClientCipherSuites,
#hello_extensions{renegotiation_info = Info,
srp = SRP,
ec_point_formats = ECCFormat,
+ alpn = ALPN,
next_protocol_negotiation = NextProtocolNegotiation}, Version,
- #ssl_options{secure_renegotiate = SecureRenegotation} = Opts,
+ #ssl_options{secure_renegotiate = SecureRenegotation,
+ alpn_preferred_protocols = ALPNPreferredProtocols} = Opts,
#session{cipher_suite = NegotiatedCipherSuite,
compression_method = Compression} = Session0,
ConnectionStates0, Renegotiation) ->
@@ -1159,19 +1166,34 @@ handle_client_hello_extensions(RecordCB, Random, ClientCipherSuites,
Random, NegotiatedCipherSuite,
ClientCipherSuites, Compression,
ConnectionStates0, Renegotiation, SecureRenegotation),
- ProtocolsToAdvertise = handle_next_protocol_extension(NextProtocolNegotiation, Renegotiation, Opts),
-
+
ServerHelloExtensions = #hello_extensions{
renegotiation_info = renegotiation_info(RecordCB, server,
ConnectionStates, Renegotiation),
- ec_point_formats = server_ecc_extension(Version, ECCFormat),
- next_protocol_negotiation =
- encode_protocols_advertised_on_server(ProtocolsToAdvertise)
+ ec_point_formats = server_ecc_extension(Version, ECCFormat)
},
- {Session, ConnectionStates, ServerHelloExtensions}.
+
+ %% If we receive an ALPN extension and have ALPN configured for this connection,
+ %% we handle it. Otherwise we check for the NPN extension.
+ if
+ ALPN =/= undefined, ALPNPreferredProtocols =/= undefined ->
+ case handle_alpn_extension(ALPNPreferredProtocols, decode_alpn(ALPN)) of
+ #alert{} = Alert ->
+ Alert;
+ Protocol ->
+ {Session, ConnectionStates, Protocol,
+ ServerHelloExtensions#hello_extensions{alpn=encode_alpn([Protocol], Renegotiation)}}
+ end;
+ true ->
+ ProtocolsToAdvertise = handle_next_protocol_extension(NextProtocolNegotiation, Renegotiation, Opts),
+ {Session, ConnectionStates, undefined,
+ ServerHelloExtensions#hello_extensions{next_protocol_negotiation=
+ encode_protocols_advertised_on_server(ProtocolsToAdvertise)}}
+ end.
handle_server_hello_extensions(RecordCB, Random, CipherSuite, Compression,
#hello_extensions{renegotiation_info = Info,
+ alpn = ALPN,
next_protocol_negotiation = NextProtocolNegotiation}, Version,
#ssl_options{secure_renegotiate = SecureRenegotation,
next_protocol_selector = NextProtoSelector},
@@ -1180,11 +1202,23 @@ handle_server_hello_extensions(RecordCB, Random, CipherSuite, Compression,
CipherSuite, undefined,
Compression, ConnectionStates0,
Renegotiation, SecureRenegotation),
- case handle_next_protocol(NextProtocolNegotiation, NextProtoSelector, Renegotiation) of
- #alert{} = Alert ->
- Alert;
- Protocol ->
- {ConnectionStates, Protocol}
+
+ %% If we receive an ALPN extension then this is the protocol selected,
+ %% otherwise handle the NPN extension.
+ case decode_alpn(ALPN) of
+ %% ServerHello contains exactly one protocol: the one selected.
+ %% We also ignore the ALPN extension during renegotiation (see encode_alpn/2).
+ [Protocol] when not Renegotiation ->
+ {ConnectionStates, alpn, Protocol};
+ undefined ->
+ case handle_next_protocol(NextProtocolNegotiation, NextProtoSelector, Renegotiation) of
+ #alert{} = Alert ->
+ Alert;
+ Protocol ->
+ {ConnectionStates, npn, Protocol}
+ end;
+ _ -> %% {error, _Reason} or a list of 0/2+ protocols.
+ ?ALERT_REC(?FATAL, ?HANDSHAKE_FAILURE)
end.
select_version(RecordCB, ClientVersion, Versions) ->
@@ -1292,10 +1326,11 @@ hello_extensions_list(#hello_extensions{renegotiation_info = RenegotiationInfo,
hash_signs = HashSigns,
ec_point_formats = EcPointFormats,
elliptic_curves = EllipticCurves,
+ alpn = ALPN,
next_protocol_negotiation = NextProtocolNegotiation,
sni = Sni}) ->
[Ext || Ext <- [RenegotiationInfo, SRP, HashSigns,
- EcPointFormats, EllipticCurves, NextProtocolNegotiation, Sni], Ext =/= undefined].
+ EcPointFormats, EllipticCurves, ALPN, NextProtocolNegotiation, Sni], Ext =/= undefined].
srp_user(#ssl_options{srp_identity = {UserName, _}}) ->
#srp{username = UserName};
@@ -1374,15 +1409,66 @@ sni1(Hostname) ->
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
+validation_fun_and_state({Fun, UserState0}, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) ->
+ {fun(OtpCert, {extension, _} = Extension, {SslState, UserState}) ->
+ case ssl_certificate:validate(OtpCert,
+ Extension,
+ SslState) of
+ {valid, NewSslState} ->
+ {valid, {NewSslState, UserState}};
+ {fail, Reason} ->
+ apply_user_fun(Fun, OtpCert, Reason, UserState,
+ SslState);
+ {unknown, _} ->
+ apply_user_fun(Fun, OtpCert,
+ Extension, UserState, SslState)
+ end;
+ (OtpCert, VerifyResult, {SslState, UserState}) ->
+ apply_user_fun(Fun, OtpCert, VerifyResult, UserState,
+ SslState)
+ end, {{Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}, UserState0}};
+validation_fun_and_state(undefined, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) ->
+ {fun(OtpCert, {extension, _} = Extension, SslState) ->
+ ssl_certificate:validate(OtpCert,
+ Extension,
+ SslState);
+ (OtpCert, VerifyResult, SslState) when (VerifyResult == valid) or (VerifyResult == valid_peer) ->
+ case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of
+ valid ->
+ {VerifyResult, SslState};
+ Reason ->
+ {fail, Reason}
+ end;
+ (OtpCert, VerifyResult, SslState) ->
+ ssl_certificate:validate(OtpCert,
+ VerifyResult,
+ SslState)
+ end, {Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}}.
+
+apply_user_fun(Fun, OtpCert, VerifyResult, UserState0,
+ {_, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle} = SslState) when
+ (VerifyResult == valid) or (VerifyResult == valid_peer) ->
+ case Fun(OtpCert, VerifyResult, UserState0) of
+ {Valid, UserState} when (Valid == valid) or (Valid == valid_peer) ->
+ case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of
+ valid ->
+ {Valid, {SslState, UserState}};
+ Result ->
+ apply_user_fun(Fun, OtpCert, Result, UserState, SslState)
+ end;
+ {fail, _} = Fail ->
+ Fail
+ end;
apply_user_fun(Fun, OtpCert, ExtensionOrError, UserState0, SslState) ->
case Fun(OtpCert, ExtensionOrError, UserState0) of
- {valid, UserState} ->
- {valid, {SslState, UserState}};
+ {Valid, UserState} when (Valid == valid) or (Valid == valid_peer)->
+ {Valid, {SslState, UserState}};
{fail, _} = Fail ->
Fail;
{unknown, UserState} ->
{unknown, {SslState, UserState}}
end.
+
path_validation_alert({bad_cert, cert_expired}) ->
?ALERT_REC(?FATAL, ?CERTIFICATE_EXPIRED);
path_validation_alert({bad_cert, invalid_issuer}) ->
@@ -1393,8 +1479,10 @@ path_validation_alert({bad_cert, name_not_permitted}) ->
?ALERT_REC(?FATAL, ?BAD_CERTIFICATE);
path_validation_alert({bad_cert, unknown_critical_extension}) ->
?ALERT_REC(?FATAL, ?UNSUPPORTED_CERTIFICATE);
-path_validation_alert({bad_cert, cert_revoked}) ->
+path_validation_alert({bad_cert, {revoked, _}}) ->
?ALERT_REC(?FATAL, ?CERTIFICATE_REVOKED);
+path_validation_alert({bad_cert, revocation_status_undetermined}) ->
+ ?ALERT_REC(?FATAL, ?BAD_CERTIFICATE);
path_validation_alert({bad_cert, selfsigned_peer}) ->
?ALERT_REC(?FATAL, ?BAD_CERTIFICATE);
path_validation_alert({bad_cert, unknown_ca}) ->
@@ -1680,6 +1768,10 @@ dec_server_key_signature(_, _, _) ->
dec_hello_extensions(<<>>, Acc) ->
Acc;
+dec_hello_extensions(<<?UINT16(?ALPN_EXT), ?UINT16(ExtLen), ?UINT16(Len), ExtensionData:Len/binary, Rest/binary>>, Acc)
+ when Len + 2 =:= ExtLen ->
+ ALPN = #alpn{extension_data = ExtensionData},
+ dec_hello_extensions(Rest, Acc#hello_extensions{alpn = ALPN});
dec_hello_extensions(<<?UINT16(?NEXTPROTONEG_EXT), ?UINT16(Len), ExtensionData:Len/binary, Rest/binary>>, Acc) ->
NextP = #next_protocol_negotiation{extension_data = ExtensionData},
dec_hello_extensions(Rest, Acc#hello_extensions{next_protocol_negotiation = NextP});
@@ -1760,18 +1852,19 @@ dec_sni(<<?BYTE(_), ?UINT16(Len), _:Len, Rest/binary>>) -> dec_sni(Rest);
dec_sni(_) -> undefined.
decode_next_protocols({next_protocol_negotiation, Protocols}) ->
- decode_next_protocols(Protocols, []).
-decode_next_protocols(<<>>, Acc) ->
+ decode_protocols(Protocols, []).
+
+decode_protocols(<<>>, Acc) ->
lists:reverse(Acc);
-decode_next_protocols(<<?BYTE(Len), Protocol:Len/binary, Rest/binary>>, Acc) ->
+decode_protocols(<<?BYTE(Len), Protocol:Len/binary, Rest/binary>>, Acc) ->
case Len of
0 ->
- {error, invalid_next_protocols};
+ {error, invalid_protocols};
_ ->
- decode_next_protocols(Rest, [Protocol|Acc])
+ decode_protocols(Rest, [Protocol|Acc])
end;
-decode_next_protocols(_Bytes, _Acc) ->
- {error, invalid_next_protocols}.
+decode_protocols(_Bytes, _Acc) ->
+ {error, invalid_protocols}.
%% encode/decode stream of certificate data to/from list of certificate data
certs_to_list(ASN1Certs) ->
@@ -1825,6 +1918,17 @@ key_exchange_alg(_) ->
%%-------------Extension handling --------------------------------
+%% Pick the first of the server's preferred protocols that the client also offered.
+handle_alpn_extension(_, {error, _Reason}) ->
+ ?ALERT_REC(?FATAL, ?HANDSHAKE_FAILURE);
+handle_alpn_extension([], _) ->
+ ?ALERT_REC(?FATAL, ?NO_APPLICATION_PROTOCOL);
+handle_alpn_extension([ServerProtocol|Tail], ClientProtocols) ->
+ case lists:member(ServerProtocol, ClientProtocols) of
+ true -> ServerProtocol;
+ false -> handle_alpn_extension(Tail, ClientProtocols)
+ end.
+
handle_next_protocol(undefined,
_NextProtocolSelector, _Renegotiating) ->
undefined;
@@ -1934,7 +2038,7 @@ psk_secret(PSKIdentity, PSKLookup) ->
#alert{} = Alert ->
Alert;
_ ->
- ?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER)
+ throw(?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER))
end.
psk_secret(PSKIdentity, PSKLookup, PremasterSecret) ->
@@ -1946,7 +2050,7 @@ psk_secret(PSKIdentity, PSKLookup, PremasterSecret) ->
#alert{} = Alert ->
Alert;
_ ->
- ?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER)
+ throw(?ALERT_REC(?FATAL, ?ILLEGAL_PARAMETER))
end.
handle_psk_identity(_PSKIdentity, LookupFun)
@@ -1954,3 +2058,70 @@ handle_psk_identity(_PSKIdentity, LookupFun)
error;
handle_psk_identity(PSKIdentity, {Fun, UserState}) ->
Fun(psk, PSKIdentity, UserState).
+
+crl_check(_, false, _,_,_, _) ->
+ valid;
+crl_check(_, peer, _, _,_, valid) -> %% Do not check CAs with this option.
+ valid;
+crl_check(OtpCert, Check, CertDbHandle, CertDbRef, {Callback, CRLDbHandle}, _) ->
+ Options = [{issuer_fun, {fun(_DP, CRL, Issuer, DBInfo) ->
+ ssl_crl:trusted_cert_and_path(CRL, Issuer, DBInfo)
+ end, {CertDbHandle, CertDbRef}}},
+ {update_crl, fun(DP, CRL) -> Callback:fresh_crl(DP, CRL) end}
+ ],
+ case dps_and_crls(OtpCert, Callback, CRLDbHandle, ext) of
+ no_dps ->
+ case dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) of
+ [] ->
+ valid; %% No relevant CRL existed
+ DpsAndCRls ->
+ crl_check_same_issuer(OtpCert, Check, DpsAndCRls, Options)
+ end;
+ DpsAndCRLs -> %% This DP list may be empty if relevant CRLs existed
+            %% but could not be retrieved, which will result in {bad_cert, revocation_status_undetermined}
+ case public_key:pkix_crls_validate(OtpCert, DpsAndCRLs, Options) of
+ {bad_cert, revocation_status_undetermined} ->
+ crl_check_same_issuer(OtpCert, Check, dps_and_crls(OtpCert, Callback,
+ CRLDbHandle, same_issuer), Options);
+ Other ->
+ Other
+ end
+ end.
+
+crl_check_same_issuer(OtpCert, best_effort, Dps, Options) ->
+ case public_key:pkix_crls_validate(OtpCert, Dps, Options) of
+ {bad_cert, revocation_status_undetermined} ->
+ valid;
+ Other ->
+ Other
+ end;
+crl_check_same_issuer(OtpCert, _, Dps, Options) ->
+ public_key:pkix_crls_validate(OtpCert, Dps, Options).
+
+dps_and_crls(OtpCert, Callback, CRLDbHandle, ext) ->
+ case public_key:pkix_dist_points(OtpCert) of
+ [] ->
+ no_dps;
+ DistPoints ->
+ distpoints_lookup(DistPoints, Callback, CRLDbHandle)
+ end;
+
+dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) ->
+ DP = #'DistributionPoint'{distributionPoint = {fullName, GenNames}} =
+ public_key:pkix_dist_point(OtpCert),
+ CRLs = lists:flatmap(fun({directoryName, Issuer}) ->
+ Callback:select(Issuer, CRLDbHandle);
+ (_) ->
+ []
+ end, GenNames),
+ [{DP, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs].
+
+distpoints_lookup([], _, _) ->
+ [];
+distpoints_lookup([DistPoint | Rest], Callback, CRLDbHandle) ->
+ case Callback:lookup(DistPoint, CRLDbHandle) of
+ not_available ->
+ distpoints_lookup(Rest, Callback, CRLDbHandle);
+ CRLs ->
+ [{DistPoint, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs]
+ end.
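%% Illustrative sketch, not part of this diff: the selection rule implemented
%% by handle_alpn_extension/2 above, restated standalone. The server walks its
%% own preference list and answers with the first protocol the client also
%% offered; if nothing matches, the handshake fails with
%% no_application_protocol (the function name below is made up).

select_alpn_protocol([], _ClientProtocols) ->
    {error, no_application_protocol};
select_alpn_protocol([Preferred | Rest], ClientProtocols) ->
    case lists:member(Preferred, ClientProtocols) of
        true -> {ok, Preferred};
        false -> select_alpn_protocol(Rest, ClientProtocols)
    end.

%% select_alpn_protocol([<<"http/1.1">>, <<"spdy/3">>], [<<"spdy/3">>])
%%     =:= {ok, <<"spdy/3">>}.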
diff --git a/lib/ssl/src/ssl_handshake.hrl b/lib/ssl/src/ssl_handshake.hrl
index 80284faef0..91f674a6fc 100644
--- a/lib/ssl/src/ssl_handshake.hrl
+++ b/lib/ssl/src/ssl_handshake.hrl
@@ -95,6 +95,7 @@
-record(hello_extensions, {
renegotiation_info,
hash_signs, % supported combinations of hashes/signature algos
+ alpn,
next_protocol_negotiation = undefined, % [binary()]
srp,
ec_point_formats,
@@ -301,6 +302,14 @@
}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Application-Layer Protocol Negotiation RFC 7301
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-define(ALPN_EXT, 16).
+
+-record(alpn, {extension_data}).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Next Protocol Negotiation
%% (http://tools.ietf.org/html/draft-agl-tls-nextprotoneg-02)
%% (http://technotes.googlecode.com/git/nextprotoneg.html)
diff --git a/lib/ssl/src/ssl_internal.hrl b/lib/ssl/src/ssl_internal.hrl
index 75efb64e3f..baeae68bc4 100644
--- a/lib/ssl/src/ssl_internal.hrl
+++ b/lib/ssl/src/ssl_internal.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -61,14 +61,19 @@
-define(CDR_HDR_SIZE, 12).
-define(DEFAULT_TIMEOUT, 5000).
+-define(NO_DIST_POINT, "http://dummy/no_distribution_point").
+-define(NO_DIST_POINT_PATH, "dummy/no_distribution_point").
%% Common enumerate values in for SSL-protocols
-define(NULL, 0).
-define(TRUE, 0).
-define(FALSE, 1).
--define(ALL_SUPPORTED_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1, sslv3]).
--define(MIN_SUPPORTED_VERSIONS, ['tlsv1.1', tlsv1, sslv3]).
+%% sslv3 is considered insecure due to lack of padding check (Poodle attack)
+%% Keep as interop with legacy software but do not support as default
+-define(ALL_AVAILABLE_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1, sslv3]).
+-define(ALL_SUPPORTED_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1]).
+-define(MIN_SUPPORTED_VERSIONS, ['tlsv1.1', tlsv1]).
-define(ALL_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]).
-define(MIN_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]).
@@ -111,13 +116,21 @@
hibernate_after :: boolean(),
%% This option should only be set to true by inet_tls_dist
erl_dist = false :: boolean(),
- next_protocols_advertised = undefined, %% [binary()],
+ alpn_advertised_protocols = undefined :: [binary()] | undefined ,
+ alpn_preferred_protocols = undefined :: [binary()] | undefined,
+ next_protocols_advertised = undefined :: [binary()] | undefined,
next_protocol_selector = undefined, %% fun([binary()]) -> binary())
log_alert :: boolean(),
server_name_indication = undefined,
+ sni_hosts :: [{inet:hostname(), [tuple()]}],
+ sni_fun :: function() | undefined,
%% Should the server prefer its own cipher order over the one provided by
%% the client?
- honor_cipher_order = false
+ honor_cipher_order = false :: boolean(),
+ padding_check = true :: boolean(),
+ fallback = false :: boolean(),
+ crl_check :: boolean() | peer | best_effort,
+ crl_cache
}).
-record(socket_options,
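%% Illustrative sketch, not part of this diff: how the new crl_check field in
%% #ssl_options{} above surfaces as a connect option. Host, port and CA file
%% are made up; the option name mirrors the record field.

crl_checked_connect() ->
    {ok, _Started} = application:ensure_all_started(ssl),
    %% best_effort: an undetermined revocation status is accepted as valid,
    %% matching crl_check_same_issuer/4 in ssl_handshake.erl.
    ssl:connect("server.example.com", 443,
                [{verify, verify_peer},
                 {cacertfile, "/etc/ssl/certs/ca.pem"},
                 {crl_check, best_effort}],
                5000).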
diff --git a/lib/ssl/src/ssl_manager.erl b/lib/ssl/src/ssl_manager.erl
index 5553fc9220..396013825e 100644
--- a/lib/ssl/src/ssl_manager.erl
+++ b/lib/ssl/src/ssl_manager.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,14 +26,15 @@
%% Internal application API
-export([start_link/1, start_link_dist/1,
- connection_init/2, cache_pem_file/2,
+ connection_init/3, cache_pem_file/2,
lookup_trusted_cert/4,
new_session_id/1, clean_cert_db/2,
register_session/2, register_session/3, invalidate_session/2,
- invalidate_session/3, clear_pem_cache/0, manager_name/1]).
+ insert_crls/2, insert_crls/3, delete_crls/1, delete_crls/2,
+ invalidate_session/3, invalidate_pem/1, clear_pem_cache/0, manager_name/1]).
% Spawn export
--export([init_session_validator/1]).
+-export([init_session_validator/1, init_pem_cache_validator/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
@@ -50,7 +51,9 @@
session_lifetime,
certificate_db,
session_validation_timer,
- last_delay_timer = {undefined, undefined}%% Keep for testing purposes
+ last_delay_timer = {undefined, undefined},%% Keep for testing purposes
+ last_pem_check,
+ clear_pem_cache
}).
-define('24H_in_msec', 86400000).
@@ -98,19 +101,21 @@ start_link_dist(Opts) ->
gen_server:start_link({local, DistMangerName}, ?MODULE, [DistMangerName, Opts], []).
%%--------------------------------------------------------------------
--spec connection_init(binary()| {der, list()}, client | server) ->
- {ok, certdb_ref(), db_handle(), db_handle(), db_handle(), db_handle()}.
+-spec connection_init(binary()| {der, list()}, client | server,
+ {Cb :: atom(), Handle:: term()}) ->
+ {ok, certdb_ref(), db_handle(), db_handle(),
+ db_handle(), db_handle(), CRLInfo::term()}.
%%
%% Description: Do necessary initializations for a new connection.
%%--------------------------------------------------------------------
-connection_init({der, _} = Trustedcerts, Role) ->
- call({connection_init, Trustedcerts, Role});
+connection_init({der, _} = Trustedcerts, Role, CRLCache) ->
+ call({connection_init, Trustedcerts, Role, CRLCache});
-connection_init(<<>> = Trustedcerts, Role) ->
- call({connection_init, Trustedcerts, Role});
+connection_init(<<>> = Trustedcerts, Role, CRLCache) ->
+ call({connection_init, Trustedcerts, Role, CRLCache});
-connection_init(Trustedcerts, Role) ->
- call({connection_init, Trustedcerts, Role}).
+connection_init(Trustedcerts, Role, CRLCache) ->
+ call({connection_init, Trustedcerts, Role, CRLCache}).
%%--------------------------------------------------------------------
-spec cache_pem_file(binary(), term()) -> {ok, term()} | {error, reason()}.
@@ -118,14 +123,13 @@ connection_init(Trustedcerts, Role) ->
%% Description: Cache a pem file and return its content.
%%--------------------------------------------------------------------
cache_pem_file(File, DbHandle) ->
- MD5 = crypto:hash(md5, File),
- case ssl_pkix_db:lookup_cached_pem(DbHandle, MD5) of
+ case ssl_pkix_db:lookup_cached_pem(DbHandle, File) of
[{Content,_}] ->
{ok, Content};
[Content] ->
- {ok, Content};
+ {ok, Content};
undefined ->
- call({cache_pem, {MD5, File}})
+ call({cache_pem, File})
end.
%%--------------------------------------------------------------------
@@ -192,6 +196,28 @@ invalidate_session(Host, Port, Session) ->
invalidate_session(Port, Session) ->
cast({invalidate_session, Port, Session}).
+-spec invalidate_pem(File::binary()) -> ok.
+invalidate_pem(File) ->
+ cast({invalidate_pem, File}).
+
+insert_crls(Path, CRLs)->
+ insert_crls(Path, CRLs, normal).
+insert_crls(?NO_DIST_POINT_PATH = Path, CRLs, ManagerType)->
+ put(ssl_manager, manager_name(ManagerType)),
+ cast({insert_crls, Path, CRLs});
+insert_crls(Path, CRLs, ManagerType)->
+ put(ssl_manager, manager_name(ManagerType)),
+ call({insert_crls, Path, CRLs}).
+
+delete_crls(Path)->
+ delete_crls(Path, normal).
+delete_crls(?NO_DIST_POINT_PATH = Path, ManagerType)->
+ put(ssl_manager, manager_name(ManagerType)),
+ cast({delete_crls, Path});
+delete_crls(Path, ManagerType)->
+ put(ssl_manager, manager_name(ManagerType)),
+ call({delete_crls, Path}).
+
%%====================================================================
%% gen_server callbacks
%%====================================================================
@@ -216,13 +242,17 @@ init([Name, Opts]) ->
proplists:get_value(session_cb_init_args, Opts, [])]),
Timer = erlang:send_after(SessionLifeTime * 1000 + 5000,
self(), validate_sessions),
- erlang:send_after(?CLEAR_PEM_CACHE, self(), clear_pem_cache),
+ Interval = pem_check_interval(),
+ erlang:send_after(Interval, self(), clear_pem_cache),
{ok, #state{certificate_db = CertDb,
session_cache_client = ClientSessionCache,
session_cache_server = ServerSessionCache,
session_cache_cb = CacheCb,
session_lifetime = SessionLifeTime,
- session_validation_timer = Timer}}.
+ session_validation_timer = Timer,
+ last_pem_check = os:timestamp(),
+ clear_pem_cache = Interval
+ }}.
%%--------------------------------------------------------------------
-spec handle_call(msg(), from(), #state{}) -> {reply, reply(), #state{}}.
@@ -235,51 +265,39 @@ init([Name, Opts]) ->
%%
%% Description: Handling call messages
%%--------------------------------------------------------------------
-handle_call({{connection_init, <<>>, client}, _Pid}, _From,
- #state{certificate_db = [CertDb, FileRefDb, PemChace],
- session_cache_client = Cache} = State) ->
- Result = {ok, make_ref(),CertDb, FileRefDb, PemChace, Cache},
- {reply, Result, State};
-handle_call({{connection_init, <<>>, server}, _Pid}, _From,
- #state{certificate_db = [CertDb, FileRefDb, PemChace],
- session_cache_server = Cache} = State) ->
- Result = {ok, make_ref(),CertDb, FileRefDb, PemChace, Cache},
- {reply, Result, State};
-
-handle_call({{connection_init, Trustedcerts, client}, Pid}, _From,
- #state{certificate_db = [CertDb, FileRefDb, PemChace] = Db,
- session_cache_client = Cache} = State) ->
- Result =
- try
- {ok, Ref} = ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db),
- {ok, Ref, CertDb, FileRefDb, PemChace, Cache}
- catch
- _:Reason ->
- {error, Reason}
- end,
- {reply, Result, State};
-handle_call({{connection_init, Trustedcerts, server}, Pid}, _From,
- #state{certificate_db = [CertDb, FileRefDb, PemChace] = Db,
- session_cache_server = Cache} = State) ->
- Result =
- try
- {ok, Ref} = ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db),
- {ok, Ref, CertDb, FileRefDb, PemChace, Cache}
- catch
- _:Reason ->
- {error, Reason}
- end,
- {reply, Result, State};
-
-
-handle_call({{new_session_id,Port}, _},
+handle_call({{connection_init, <<>>, Role, {CRLCb, UserCRLDb}}, _Pid}, _From,
+ #state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) ->
+ Ref = make_ref(),
+ Result = {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State), {CRLCb, crl_db_info(Db, UserCRLDb)}},
+ {reply, Result, State#state{certificate_db = Db}};
+
+handle_call({{connection_init, Trustedcerts, Role, {CRLCb, UserCRLDb}}, Pid}, _From,
+ #state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) ->
+ case add_trusted_certs(Pid, Trustedcerts, Db) of
+ {ok, Ref} ->
+ {reply, {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State),
+ {CRLCb, crl_db_info(Db, UserCRLDb)}}, State};
+ {error, _} = Error ->
+ {reply, Error, State}
+ end;
+
+handle_call({{insert_crls, Path, CRLs}, _}, _From,
+ #state{certificate_db = Db} = State) ->
+ ssl_pkix_db:add_crls(Db, Path, CRLs),
+ {reply, ok, State};
+
+handle_call({{delete_crls, CRLsOrPath}, _}, _From,
+ #state{certificate_db = Db} = State) ->
+ ssl_pkix_db:remove_crls(Db, CRLsOrPath),
+ {reply, ok, State};
+
+handle_call({{new_session_id, Port}, _},
_, #state{session_cache_cb = CacheCb,
session_cache_server = Cache} = State) ->
Id = new_id(Port, ?GEN_UNIQUE_ID_MAX_TRIES, Cache, CacheCb),
{reply, Id, State};
-
-handle_call({{cache_pem, File}, _Pid}, _,
+handle_call({{cache_pem,File}, _Pid}, _,
#state{certificate_db = Db} = State) ->
try ssl_pkix_db:cache_pem_file(File, Db) of
Result ->
@@ -288,7 +306,7 @@ handle_call({{cache_pem, File}, _Pid}, _,
_:Reason ->
{reply, {error, Reason}, State}
end;
-handle_call({unconditionally_clear_pem_cache, _},_, #state{certificate_db = [_,_,PemChace]} = State) ->
+handle_call({unconditionally_clear_pem_cache, _},_, #state{certificate_db = [_,_,PemChace | _]} = State) ->
ssl_pkix_db:clear(PemChace),
{reply, ok, State}.
@@ -332,7 +350,23 @@ handle_cast({invalidate_session, Host, Port,
handle_cast({invalidate_session, Port, #session{session_id = ID} = Session},
#state{session_cache_server = Cache,
session_cache_cb = CacheCb} = State) ->
- invalidate_session(Cache, CacheCb, {Port, ID}, Session, State).
+ invalidate_session(Cache, CacheCb, {Port, ID}, Session, State);
+
+
+handle_cast({insert_crls, Path, CRLs},
+ #state{certificate_db = Db} = State) ->
+ ssl_pkix_db:add_crls(Db, Path, CRLs),
+ {noreply, State};
+
+handle_cast({delete_crls, CRLsOrPath},
+ #state{certificate_db = Db} = State) ->
+ ssl_pkix_db:remove_crls(Db, CRLsOrPath),
+ {noreply, State};
+
+handle_cast({invalidate_pem, File},
+ #state{certificate_db = [_, _, PemCache | _]} = State) ->
+ ssl_pkix_db:remove(File, PemCache),
+ {noreply, State}.
%%--------------------------------------------------------------------
-spec handle_info(msg(), #state{}) -> {noreply, #state{}}.
@@ -353,24 +387,22 @@ handle_info(validate_sessions, #state{session_cache_cb = CacheCb,
start_session_validator(ServerCache, CacheCb, LifeTime),
{noreply, State#state{session_validation_timer = Timer}};
+
handle_info({delayed_clean_session, Key, Cache}, #state{session_cache_cb = CacheCb
} = State) ->
CacheCb:delete(Cache, Key),
{noreply, State};
-handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace]} = State) ->
- case ssl_pkix_db:db_size(PemChace) of
- N when N < ?NOT_TO_BIG ->
- ok;
- _ ->
- ssl_pkix_db:clear(PemChace)
- end,
- erlang:send_after(?CLEAR_PEM_CACHE, self(), clear_pem_cache),
- {noreply, State};
-
+handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace | _],
+ clear_pem_cache = Interval,
+ last_pem_check = CheckPoint} = State) ->
+ NewCheckPoint = os:timestamp(),
+ start_pem_cache_validator(PemChace, CheckPoint),
+ erlang:send_after(Interval, self(), clear_pem_cache),
+ {noreply, State#state{last_pem_check = NewCheckPoint}};
handle_info({clean_cert_db, Ref, File},
- #state{certificate_db = [CertDb,RefDb, PemCache]} = State) ->
+ #state{certificate_db = [CertDb,RefDb, PemCache | _]} = State) ->
case ssl_pkix_db:lookup(Ref, RefDb) of
	undefined -> %% Already cleaned
@@ -514,10 +546,9 @@ new_id(Port, Tries, Cache, CacheCb) ->
clean_cert_db(Ref, CertDb, RefDb, PemCache, File) ->
case ssl_pkix_db:ref_count(Ref, RefDb, 0) of
0 ->
- MD5 = crypto:hash(md5, File),
- case ssl_pkix_db:lookup_cached_pem(PemCache, MD5) of
+ case ssl_pkix_db:lookup_cached_pem(PemCache, File) of
[{Content, Ref}] ->
- ssl_pkix_db:insert(MD5, Content, PemCache);
+ ssl_pkix_db:insert(File, Content, PemCache);
_ ->
ok
end,
@@ -557,3 +588,57 @@ exists_equivalent(#session{
true;
exists_equivalent(Session, [ _ | Rest]) ->
exists_equivalent(Session, Rest).
+
+start_pem_cache_validator(PemCache, CheckPoint) ->
+ spawn_link(?MODULE, init_pem_cache_validator,
+ [[get(ssl_manager), PemCache, CheckPoint]]).
+
+init_pem_cache_validator([SslManagerName, PemCache, CheckPoint]) ->
+ put(ssl_manager, SslManagerName),
+ ssl_pkix_db:foldl(fun pem_cache_validate/2,
+ CheckPoint, PemCache).
+
+pem_cache_validate({File, _}, CheckPoint) ->
+ case file:read_file_info(File, []) of
+ {ok, #file_info{mtime = Time}} ->
+ case is_before_checkpoint(Time, CheckPoint) of
+ true ->
+ ok;
+ false ->
+ invalidate_pem(File)
+ end;
+ _ ->
+ invalidate_pem(File)
+ end,
+ CheckPoint.
+
+pem_check_interval() ->
+ case application:get_env(ssl, ssl_pem_cache_clean) of
+ {ok, Interval} when is_integer(Interval) ->
+ Interval;
+ _ ->
+ ?CLEAR_PEM_CACHE
+ end.
+
+is_before_checkpoint(Time, CheckPoint) ->
+ calendar:datetime_to_gregorian_seconds(calendar:now_to_datetime(CheckPoint)) -
+ calendar:datetime_to_gregorian_seconds(Time) > 0.
+
+add_trusted_certs(Pid, Trustedcerts, Db) ->
+ try
+ ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db)
+ catch
+ _:Reason ->
+ {error, Reason}
+ end.
+
+session_cache(client, #state{session_cache_client = Cache}) ->
+ Cache;
+session_cache(server, #state{session_cache_server = Cache}) ->
+ Cache.
+
+crl_db_info([_,_,_,Local], {internal, Info}) ->
+ {Local, Info};
+crl_db_info(_, UserCRLDb) ->
+ UserCRLDb.
+
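%% Illustrative sketch, not part of this diff: pem_check_interval/0 above
%% falls back to ?CLEAR_PEM_CACHE unless the ssl_pem_cache_clean application
%% variable is set, so the interval can be tuned before ssl is started (the
%% one-minute value is only an example).

tune_pem_cache_interval() ->
    application:load(ssl),
    ok = application:set_env(ssl, ssl_pem_cache_clean, 60000),
    {ok, _Started} = application:ensure_all_started(ssl).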
diff --git a/lib/ssl/src/ssl_pkix_db.erl b/lib/ssl/src/ssl_pkix_db.erl
index e59aba0618..d7b7e3eae3 100644
--- a/lib/ssl/src/ssl_pkix_db.erl
+++ b/lib/ssl/src/ssl_pkix_db.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -27,9 +27,9 @@
-include_lib("public_key/include/public_key.hrl").
-include_lib("kernel/include/file.hrl").
--export([create/0, remove/1, add_trusted_certs/3,
+-export([create/0, add_crls/3, remove_crls/2, remove/1, add_trusted_certs/3,
remove_trusted_certs/2, insert/3, remove/2, clear/1, db_size/1,
- ref_count/3, lookup_trusted_cert/4, foldl/3,
+ ref_count/3, lookup_trusted_cert/4, foldl/3, select_cert_by_issuer/2,
lookup_cached_pem/2, cache_pem_file/2, cache_pem_file/3,
lookup/2]).
@@ -51,16 +51,24 @@ create() ->
ets:new(ssl_otp_cacertificate_db, [set, public]),
%% Let connection processes call ref_count/3 directly
ets:new(ssl_otp_ca_file_ref, [set, public]),
- ets:new(ssl_otp_pem_cache, [set, protected])
+ ets:new(ssl_otp_pem_cache, [set, protected]),
+ %% Default cache
+ {ets:new(ssl_otp_crl_cache, [set, protected]),
+ ets:new(ssl_otp_crl_issuer_mapping, [bag, protected])}
].
%%--------------------------------------------------------------------
--spec remove([db_handle()]) -> ok.
+-spec remove([db_handle()]) -> ok.
%%
%% Description: Removes database db
%%--------------------------------------------------------------------
remove(Dbs) ->
- lists:foreach(fun(Db) ->
+ lists:foreach(fun({Db0, Db1}) ->
+ true = ets:delete(Db0),
+ true = ets:delete(Db1);
+ (undefined) ->
+ ok;
+ (Db) ->
true = ets:delete(Db)
end, Dbs).
@@ -81,10 +89,10 @@ lookup_trusted_cert(DbHandle, Ref, SerialNumber, Issuer) ->
{ok, Certs}
end.
-lookup_cached_pem([_, _, PemChache], MD5) ->
- lookup_cached_pem(PemChache, MD5);
-lookup_cached_pem(PemChache, MD5) ->
- lookup(MD5, PemChache).
+lookup_cached_pem([_, _, PemChache | _], File) ->
+ lookup_cached_pem(PemChache, File);
+lookup_cached_pem(PemChache, File) ->
+ lookup(File, PemChache).
%%--------------------------------------------------------------------
-spec add_trusted_certs(pid(), {erlang:timestamp(), string()} |
@@ -94,42 +102,42 @@ lookup_cached_pem(PemChache, MD5) ->
%% runtime database. Returns Ref that should be handed to lookup_trusted_cert
%% together with the cert serialnumber and issuer.
%%--------------------------------------------------------------------
-add_trusted_certs(_Pid, {der, DerList}, [CerDb, _,_]) ->
+add_trusted_certs(_Pid, {der, DerList}, [CertDb, _,_ | _]) ->
NewRef = make_ref(),
- add_certs_from_der(DerList, NewRef, CerDb),
+ add_certs_from_der(DerList, NewRef, CertDb),
{ok, NewRef};
-add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache] = Db) ->
- MD5 = crypto:hash(md5, File),
- case lookup_cached_pem(Db, MD5) of
+add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache | _] = Db) ->
+ case lookup_cached_pem(Db, File) of
[{_Content, Ref}] ->
ref_count(Ref, RefDb, 1),
{ok, Ref};
[Content] ->
Ref = make_ref(),
update_counter(Ref, 1, RefDb),
- insert(MD5, {Content, Ref}, PemChache),
+ insert(File, {Content, Ref}, PemChache),
add_certs_from_pem(Content, Ref, CertsDb),
{ok, Ref};
undefined ->
- new_trusted_cert_entry({MD5, File}, Db)
+ new_trusted_cert_entry(File, Db)
end.
%%--------------------------------------------------------------------
%%
%% Description: Cache file as binary in DB
%%--------------------------------------------------------------------
--spec cache_pem_file({binary(), binary()}, [db_handle()]) -> {ok, term()}.
-cache_pem_file({MD5, File}, [_CertsDb, _RefDb, PemChache]) ->
+-spec cache_pem_file(binary(), [db_handle()]) -> {ok, term()}.
+cache_pem_file(File, [_CertsDb, _RefDb, PemChache | _]) ->
{ok, PemBin} = file:read_file(File),
Content = public_key:pem_decode(PemBin),
- insert(MD5, Content, PemChache),
+ insert(File, Content, PemChache),
{ok, Content}.
--spec cache_pem_file(reference(), {binary(), binary()}, [db_handle()]) -> {ok, term()}.
-cache_pem_file(Ref, {MD5, File}, [_CertsDb, _RefDb, PemChache]) ->
+
+-spec cache_pem_file(reference(), binary(), [db_handle()]) -> {ok, term()}.
+cache_pem_file(Ref, File, [_CertsDb, _RefDb, PemChache| _]) ->
{ok, PemBin} = file:read_file(File),
Content = public_key:pem_decode(PemBin),
- insert(MD5, {Content, Ref}, PemChache),
+ insert(File, {Content, Ref}, PemChache),
{ok, Content}.
%%--------------------------------------------------------------------
@@ -150,6 +158,15 @@ remove(Key, Db) ->
ok.
%%--------------------------------------------------------------------
+-spec remove(term(), term(), db_handle()) -> ok.
+%%
+%% Description: Removes an element in a <Db>.
+%%--------------------------------------------------------------------
+remove(Key, Data, Db) ->
+ ets:delete_object(Db, {Key, Data}),
+ ok.
+
+%%--------------------------------------------------------------------
-spec lookup(term(), db_handle()) -> [term()] | undefined.
%%
%% Description: Looks up an element in a <Db>.
@@ -176,6 +193,10 @@ lookup(Key, Db) ->
foldl(Fun, Acc0, Cache) ->
ets:foldl(Fun, Acc0, Cache).
+
+select_cert_by_issuer(Cache, Issuer) ->
+ ets:select(Cache, [{{{'_','_', Issuer},{'_', '$1'}},[],['$$']}]).
+
%%--------------------------------------------------------------------
-spec ref_count(term(), db_handle(), integer()) -> integer().
%%
@@ -245,9 +266,39 @@ add_certs(Cert, Ref, CertsDb) ->
error_logger:info_report(Report)
end.
-new_trusted_cert_entry(FileRef, [CertsDb, RefDb, _] = Db) ->
+new_trusted_cert_entry(File, [CertsDb, RefDb, _ | _] = Db) ->
Ref = make_ref(),
update_counter(Ref, 1, RefDb),
- {ok, Content} = cache_pem_file(Ref, FileRef, Db),
+ {ok, Content} = cache_pem_file(Ref, File, Db),
add_certs_from_pem(Content, Ref, CertsDb),
{ok, Ref}.
+
+add_crls([_,_,_, {_, Mapping} | _], ?NO_DIST_POINT, CRLs) ->
+ [add_crls(CRL, Mapping) || CRL <- CRLs];
+add_crls([_,_,_, {Cache, Mapping} | _], Path, CRLs) ->
+ insert(Path, CRLs, Cache),
+ [add_crls(CRL, Mapping) || CRL <- CRLs].
+
+add_crls(CRL, Mapping) ->
+ insert(crl_issuer(CRL), CRL, Mapping).
+
+remove_crls([_,_,_, {_, Mapping} | _], {?NO_DIST_POINT, CRLs}) ->
+ [rm_crls(CRL, Mapping) || CRL <- CRLs];
+
+remove_crls([_,_,_, {Cache, Mapping} | _], Path) ->
+ case lookup(Path, Cache) of
+ undefined ->
+ ok;
+ CRLs ->
+ remove(Path, Cache),
+ [rm_crls(CRL, Mapping) || CRL <- CRLs]
+ end.
+
+rm_crls(CRL, Mapping) ->
+ remove(crl_issuer(CRL), CRL, Mapping).
+
+crl_issuer(DerCRL) ->
+ CRL = public_key:der_decode('CertificateList', DerCRL),
+ TBSCRL = CRL#'CertificateList'.tbsCertList,
+ TBSCRL#'TBSCertList'.issuer.
+
diff --git a/lib/ssl/src/ssl_record.erl b/lib/ssl/src/ssl_record.erl
index 63fc57edad..a02375a947 100644
--- a/lib/ssl/src/ssl_record.erl
+++ b/lib/ssl/src/ssl_record.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -48,7 +48,7 @@
-export([compress/3, uncompress/3, compressions/0]).
%% Payload encryption/decryption
--export([cipher/4, decipher/3, is_correct_mac/2,
+-export([cipher/4, decipher/4, is_correct_mac/2,
cipher_aead/4, decipher_aead/4]).
-export_type([ssl_version/0, ssl_atom_version/0]).
@@ -396,7 +396,7 @@ cipher_aead(Version, Fragment,
{CipherFragment, WriteState0#connection_state{cipher_state = CipherS1}}.
%%--------------------------------------------------------------------
--spec decipher(ssl_version(), binary(), #connection_state{}) -> {binary(), binary(), #connection_state{}} | #alert{}.
+-spec decipher(ssl_version(), binary(), #connection_state{}, boolean()) -> {binary(), binary(), #connection_state{}} | #alert{}.
%%
%% Description: Payload decryption
%%--------------------------------------------------------------------
@@ -406,8 +406,8 @@ decipher(Version, CipherFragment,
BulkCipherAlgo,
hash_size = HashSz},
cipher_state = CipherS0
- } = ReadState) ->
- case ssl_cipher:decipher(BulkCipherAlgo, HashSz, CipherS0, CipherFragment, Version) of
+ } = ReadState, PaddingCheck) ->
+ case ssl_cipher:decipher(BulkCipherAlgo, HashSz, CipherS0, CipherFragment, Version, PaddingCheck) of
{PlainFragment, Mac, CipherS1} ->
CS1 = ReadState#connection_state{cipher_state = CipherS1},
{PlainFragment, Mac, CS1};
diff --git a/lib/ssl/src/ssl_tls_dist_proxy.erl b/lib/ssl/src/ssl_tls_dist_proxy.erl
index a22af6b960..d23b42ace5 100644
--- a/lib/ssl/src/ssl_tls_dist_proxy.erl
+++ b/lib/ssl/src/ssl_tls_dist_proxy.erl
@@ -227,7 +227,10 @@ loop_conn_setup(World, Erts) ->
{tcp_closed, Erts} ->
ssl:close(World);
{ssl_closed, World} ->
- gen_tcp:close(Erts)
+ gen_tcp:close(Erts);
+ {ssl_error, World, _} ->
+
+ ssl:close(World)
end.
loop_conn(World, Erts) ->
@@ -241,7 +244,9 @@ loop_conn(World, Erts) ->
{tcp_closed, Erts} ->
ssl:close(World);
{ssl_closed, World} ->
- gen_tcp:close(Erts)
+ gen_tcp:close(Erts);
+ {ssl_error, World, _} ->
+ ssl:close(World)
end.
get_ssl_options(Type) ->
diff --git a/lib/ssl/src/ssl_v3.erl b/lib/ssl/src/ssl_v3.erl
index 68f7f5dee2..169b39be32 100644
--- a/lib/ssl/src/ssl_v3.erl
+++ b/lib/ssl/src/ssl_v3.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -143,9 +143,6 @@ suites() ->
?TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
?TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
?TLS_RSA_WITH_AES_128_CBC_SHA,
- %%?TLS_RSA_WITH_IDEA_CBC_SHA,
- ?TLS_RSA_WITH_RC4_128_SHA,
- ?TLS_RSA_WITH_RC4_128_MD5,
?TLS_RSA_WITH_DES_CBC_SHA
].
diff --git a/lib/ssl/src/tls_connection.erl b/lib/ssl/src/tls_connection.erl
index 7df73fb581..3304ffcddb 100644
--- a/lib/ssl/src/tls_connection.erl
+++ b/lib/ssl/src/tls_connection.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -188,19 +188,27 @@ hello(Hello = #client_hello{client_version = ClientVersion,
renegotiation = {Renegotiation, _},
session_cache = Cache,
session_cache_cb = CacheCb,
+ negotiated_protocol = CurrentProtocol,
ssl_options = SslOpts}) ->
case tls_handshake:hello(Hello, SslOpts, {Port, Session0, Cache, CacheCb,
ConnectionStates0, Cert}, Renegotiation) of
+ #alert{} = Alert ->
+ handle_own_alert(Alert, ClientVersion, hello, State);
{Version, {Type, Session},
- ConnectionStates, ServerHelloExt} ->
+ ConnectionStates, Protocol0, ServerHelloExt} ->
+
+ Protocol = case Protocol0 of
+ undefined -> CurrentProtocol;
+ _ -> Protocol0
+ end,
+
HashSign = ssl_handshake:select_hashsign(HashSigns, Cert, Version),
ssl_connection:hello({common_client_hello, Type, ServerHelloExt, HashSign},
State#state{connection_states = ConnectionStates,
negotiated_version = Version,
session = Session,
- client_ecc = {EllipticCurves, EcPointFormats}}, ?MODULE);
- #alert{} = Alert ->
- handle_own_alert(Alert, ClientVersion, hello, State)
+ client_ecc = {EllipticCurves, EcPointFormats},
+ negotiated_protocol = Protocol}, ?MODULE)
end;
hello(Hello,
#state{connection_states = ConnectionStates0,
@@ -211,9 +219,9 @@ hello(Hello,
case tls_handshake:hello(Hello, SslOptions, ConnectionStates0, Renegotiation) of
#alert{} = Alert ->
handle_own_alert(Alert, ReqVersion, hello, State);
- {Version, NewId, ConnectionStates, NextProtocol} ->
+ {Version, NewId, ConnectionStates, ProtoExt, Protocol} ->
ssl_connection:handle_session(Hello,
- Version, NewId, ConnectionStates, NextProtocol, State)
+ Version, NewId, ConnectionStates, ProtoExt, Protocol, State)
end;
hello(Msg, State) ->
@@ -390,6 +398,23 @@ initial_state(Role, Host, Port, Socket, {SSLOptions, SocketOptions, Tracker}, Us
tracker = Tracker
}.
+
+update_ssl_options_from_sni(OrigSSLOptions, SNIHostname) ->
+ SSLOption =
+ case OrigSSLOptions#ssl_options.sni_fun of
+ undefined ->
+ proplists:get_value(SNIHostname,
+ OrigSSLOptions#ssl_options.sni_hosts);
+ SNIFun ->
+ SNIFun(SNIHostname)
+ end,
+ case SSLOption of
+ undefined ->
+ undefined;
+ _ ->
+ ssl:handle_options(SSLOption, OrigSSLOptions)
+ end.
+
next_state(Current,_, #alert{} = Alert, #state{negotiated_version = Version} = State) ->
handle_own_alert(Alert, Version, Current, State);
@@ -418,15 +443,17 @@ next_state(Current, Next, #ssl_tls{type = ?HANDSHAKE, fragment = Data},
%% This message should not be included in handshake
%% message hashes. Already in negotiation so it will be ignored!
?MODULE:SName(Packet, State);
- ({#client_hello{} = Packet, Raw}, {next_state, connection = SName, State}) ->
+ ({#client_hello{} = Packet, Raw}, {next_state, connection = SName, HState0}) ->
+ HState = handle_sni_extension(Packet, HState0),
Version = Packet#client_hello.client_version,
Hs0 = ssl_handshake:init_handshake_history(),
Hs1 = ssl_handshake:update_handshake_history(Hs0, Raw),
- ?MODULE:SName(Packet, State#state{tls_handshake_history=Hs1,
- renegotiation = {true, peer}});
- ({Packet, Raw}, {next_state, SName, State = #state{tls_handshake_history=Hs0}}) ->
+ ?MODULE:SName(Packet, HState#state{tls_handshake_history=Hs1,
+ renegotiation = {true, peer}});
+ ({Packet, Raw}, {next_state, SName, HState0 = #state{tls_handshake_history=Hs0}}) ->
+ HState = handle_sni_extension(Packet, HState0),
Hs1 = ssl_handshake:update_handshake_history(Hs0, Raw),
- ?MODULE:SName(Packet, State#state{tls_handshake_history=Hs1});
+ ?MODULE:SName(Packet, HState#state{tls_handshake_history=Hs1});
(_, StopState) -> StopState
end,
try
@@ -482,8 +509,9 @@ next_record(#state{protocol_buffers = #protocol_buffers{tls_packets = [], tls_ci
next_record(#state{protocol_buffers =
#protocol_buffers{tls_packets = [], tls_cipher_texts = [CT | Rest]}
= Buffers,
- connection_states = ConnStates0} = State) ->
- case tls_record:decode_cipher_text(CT, ConnStates0) of
+ connection_states = ConnStates0,
+ ssl_options = #ssl_options{padding_check = Check}} = State) ->
+ case tls_record:decode_cipher_text(CT, ConnStates0, Check) of
{Plain, ConnStates} ->
{Plain, State#state{protocol_buffers =
Buffers#protocol_buffers{tls_cipher_texts = Rest},
@@ -972,3 +1000,32 @@ convert_options_partial_chain(Options, up) ->
list_to_tuple(Head ++ [{partial_chain, fun(_) -> unknown_ca end}] ++ Tail);
convert_options_partial_chain(Options, down) ->
list_to_tuple(proplists:delete(partial_chain, tuple_to_list(Options))).
+
+handle_sni_extension(#client_hello{extensions = HelloExtensions}, State0) ->
+ case HelloExtensions#hello_extensions.sni of
+ undefined ->
+ State0;
+ #sni{hostname = Hostname} ->
+ NewOptions = update_ssl_options_from_sni(State0#state.ssl_options, Hostname),
+ case NewOptions of
+ undefined ->
+ State0;
+ _ ->
+ {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, Key, DHParams} =
+ ssl_config:init(NewOptions, State0#state.role),
+ State0#state{
+ session = State0#state.session#session{own_certificate = OwnCert},
+ file_ref_db = FileRefHandle,
+ cert_db_ref = Ref,
+ cert_db = CertDbHandle,
+ crl_db = CRLDbHandle,
+ session_cache = CacheHandle,
+ private_key = Key,
+ diffie_hellman_params = DHParams,
+ ssl_options = NewOptions,
+ sni_hostname = Hostname
+ }
+ end
+ end;
+handle_sni_extension(_, State0) ->
+ State0.
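%% handle_sni_extension/2 above rebuilds the connection state from options
%% selected per requested hostname. A bare sketch (not part of the patch) of
%% the two server options it relies on; certificate file names are placeholders:
SniHosts = [{"a.server", [{certfile, "a_cert.pem"}, {keyfile, "a_key.pem"}]},
            {"b.server", [{certfile, "b_cert.pem"}, {keyfile, "b_key.pem"}]}],
{ok, _L1} = ssl:listen(4443, [{sni_hosts, SniHosts},
                              {certfile, "default_cert.pem"},
                              {keyfile, "default_key.pem"}]),

%% The sni_fun alternative computes the option list at handshake time.
SniFun = fun(Hostname) -> [{certfile, Hostname ++ "_cert.pem"},
                           {keyfile, Hostname ++ "_key.pem"}] end,
{ok, _L2} = ssl:listen(4444, [{sni_fun, SniFun},
                              {certfile, "default_cert.pem"},
                              {keyfile, "default_key.pem"}]).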
diff --git a/lib/ssl/src/tls_handshake.erl b/lib/ssl/src/tls_handshake.erl
index 183cabcfcd..d936310991 100644
--- a/lib/ssl/src/tls_handshake.erl
+++ b/lib/ssl/src/tls_handshake.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,6 +28,7 @@
-include("tls_record.hrl").
-include("ssl_alert.hrl").
-include("ssl_internal.hrl").
+-include("ssl_cipher.hrl").
-include_lib("public_key/include/public_key.hrl").
-export([client_hello/8, hello/4,
@@ -47,22 +48,28 @@
%%--------------------------------------------------------------------
client_hello(Host, Port, ConnectionStates,
#ssl_options{versions = Versions,
- ciphers = UserSuites
+ ciphers = UserSuites,
+ fallback = Fallback
} = SslOpts,
Cache, CacheCb, Renegotiation, OwnCert) ->
Version = tls_record:highest_protocol_version(Versions),
Pending = ssl_record:pending_connection_state(ConnectionStates, read),
SecParams = Pending#connection_state.security_parameters,
- CipherSuites = ssl_handshake:available_suites(UserSuites, Version),
+ AvailableCipherSuites = ssl_handshake:available_suites(UserSuites, Version),
Extensions = ssl_handshake:client_hello_extensions(Host, Version,
- CipherSuites,
+ AvailableCipherSuites,
SslOpts, ConnectionStates, Renegotiation),
-
- Id = ssl_session:client_id({Host, Port, SslOpts}, Cache, CacheCb, OwnCert),
-
+ CipherSuites =
+ case Fallback of
+ true ->
+ [?TLS_FALLBACK_SCSV | ssl_handshake:cipher_suites(AvailableCipherSuites, Renegotiation)];
+ false ->
+ ssl_handshake:cipher_suites(AvailableCipherSuites, Renegotiation)
+ end,
+ Id = ssl_session:client_id({Host, Port, SslOpts}, Cache, CacheCb, OwnCert),
#client_hello{session_id = Id,
client_version = Version,
- cipher_suites = ssl_handshake:cipher_suites(CipherSuites, Renegotiation),
+ cipher_suites = CipherSuites,
compression_methods = ssl_record:compressions(),
random = SecParams#security_parameters.client_random,
extensions = Extensions
@@ -71,12 +78,14 @@ client_hello(Host, Port, ConnectionStates,
%%--------------------------------------------------------------------
-spec hello(#server_hello{} | #client_hello{}, #ssl_options{},
#connection_states{} | {inet:port_number(), #session{}, db_handle(),
- atom(), #connection_states{}, binary() | undefined},
+ atom(), #connection_states{},
+ binary() | undefined},
boolean()) ->
- {tls_record:tls_version(), session_id(), #connection_states{}, binary() | undefined}|
- {tls_record:tls_version(), {resumed | new, #session{}}, #connection_states{},
- [binary()] | undefined,
- [ssl_handshake:oid()] | undefined, [ssl_handshake:oid()] | undefined} |
+ {tls_record:tls_version(), session_id(),
+ #connection_states{}, alpn | npn, binary() | undefined}|
+ {tls_record:tls_version(), {resumed | new, #session{}},
+ #connection_states{}, binary() | undefined,
+ #hello_extensions{}} |
#alert{}.
%%
%% Description: Handles a received hello message

@@ -96,33 +105,22 @@ hello(#server_hello{server_version = Version, random = Random,
end;
hello(#client_hello{client_version = ClientVersion,
- session_id = SugesstedId,
- cipher_suites = CipherSuites,
- compression_methods = Compressions,
- random = Random,
- extensions = #hello_extensions{elliptic_curves = Curves} = HelloExt},
+ cipher_suites = CipherSuites} = Hello,
#ssl_options{versions = Versions} = SslOpts,
- {Port, Session0, Cache, CacheCb, ConnectionStates0, Cert}, Renegotiation) ->
+ Info, Renegotiation) ->
Version = ssl_handshake:select_version(tls_record, ClientVersion, Versions),
- case tls_record:is_acceptable_version(Version, Versions) of
- true ->
- ECCCurve = ssl_handshake:select_curve(Curves, ssl_handshake:supported_ecc(Version)),
- {Type, #session{cipher_suite = CipherSuite} = Session1}
- = ssl_handshake:select_session(SugesstedId, CipherSuites, Compressions,
- Port, Session0#session{ecc = ECCCurve}, Version,
- SslOpts, Cache, CacheCb, Cert),
- case CipherSuite of
- no_suite ->
- ?ALERT_REC(?FATAL, ?INSUFFICIENT_SECURITY);
- _ ->
- handle_client_hello_extensions(Version, Type, Random, CipherSuites, HelloExt,
- SslOpts, Session1, ConnectionStates0,
- Renegotiation)
+ case ssl_cipher:is_fallback(CipherSuites) of
+ true ->
+ Highest = tls_record:highest_protocol_version(Versions),
+ case tls_record:is_higher(Highest, Version) of
+ true ->
+ ?ALERT_REC(?FATAL, ?INAPPROPRIATE_FALLBACK);
+ false ->
+ handle_client_hello(Version, Hello, SslOpts, Info, Renegotiation)
end;
false ->
- ?ALERT_REC(?FATAL, ?PROTOCOL_VERSION)
+ handle_client_hello(Version, Hello, SslOpts, Info, Renegotiation)
end.
-
%%--------------------------------------------------------------------
-spec encode_handshake(tls_handshake(), tls_record:tls_version()) -> iolist().
%%
@@ -149,6 +147,32 @@ get_tls_handshake(Version, Data, Buffer) ->
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
+handle_client_hello(Version, #client_hello{session_id = SugesstedId,
+ cipher_suites = CipherSuites,
+ compression_methods = Compressions,
+ random = Random,
+ extensions = #hello_extensions{elliptic_curves = Curves} = HelloExt},
+ #ssl_options{versions = Versions} = SslOpts,
+ {Port, Session0, Cache, CacheCb, ConnectionStates0, Cert}, Renegotiation) ->
+ case tls_record:is_acceptable_version(Version, Versions) of
+ true ->
+ ECCCurve = ssl_handshake:select_curve(Curves, ssl_handshake:supported_ecc(Version)),
+ {Type, #session{cipher_suite = CipherSuite} = Session1}
+ = ssl_handshake:select_session(SugesstedId, CipherSuites, Compressions,
+ Port, Session0#session{ecc = ECCCurve}, Version,
+ SslOpts, Cache, CacheCb, Cert),
+ case CipherSuite of
+ no_suite ->
+ ?ALERT_REC(?FATAL, ?INSUFFICIENT_SECURITY);
+ _ ->
+ handle_client_hello_extensions(Version, Type, Random, CipherSuites, HelloExt,
+ SslOpts, Session1, ConnectionStates0,
+ Renegotiation)
+ end;
+ false ->
+ ?ALERT_REC(?FATAL, ?PROTOCOL_VERSION)
+ end.
+
get_tls_handshake_aux(Version, <<?BYTE(Type), ?UINT24(Length),
Body:Length/binary,Rest/binary>>, Acc) ->
Raw = <<?BYTE(Type), ?UINT24(Length), Body/binary>>,
@@ -224,8 +248,10 @@ handle_client_hello_extensions(Version, Type, Random, CipherSuites,
try ssl_handshake:handle_client_hello_extensions(tls_record, Random, CipherSuites,
HelloExt, Version, SslOpts,
Session0, ConnectionStates0, Renegotiation) of
- {Session, ConnectionStates, ServerHelloExt} ->
- {Version, {Type, Session}, ConnectionStates, ServerHelloExt}
+ #alert{} = Alert ->
+ Alert;
+ {Session, ConnectionStates, Protocol, ServerHelloExt} ->
+ {Version, {Type, Session}, ConnectionStates, Protocol, ServerHelloExt}
catch throw:Alert ->
Alert
end.
@@ -238,7 +264,7 @@ handle_server_hello_extensions(Version, SessionId, Random, CipherSuite,
SslOpt, ConnectionStates0, Renegotiation) of
#alert{} = Alert ->
Alert;
- {ConnectionStates, Protocol} ->
- {Version, SessionId, ConnectionStates, Protocol}
+ {ConnectionStates, ProtoExt, Protocol} ->
+ {Version, SessionId, ConnectionStates, ProtoExt, Protocol}
end.
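%% The is_fallback/1 branch above is the server side of TLS_FALLBACK_SCSV.
%% A minimal client sketch of the matching option (the address is a
%% placeholder; the alert is only sent when the server supports a version
%% higher than the one offered):
{error, {tls_alert, "inappropriate fallback"}} =
    ssl:connect("server.example.com", 443,
                [{versions, ['tlsv1']},   %% deliberately below the server's best
                 {fallback, true}]).      %% prepends TLS_FALLBACK_SCSV to the hello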
diff --git a/lib/ssl/src/tls_record.erl b/lib/ssl/src/tls_record.erl
index 544d200f70..14a49ac7da 100644
--- a/lib/ssl/src/tls_record.erl
+++ b/lib/ssl/src/tls_record.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -34,14 +34,14 @@
-export([get_tls_records/2]).
%% Decoding
--export([decode_cipher_text/2]).
+-export([decode_cipher_text/3]).
%% Encoding
-export([encode_plain_text/4]).
%% Protocol version handling
-export([protocol_version/1, lowest_protocol_version/2,
- highest_protocol_version/1, supported_protocol_versions/0,
+ highest_protocol_version/1, is_higher/2, supported_protocol_versions/0,
is_acceptable_version/1, is_acceptable_version/2]).
-export_type([tls_version/0, tls_atom_version/0]).
@@ -159,7 +159,7 @@ encode_plain_text(Type, Version, Data,
{CipherText, ConnectionStates#connection_states{current_write = WriteState#connection_state{sequence_number = Seq +1}}}.
%%--------------------------------------------------------------------
--spec decode_cipher_text(#ssl_tls{}, #connection_states{}) ->
+-spec decode_cipher_text(#ssl_tls{}, #connection_states{}, boolean()) ->
{#ssl_tls{}, #connection_states{}}| #alert{}.
%%
%% Description: Decode cipher text
@@ -174,7 +174,7 @@ decode_cipher_text(#ssl_tls{type = Type, version = Version,
#security_parameters{
cipher_type = ?AEAD,
compression_algorithm=CompAlg}
- } = ReadState0} = ConnnectionStates0) ->
+ } = ReadState0} = ConnnectionStates0, _) ->
AAD = calc_aad(Type, Version, ReadState0),
case ssl_record:decipher_aead(Version, CipherFragment, ReadState0, AAD) of
{PlainFragment, ReadState1} ->
@@ -197,8 +197,8 @@ decode_cipher_text(#ssl_tls{type = Type, version = Version,
sequence_number = Seq,
security_parameters=
#security_parameters{compression_algorithm=CompAlg}
- } = ReadState0} = ConnnectionStates0) ->
- case ssl_record:decipher(Version, CipherFragment, ReadState0) of
+ } = ReadState0} = ConnnectionStates0, PaddingCheck) ->
+ case ssl_record:decipher(Version, CipherFragment, ReadState0, PaddingCheck) of
{PlainFragment, Mac, ReadState1} ->
MacHash = calc_mac_hash(Type, Version, PlainFragment, ReadState1),
case ssl_record:is_correct_mac(Mac, MacHash) of
@@ -278,6 +278,13 @@ highest_protocol_version(Version = {M,_}, [{N,_} | Rest]) when M > N ->
highest_protocol_version(_, [Version | Rest]) ->
highest_protocol_version(Version, Rest).
+is_higher({M, N}, {M, O}) when N > O ->
+ true;
+is_higher({M, _}, {N, _}) when M > N ->
+ true;
+is_higher(_, _) ->
+ false.
+
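%% is_higher/2 is a plain ordering on {Major, Minor} version tuples, for
%% example (using the usual encodings sslv3 = {3,0} ... tlsv1.2 = {3,3}):
true  = tls_record:is_higher({3,3}, {3,1}),   %% TLS 1.2 is higher than TLS 1.0
true  = tls_record:is_higher({4,0}, {3,3}),   %% higher major wins regardless of minor
false = tls_record:is_higher({3,1}, {3,1}),   %% equal versions are not "higher"
false = tls_record:is_higher({3,0}, {3,3}).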
%%--------------------------------------------------------------------
-spec supported_protocol_versions() -> [tls_version()].
%%
@@ -311,8 +318,17 @@ supported_protocol_versions([]) ->
Vsns;
supported_protocol_versions([_|_] = Vsns) ->
- Vsns.
-
+ case sufficient_tlsv1_2_crypto_support() of
+ true ->
+ Vsns;
+ false ->
+ case Vsns -- ['tlsv1.2'] of
+ [] ->
+ ?MIN_SUPPORTED_VERSIONS;
+ NewVsns ->
+ NewVsns
+ end
+ end.
%%--------------------------------------------------------------------
%%
%% Description: ssl version 2 is not acceptable security risks are too big.
diff --git a/lib/ssl/src/tls_v1.erl b/lib/ssl/src/tls_v1.erl
index c4114278a4..559fc1d6a8 100644
--- a/lib/ssl/src/tls_v1.erl
+++ b/lib/ssl/src/tls_v1.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -208,15 +208,7 @@ suites(Minor) when Minor == 1; Minor == 2 ->
?TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA,
?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA,
?TLS_RSA_WITH_AES_128_CBC_SHA,
-
- ?TLS_ECDHE_ECDSA_WITH_RC4_128_SHA,
- ?TLS_ECDHE_RSA_WITH_RC4_128_SHA,
- ?TLS_RSA_WITH_RC4_128_SHA,
- ?TLS_RSA_WITH_RC4_128_MD5,
?TLS_DHE_RSA_WITH_DES_CBC_SHA,
- ?TLS_ECDH_ECDSA_WITH_RC4_128_SHA,
- ?TLS_ECDH_RSA_WITH_RC4_128_SHA,
-
?TLS_RSA_WITH_DES_CBC_SHA
];
suites(3) ->
diff --git a/lib/ssl/test/Makefile b/lib/ssl/test/Makefile
index 3639c2b2da..886cc7726b 100644
--- a/lib/ssl/test/Makefile
+++ b/lib/ssl/test/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1999-2013. All Rights Reserved.
+# Copyright Ericsson AB 1999-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -36,6 +36,7 @@ VSN=$(GS_VSN)
MODULES = \
ssl_test_lib \
+ ssl_alpn_handshake_SUITE \
ssl_basic_SUITE \
ssl_bench_SUITE \
ssl_cipher_SUITE \
@@ -47,9 +48,12 @@ MODULES = \
ssl_npn_handshake_SUITE \
ssl_packet_SUITE \
ssl_payload_SUITE \
+ ssl_pem_cache_SUITE \
ssl_session_cache_SUITE \
ssl_to_openssl_SUITE \
ssl_ECC_SUITE \
+ ssl_upgrade_SUITE\
+ ssl_sni_SUITE \
make_certs\
erl_make_certs
diff --git a/lib/ssl/test/erl_make_certs.erl b/lib/ssl/test/erl_make_certs.erl
index daf4466f11..12ad1e5402 100644
--- a/lib/ssl/test/erl_make_certs.erl
+++ b/lib/ssl/test/erl_make_certs.erl
@@ -114,7 +114,7 @@ verify_signature(DerEncodedCert, DerKey, _KeyParams) ->
#'DSAPrivateKey'{p=P, q=Q, g=G, y=Y} ->
public_key:pkix_verify(DerEncodedCert, {Y, #'Dss-Parms'{p=P, q=Q, g=G}});
#'ECPrivateKey'{version = _Version, privateKey = _PrivKey,
- parameters = Params, publicKey = {0, PubKey}} ->
+ parameters = Params, publicKey = PubKey} ->
public_key:pkix_verify(DerEncodedCert, {#'ECPoint'{point = PubKey}, Params})
end.
@@ -204,7 +204,7 @@ issuer_der(Issuer) ->
Subject.
subject(undefined, IsRootCA) ->
- User = if IsRootCA -> "RootCA"; true -> user() end,
+ User = if IsRootCA -> "RootCA"; true -> os:getenv("USER", "test_user") end,
Opts = [{email, User ++ "@erlang.org"},
{name, User},
{city, "Stockholm"},
@@ -215,14 +215,6 @@ subject(undefined, IsRootCA) ->
subject(Opts, _) ->
subject(Opts).
-user() ->
- case os:getenv("USER") of
- false ->
- "test_user";
- User ->
- User
- end.
-
subject(SubjectOpts) when is_list(SubjectOpts) ->
Encode = fun(Opt) ->
{Type,Value} = subject_enc(Opt),
@@ -300,7 +292,7 @@ publickey(#'DSAPrivateKey'{p=P, q=Q, g=G, y=Y}) ->
publickey(#'ECPrivateKey'{version = _Version,
privateKey = _PrivKey,
parameters = Params,
- publicKey = {0, PubKey}}) ->
+ publicKey = PubKey}) ->
Algo = #'PublicKeyAlgorithm'{algorithm= ?'id-ecPublicKey', parameters=Params},
#'OTPSubjectPublicKeyInfo'{algorithm = Algo,
subjectPublicKey = #'ECPoint'{point = PubKey}}.
@@ -409,9 +401,9 @@ gen_ec2(CurveId) ->
{PubKey, PrivKey} = crypto:generate_key(ecdh, CurveId),
#'ECPrivateKey'{version = 1,
- privateKey = binary_to_list(PrivKey),
+ privateKey = PrivKey,
parameters = {namedCurve, pubkey_cert_records:namedCurves(CurveId)},
- publicKey = {0, PubKey}}.
+ publicKey = PubKey}.
%% See fips_186-3.pdf
dsa_search(T, P0, Q, Iter) when Iter > 0 ->
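%% The helper now stores the EC public key as a plain binary instead of the
%% old {0, PubKey} bit-string tuple, matching gen_ec2/1 above. A rough sketch
%% of building such a key record by hand (the curve is only an example and
%% the record definition comes from public_key.hrl):
{PubKey, PrivKey} = crypto:generate_key(ecdh, secp256r1),
#'ECPrivateKey'{version = 1,
                privateKey = PrivKey,
                parameters = {namedCurve, pubkey_cert_records:namedCurves(secp256r1)},
                publicKey = PubKey}.   %% untagged binary, no {0, PubKey}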
diff --git a/lib/ssl/test/make_certs.erl b/lib/ssl/test/make_certs.erl
index 15a7e118ff..4a193d48fe 100644
--- a/lib/ssl/test/make_certs.erl
+++ b/lib/ssl/test/make_certs.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2012. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -81,7 +81,7 @@ all(DataDir, PrivDir, C = #config{}) ->
create_rnd(DataDir, PrivDir), % For all requests
rootCA(PrivDir, "erlangCA", C),
intermediateCA(PrivDir, "otpCA", "erlangCA", C),
- endusers(PrivDir, "otpCA", ["client", "server", "revoked"], C),
+ endusers(PrivDir, "otpCA", ["client", "server", "revoked", "a.server", "b.server"], C),
endusers(PrivDir, "erlangCA", ["localhost"], C),
%% Create keycert files
SDir = filename:join([PrivDir, "server"]),
@@ -324,8 +324,9 @@ eval_cmd(Port, Cmd) ->
ok
end,
receive
- {Port, {exit_status, Status}} when Status /= 0 ->
- %% io:fwrite("exit status: ~w~n", [Status]),
+ {Port, {exit_status, 0}} ->
+ ok;
+ {Port, {exit_status, Status}} ->
exit({eval_cmd, Cmd, Status})
after 0 ->
ok
@@ -369,7 +370,7 @@ req_cnf(C) ->
"subjectKeyIdentifier = hash\n"
"subjectAltName = email:copy\n"].
-ca_cnf(C) ->
+ca_cnf(C = #config{issuing_distribution_point = true}) ->
["# Purpose: Configuration for CAs.\n"
"\n"
"ROOTDIR = $ENV::ROOTDIR\n"
@@ -446,5 +447,83 @@ ca_cnf(C) ->
"subjectAltName = email:copy\n"
"issuerAltName = issuer:copy\n"
"crlDistributionPoints=@crl_section\n"
- ].
+ ];
+ca_cnf(C = #config{issuing_distribution_point = false}) ->
+ ["# Purpose: Configuration for CAs.\n"
+ "\n"
+ "ROOTDIR = $ENV::ROOTDIR\n"
+ "default_ca = ca\n"
+ "\n"
+
+ "[ca]\n"
+ "dir = $ROOTDIR/", C#config.commonName, "\n"
+ "certs = $dir/certs\n"
+ "crl_dir = $dir/crl\n"
+ "database = $dir/index.txt\n"
+ "new_certs_dir = $dir/newcerts\n"
+ "certificate = $dir/cert.pem\n"
+ "serial = $dir/serial\n"
+ "crl = $dir/crl.pem\n",
+ ["crlnumber = $dir/crlnumber\n" || C#config.v2_crls],
+ "private_key = $dir/private/key.pem\n"
+ "RANDFILE = $dir/private/RAND\n"
+ "\n"
+ "x509_extensions = user_cert\n",
+ ["crl_extensions = crl_ext\n" || C#config.v2_crls],
+ "unique_subject = no\n"
+ "default_days = 3600\n"
+ "default_md = md5\n"
+ "preserve = no\n"
+ "policy = policy_match\n"
+ "\n"
+
+ "[policy_match]\n"
+ "commonName = supplied\n"
+ "organizationalUnitName = optional\n"
+ "organizationName = match\n"
+ "countryName = match\n"
+ "localityName = match\n"
+ "emailAddress = supplied\n"
+ "\n"
+
+ "[crl_ext]\n"
+ "authorityKeyIdentifier=keyid:always,issuer:always\n",
+ %["issuingDistributionPoint=critical, @idpsec\n" || C#config.issuing_distribution_point],
+
+ %"[idpsec]\n"
+ %"fullname=URI:http://localhost:8000/",C#config.commonName,"/crl.pem\n"
+
+ "[user_cert]\n"
+ "basicConstraints = CA:false\n"
+ "keyUsage = nonRepudiation, digitalSignature, keyEncipherment\n"
+ "subjectKeyIdentifier = hash\n"
+ "authorityKeyIdentifier = keyid,issuer:always\n"
+ "subjectAltName = email:copy\n"
+ "issuerAltName = issuer:copy\n"
+ %"crlDistributionPoints=@crl_section\n"
+
+ %%"[crl_section]\n"
+ %% intentionally invalid
+ %%"URI.1=http://localhost/",C#config.commonName,"/crl.pem\n"
+ %%"URI.2=http://localhost:",integer_to_list(C#config.crl_port),"/",C#config.commonName,"/crl.pem\n"
+ %%"\n"
+
+ "[user_cert_digital_signature_only]\n"
+ "basicConstraints = CA:false\n"
+ "keyUsage = digitalSignature\n"
+ "subjectKeyIdentifier = hash\n"
+ "authorityKeyIdentifier = keyid,issuer:always\n"
+ "subjectAltName = email:copy\n"
+ "issuerAltName = issuer:copy\n"
+ "\n"
+
+ "[ca_cert]\n"
+ "basicConstraints = critical,CA:true\n"
+ "keyUsage = cRLSign, keyCertSign\n"
+ "subjectKeyIdentifier = hash\n"
+ "authorityKeyIdentifier = keyid:always,issuer:always\n"
+ "subjectAltName = email:copy\n"
+ "issuerAltName = issuer:copy\n"
+ %"crlDistributionPoints=@crl_section\n"
+ ].
diff --git a/lib/ssl/test/ssl_alpn_handshake_SUITE.erl b/lib/ssl/test/ssl_alpn_handshake_SUITE.erl
new file mode 100644
index 0000000000..ae76f5849e
--- /dev/null
+++ b/lib/ssl/test/ssl_alpn_handshake_SUITE.erl
@@ -0,0 +1,414 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+-module(ssl_alpn_handshake_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+-include_lib("common_test/include/ct.hrl").
+
+-define(SLEEP, 500).
+
+%%--------------------------------------------------------------------
+%% Common Test interface functions -----------------------------------
+%%--------------------------------------------------------------------
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ [{group, 'tlsv1.2'},
+ {group, 'tlsv1.1'},
+ {group, 'tlsv1'},
+ {group, 'sslv3'}].
+
+groups() ->
+ [
+ {'tlsv1.2', [], alpn_tests()},
+ {'tlsv1.1', [], alpn_tests()},
+ {'tlsv1', [], alpn_tests()},
+ {'sslv3', [], alpn_not_supported()}
+ ].
+
+alpn_tests() ->
+ [empty_protocols_are_not_allowed,
+ protocols_must_be_a_binary_list,
+ empty_client,
+ empty_server,
+ empty_client_empty_server,
+ no_matching_protocol,
+ client_alpn_and_server_alpn,
+ client_alpn_and_server_no_support,
+ client_no_support_and_server_alpn,
+ client_alpn_npn_and_server_alpn,
+ client_alpn_npn_and_server_alpn_npn,
+ client_alpn_and_server_alpn_npn,
+ client_renegotiate,
+ session_reused
+ ].
+
+alpn_not_supported() ->
+ [alpn_not_supported_client,
+ alpn_not_supported_server
+ ].
+
+init_per_suite(Config) ->
+ catch crypto:stop(),
+ try crypto:start() of
+ ok ->
+ ssl:start(),
+ Result =
+ (catch make_certs:all(?config(data_dir, Config),
+ ?config(priv_dir, Config))),
+ ct:log("Make certs ~p~n", [Result]),
+ ssl_test_lib:cert_options(Config)
+ catch _:_ ->
+ {skip, "Crypto did not start"}
+ end.
+
+end_per_suite(_Config) ->
+ ssl:stop(),
+ application:unload(ssl),
+ application:stop(crypto).
+
+
+init_per_group(GroupName, Config) ->
+ case ssl_test_lib:is_tls_version(GroupName) of
+ true ->
+ case ssl_test_lib:sufficient_crypto_support(GroupName) of
+ true ->
+ ssl_test_lib:init_tls_version(GroupName),
+ Config;
+ false ->
+ {skip, "Missing crypto support"}
+ end;
+ _ ->
+ ssl:start(),
+ Config
+ end.
+
+end_per_group(_GroupName, Config) ->
+ Config.
+
+%%--------------------------------------------------------------------
+%% Test Cases --------------------------------------------------------
+%%--------------------------------------------------------------------
+
+empty_protocols_are_not_allowed(Config) when is_list(Config) ->
+ {error, {options, {alpn_preferred_protocols, {invalid_protocol, <<>>}}}}
+ = (catch ssl:listen(9443,
+ [{alpn_preferred_protocols, [<<"foo/1">>, <<"">>]}])),
+ {error, {options, {alpn_advertised_protocols, {invalid_protocol, <<>>}}}}
+ = (catch ssl:connect({127,0,0,1}, 9443,
+ [{alpn_advertised_protocols, [<<"foo/1">>, <<"">>]}])).
+
+%--------------------------------------------------------------------------------
+
+protocols_must_be_a_binary_list(Config) when is_list(Config) ->
+ Option1 = {alpn_preferred_protocols, hello},
+ {error, {options, Option1}} = (catch ssl:listen(9443, [Option1])),
+ Option2 = {alpn_preferred_protocols, [<<"foo/1">>, hello]},
+ {error, {options, {alpn_preferred_protocols, {invalid_protocol, hello}}}}
+ = (catch ssl:listen(9443, [Option2])),
+ Option3 = {alpn_advertised_protocols, hello},
+ {error, {options, Option3}} = (catch ssl:connect({127,0,0,1}, 9443, [Option3])),
+ Option4 = {alpn_advertised_protocols, [<<"foo/1">>, hello]},
+ {error, {options, {alpn_advertised_protocols, {invalid_protocol, hello}}}}
+ = (catch ssl:connect({127,0,0,1}, 9443, [Option4])).
+
+%--------------------------------------------------------------------------------
+
+empty_client(Config) when is_list(Config) ->
+ run_failing_handshake(Config,
+ [{alpn_advertised_protocols, []}],
+ [{alpn_preferred_protocols, [<<"spdy/2">>, <<"spdy/3">>, <<"http/2">>]}],
+ {connect_failed,{tls_alert,"no application protocol"}}).
+
+%--------------------------------------------------------------------------------
+
+empty_server(Config) when is_list(Config) ->
+ run_failing_handshake(Config,
+ [{alpn_advertised_protocols, [<<"http/1.0">>, <<"http/1.1">>]}],
+ [{alpn_preferred_protocols, []}],
+ {connect_failed,{tls_alert,"no application protocol"}}).
+
+%--------------------------------------------------------------------------------
+
+empty_client_empty_server(Config) when is_list(Config) ->
+ run_failing_handshake(Config,
+ [{alpn_advertised_protocols, []}],
+ [{alpn_preferred_protocols, []}],
+ {connect_failed,{tls_alert,"no application protocol"}}).
+
+%--------------------------------------------------------------------------------
+
+no_matching_protocol(Config) when is_list(Config) ->
+ run_failing_handshake(Config,
+ [{alpn_advertised_protocols, [<<"http/1.0">>, <<"http/1.1">>]}],
+ [{alpn_preferred_protocols, [<<"spdy/2">>, <<"spdy/3">>, <<"http/2">>]}],
+ {connect_failed,{tls_alert,"no application protocol"}}).
+
+%--------------------------------------------------------------------------------
+
+client_alpn_and_server_alpn(Config) when is_list(Config) ->
+ run_handshake(Config,
+ [{alpn_advertised_protocols, [<<"http/1.0">>, <<"http/1.1">>]}],
+ [{alpn_preferred_protocols, [<<"spdy/2">>, <<"http/1.1">>, <<"http/1.0">>]}],
+ {ok, <<"http/1.1">>}).
+
+%--------------------------------------------------------------------------------
+
+client_alpn_and_server_no_support(Config) when is_list(Config) ->
+ run_handshake(Config,
+ [{alpn_advertised_protocols, [<<"http/1.0">>, <<"http/1.1">>]}],
+ [],
+ {error, protocol_not_negotiated}).
+
+%--------------------------------------------------------------------------------
+
+client_no_support_and_server_alpn(Config) when is_list(Config) ->
+ run_handshake(Config,
+ [],
+ [{alpn_preferred_protocols, [<<"spdy/2">>, <<"http/1.1">>, <<"http/1.0">>]}],
+ {error, protocol_not_negotiated}).
+
+%--------------------------------------------------------------------------------
+
+client_alpn_npn_and_server_alpn(Config) when is_list(Config) ->
+ run_handshake(Config,
+ [{alpn_advertised_protocols, [<<"http/1.0">>, <<"http/1.1">>]},
+ {client_preferred_next_protocols, {client, [<<"spdy/2">>], <<"spdy/3">>}}],
+ [{alpn_preferred_protocols, [<<"spdy/2">>, <<"http/1.1">>, <<"http/1.0">>]}],
+ {ok, <<"http/1.1">>}).
+
+%--------------------------------------------------------------------------------
+
+client_alpn_npn_and_server_alpn_npn(Config) when is_list(Config) ->
+ run_handshake(Config,
+ [{alpn_advertised_protocols, [<<"http/1.0">>, <<"http/1.1">>]},
+ {client_preferred_next_protocols, {client, [<<"spdy/2">>], <<"spdy/3">>}}],
+ [{alpn_preferred_protocols, [<<"spdy/2">>, <<"http/1.1">>, <<"http/1.0">>]},
+ {next_protocols_advertised, [<<"spdy/2">>, <<"http/1.0">>]}],
+ {ok, <<"http/1.1">>}).
+
+%--------------------------------------------------------------------------------
+
+client_alpn_and_server_alpn_npn(Config) when is_list(Config) ->
+ run_handshake(Config,
+ [{alpn_advertised_protocols, [<<"http/1.0">>, <<"http/1.1">>]}],
+ [{alpn_preferred_protocols, [<<"spdy/2">>, <<"http/1.1">>, <<"http/1.0">>]},
+ {next_protocols_advertised, [<<"spdy/2">>, <<"http/1.0">>]}],
+ {ok, <<"http/1.1">>}).
+
+%--------------------------------------------------------------------------------
+
+client_renegotiate(Config) when is_list(Config) ->
+ Data = "hello world",
+
+ ClientOpts0 = ?config(client_opts, Config),
+ ClientOpts = [{alpn_advertised_protocols, [<<"http/1.0">>]}] ++ ClientOpts0,
+ ServerOpts0 = ?config(server_opts, Config),
+ ServerOpts = [{alpn_preferred_protocols, [<<"spdy/2">>, <<"http/1.1">>, <<"http/1.0">>]}] ++ ServerOpts0,
+ ExpectedProtocol = {ok, <<"http/1.0">>},
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, ssl_receive_and_assert_alpn, [ExpectedProtocol, Data]}},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE, assert_alpn_and_renegotiate_and_send_data, [ExpectedProtocol, Data]}},
+ {options, ClientOpts}]),
+
+ ssl_test_lib:check_result(Server, ok, Client, ok).
+
+%--------------------------------------------------------------------------------
+
+session_reused(Config) when is_list(Config)->
+ ClientOpts0 = ?config(client_opts, Config),
+ ClientOpts = [{alpn_advertised_protocols, [<<"http/1.0">>]}] ++ ClientOpts0,
+ ServerOpts0 = ?config(server_opts, Config),
+ ServerOpts = [{alpn_preferred_protocols, [<<"spdy/2">>, <<"http/1.1">>, <<"http/1.0">>]}] ++ ServerOpts0,
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {ssl_test_lib, session_info_result, []}},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {ssl_test_lib, no_result_msg, []}},
+ {options, ClientOpts}]),
+
+ SessionInfo =
+ receive
+ {Server, Info} ->
+ Info
+ end,
+
+ Server ! {listen, {mfa, {ssl_test_lib, no_result, []}}},
+
+ %% Make sure session is registered
+ ct:sleep(?SLEEP),
+
+ Client1 =
+ ssl_test_lib:start_client([{node, ClientNode},
+ {port, Port}, {host, Hostname},
+ {mfa, {ssl_test_lib, session_info_result, []}},
+ {from, self()}, {options, ClientOpts}]),
+
+ receive
+ {Client1, SessionInfo} ->
+ ok;
+ {Client1, Other} ->
+ ct:fail(Other)
+ end,
+
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ ssl_test_lib:close(Client1).
+
+%--------------------------------------------------------------------------------
+
+alpn_not_supported_client(Config) when is_list(Config) ->
+ ClientOpts0 = ?config(client_opts, Config),
+ PrefProtocols = {client_preferred_next_protocols,
+ {client, [<<"http/1.0">>], <<"http/1.1">>}},
+ ClientOpts = [PrefProtocols] ++ ClientOpts0,
+ {ClientNode, _ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Client = ssl_test_lib:start_client_error([{node, ClientNode},
+ {port, 8888}, {host, Hostname},
+ {from, self()}, {options, ClientOpts}]),
+
+ ssl_test_lib:check_result(Client, {error,
+ {options,
+ {not_supported_in_sslv3, PrefProtocols}}}).
+
+%--------------------------------------------------------------------------------
+
+alpn_not_supported_server(Config) when is_list(Config)->
+ ServerOpts0 = ?config(server_opts, Config),
+ AdvProtocols = {next_protocols_advertised, [<<"spdy/2">>, <<"http/1.1">>, <<"http/1.0">>]},
+ ServerOpts = [AdvProtocols] ++ ServerOpts0,
+
+ {error, {options, {not_supported_in_sslv3, AdvProtocols}}} = ssl:listen(0, ServerOpts).
+
+%%--------------------------------------------------------------------
+%% Internal functions ------------------------------------------------
+%%--------------------------------------------------------------------
+
+run_failing_handshake(Config, ClientExtraOpts, ServerExtraOpts, ExpectedResult) ->
+ ClientOpts = ClientExtraOpts ++ ?config(client_opts, Config),
+ ServerOpts = ServerExtraOpts ++ ?config(server_opts, Config),
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, placeholder, []}},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+ ExpectedResult
+ = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE, placeholder, []}},
+ {options, ClientOpts}]).
+
+run_handshake(Config, ClientExtraOpts, ServerExtraOpts, ExpectedProtocol) ->
+ Data = "hello world",
+
+ ClientOpts0 = ?config(client_opts, Config),
+ ClientOpts = ClientExtraOpts ++ ClientOpts0,
+ ServerOpts0 = ?config(server_opts, Config),
+ ServerOpts = ServerExtraOpts ++ ServerOpts0,
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, ssl_receive_and_assert_alpn, [ExpectedProtocol, Data]}},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE, ssl_send_and_assert_alpn, [ExpectedProtocol, Data]}},
+ {options, ClientOpts}]),
+
+ ssl_test_lib:check_result(Server, ok, Client, ok).
+
+assert_alpn(Socket, Protocol) ->
+ ct:log("Negotiated Protocol ~p, Expecting: ~p ~n",
+ [ssl:negotiated_protocol(Socket), Protocol]),
+ Protocol = ssl:negotiated_protocol(Socket).
+
+assert_alpn_and_renegotiate_and_send_data(Socket, Protocol, Data) ->
+ assert_alpn(Socket, Protocol),
+ ct:log("Renegotiating ~n", []),
+ ok = ssl:renegotiate(Socket),
+ ssl:send(Socket, Data),
+ assert_alpn(Socket, Protocol),
+ ok.
+
+ssl_send_and_assert_alpn(Socket, Protocol, Data) ->
+ assert_alpn(Socket, Protocol),
+ ssl_send(Socket, Data).
+
+ssl_receive_and_assert_alpn(Socket, Protocol, Data) ->
+ assert_alpn(Socket, Protocol),
+ ssl_receive(Socket, Data).
+
+ssl_send(Socket, Data) ->
+ ct:log("Connection info: ~p~n",
+ [ssl:connection_information(Socket)]),
+ ssl:send(Socket, Data).
+
+ssl_receive(Socket, Data) ->
+ ssl_receive(Socket, Data, []).
+
+ssl_receive(Socket, Data, Buffer) ->
+ ct:log("Connection info: ~p~n",
+ [ssl:connection_information(Socket)]),
+ receive
+ {ssl, Socket, MoreData} ->
+ ct:log("Received ~p~n",[MoreData]),
+ NewBuffer = Buffer ++ MoreData,
+ case NewBuffer of
+ Data ->
+ ssl:send(Socket, "Got it"),
+ ok;
+ _ ->
+ ssl_receive(Socket, Data, NewBuffer)
+ end;
+ Other ->
+ ct:fail({unexpected_message, Other})
+ after 4000 ->
+ ct:fail({did_not_get, Data})
+ end.
+
+connection_info_result(Socket) ->
+ ssl:connection_information(Socket).
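%% The suite drives ALPN through ssl_test_lib; stripped of the framework, the
%% negotiation looks roughly like this (port and certificate paths are
%% placeholders):
{ok, Listen} = ssl:listen(9443, [{alpn_preferred_protocols,
                                  [<<"spdy/2">>, <<"http/1.1">>]},
                                 {certfile, "cert.pem"}, {keyfile, "key.pem"}]),
spawn(fun() ->
              {ok, Accepted} = ssl:transport_accept(Listen),
              ok = ssl:ssl_accept(Accepted)
      end),
{ok, Client} = ssl:connect("localhost", 9443,
                           [{alpn_advertised_protocols, [<<"http/1.1">>]}]),
{ok, <<"http/1.1">>} = ssl:negotiated_protocol(Client).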
diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl
index dc9e8934e6..e1a36dbbd4 100644
--- a/lib/ssl/test/ssl_basic_SUITE.erl
+++ b/lib/ssl/test/ssl_basic_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -65,7 +65,7 @@ groups() ->
{'tlsv1.2', [], all_versions_groups()},
{'tlsv1.1', [], all_versions_groups()},
{'tlsv1', [], all_versions_groups() ++ rizzo_tests()},
- {'sslv3', [], all_versions_groups() ++ rizzo_tests()},
+ {'sslv3', [], all_versions_groups() ++ rizzo_tests() ++ [ciphersuite_vs_version]},
{api,[], api_tests()},
{session, [], session_tests()},
{renegotiate, [], renegotiate_tests()},
@@ -90,7 +90,9 @@ basic_tests() ->
version_option,
connect_twice,
connect_dist,
- clear_pem_cache
+ clear_pem_cache,
+ defaults,
+ fallback
].
options_tests() ->
@@ -116,7 +118,6 @@ options_tests() ->
tcp_reuseaddr,
honor_server_cipher_order,
honor_client_cipher_order,
- ciphersuite_vs_version,
unordered_protocol_versions_server,
unordered_protocol_versions_client
].
@@ -177,6 +178,9 @@ cipher_tests() ->
srp_cipher_suites,
srp_anon_cipher_suites,
srp_dsa_cipher_suites,
+ rc4_rsa_cipher_suites,
+ rc4_ecdh_rsa_cipher_suites,
+ rc4_ecdsa_cipher_suites,
default_reject_anonymous].
cipher_tests_ec() ->
@@ -256,11 +260,6 @@ init_per_testcase(Case, Config) when Case == unordered_protocol_versions_client
_ ->
{skip, "TLS 1.2 need but not supported on this platform"}
end;
-init_per_testcase(no_authority_key_identifier, Config) ->
- %% Clear cach so that root cert will not
- %% be found.
- ssl:clear_pem_cache(),
- Config;
init_per_testcase(protocol_versions, Config) ->
ssl:stop(),
@@ -286,6 +285,14 @@ init_per_testcase(empty_protocol_versions, Config) ->
ssl:start(),
Config;
+init_per_testcase(fallback, Config) ->
+ case tls_record:highest_protocol_version([]) of
+ {3, N} when N > 1 ->
+ Config;
+ _ ->
+ {skip, "Not relevant if highest supported version is less than 3.2"}
+ end;
+
%% init_per_testcase(different_ca_peer_sign, Config0) ->
%% ssl_test_lib:make_mix_cert(Config0);
@@ -343,7 +350,7 @@ alerts(Config) when is_list(Config) ->
end, Alerts).
%%--------------------------------------------------------------------
new_options_in_accept() ->
- [{doc,"Test that you can set ssl options in ssl_accept/3 and not tcp upgrade"}].
+ [{doc,"Test that you can set ssl options in ssl_accept/3 and not only in tcp upgrade"}].
new_options_in_accept(Config) when is_list(Config) ->
ClientOpts = ?config(client_opts, Config),
ServerOpts0 = ?config(server_dsa_opts, Config),
@@ -361,7 +368,9 @@ new_options_in_accept(Config) when is_list(Config) ->
{host, Hostname},
{from, self()},
{mfa, {?MODULE, connection_info_result, []}},
- {options, [{versions, [sslv3]} | ClientOpts]}]),
+ {options, [{versions, [sslv3]},
+ {ciphers,[{rsa,rc4_128,sha}
+ ]} | ClientOpts]}]),
ct:log("Testcase ~p, Client ~p Server ~p ~n",
[self(), Client, Server]),
@@ -375,7 +384,7 @@ new_options_in_accept(Config) when is_list(Config) ->
%%--------------------------------------------------------------------
connection_info() ->
- [{doc,"Test the API function ssl:connection_info/1"}].
+ [{doc,"Test the API function ssl:connection_information/1"}].
connection_info(Config) when is_list(Config) ->
ClientOpts = ?config(client_opts, Config),
ServerOpts = ?config(server_opts, Config),
@@ -391,7 +400,7 @@ connection_info(Config) when is_list(Config) ->
{from, self()},
{mfa, {?MODULE, connection_info_result, []}},
{options,
- [{ciphers,[{rsa,rc4_128,sha,no_export}]} |
+ [{ciphers,[{rsa,des_cbc,sha,no_export}]} |
ClientOpts]}]),
ct:log("Testcase ~p, Client ~p Server ~p ~n",
@@ -400,7 +409,7 @@ connection_info(Config) when is_list(Config) ->
Version =
tls_record:protocol_version(tls_record:highest_protocol_version([])),
- ServerMsg = ClientMsg = {ok, {Version, {rsa,rc4_128,sha}}},
+ ServerMsg = ClientMsg = {ok, {Version, {rsa, des_cbc, sha}}},
ssl_test_lib:check_result(Server, ServerMsg, Client, ClientMsg),
@@ -629,7 +638,7 @@ clear_pem_cache(Config) when is_list(Config) ->
{status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)),
[_, _,_, _, Prop] = StatusInfo,
State = ssl_test_lib:state(Prop),
- [_,FilRefDb, _] = element(6, State),
+ [_,FilRefDb |_] = element(6, State),
{Server, Client} = basic_verify_test_no_close(Config),
2 = ets:info(FilRefDb, size),
ssl:clear_pem_cache(),
@@ -648,6 +657,34 @@ clear_pem_cache(Config) when is_list(Config) ->
0 = ets:info(FilRefDb, size).
%%--------------------------------------------------------------------
+
+fallback() ->
+ [{doc, "Test TLS_FALLBACK_SCSV downgrade prevention"}].
+
+fallback(Config) when is_list(Config) ->
+ ClientOpts = ?config(client_opts, Config),
+ ServerOpts = ?config(server_opts, Config),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+
+ Server =
+ ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+
+ Client =
+ ssl_test_lib:start_client_error([{node, ClientNode},
+ {port, Port}, {host, Hostname},
+ {from, self()}, {options,
+ [{fallback, true},
+ {versions, ['tlsv1']}
+ | ClientOpts]}]),
+
+ ssl_test_lib:check_result(Server, {error,{tls_alert,"inappropriate fallback"}},
+ Client, {error,{tls_alert,"inappropriate fallback"}}).
+
+%%--------------------------------------------------------------------
peername() ->
[{doc,"Test API function peername/1"}].
@@ -1779,6 +1816,32 @@ srp_dsa_cipher_suites(Config) when is_list(Config) ->
Version = tls_record:protocol_version(tls_record:highest_protocol_version([])),
Ciphers = ssl_test_lib:srp_dss_suites(),
run_suites(Ciphers, Version, Config, srp_dsa).
+%%-------------------------------------------------------------------
+rc4_rsa_cipher_suites()->
+ [{doc, "Test the RC4 ciphersuites"}].
+rc4_rsa_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
+ Version = tls_record:protocol_version(NVersion),
+ Ciphers = ssl_test_lib:rc4_suites(NVersion),
+ run_suites(Ciphers, Version, Config, rc4_rsa).
+%-------------------------------------------------------------------
+rc4_ecdh_rsa_cipher_suites()->
+ [{doc, "Test the RC4 ciphersuites"}].
+rc4_ecdh_rsa_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
+ Version = tls_record:protocol_version(NVersion),
+ Ciphers = ssl_test_lib:rc4_suites(NVersion),
+ run_suites(Ciphers, Version, Config, rc4_ecdh_rsa).
+
+%%-------------------------------------------------------------------
+rc4_ecdsa_cipher_suites()->
+ [{doc, "Test the RC4 ciphersuites"}].
+rc4_ecdsa_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
+ Version = tls_record:protocol_version(NVersion),
+ Ciphers = ssl_test_lib:rc4_suites(NVersion),
+ run_suites(Ciphers, Version, Config, rc4_ecdsa).
+
%%--------------------------------------------------------------------
default_reject_anonymous()->
[{doc,"Test that by default anonymous cipher suites are rejected "}].
@@ -2507,6 +2570,16 @@ no_reuses_session_server_restart_new_cert_file(Config) when is_list(Config) ->
ssl_test_lib:close(Client1).
%%--------------------------------------------------------------------
+defaults(Config) when is_list(Config)->
+ [_,
+ {supported, Supported},
+ {available, Available}]
+ = ssl:versions(),
+ true = lists:member(sslv3, Available),
+ false = lists:member(sslv3, Supported),
+ false = lists:member({rsa,rc4_128,sha}, ssl:cipher_suites()),
+ true = lists:member({rsa,rc4_128,sha}, ssl:cipher_suites(all)).
+%%--------------------------------------------------------------------
reuseaddr() ->
[{doc,"Test reuseaddr option"}].
@@ -2631,6 +2704,8 @@ honor_cipher_order(Config, Honor, ServerCiphers, ClientCiphers, Expected) ->
ssl_test_lib:close(Client).
%%--------------------------------------------------------------------
+ciphersuite_vs_version() ->
+ [{doc,"Test that an SSLv3 client cannot negotiate a TLSv* cipher suite."}].
ciphersuite_vs_version(Config) when is_list(Config) ->
{_ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
@@ -2756,7 +2831,7 @@ listen_socket(Config) ->
{error, enotconn} = ssl:send(ListenSocket, <<"data">>),
{error, enotconn} = ssl:recv(ListenSocket, 0),
- {error, enotconn} = ssl:connection_info(ListenSocket),
+ {error, enotconn} = ssl:connection_information(ListenSocket),
{error, enotconn} = ssl:peername(ListenSocket),
{error, enotconn} = ssl:peercert(ListenSocket),
{error, enotconn} = ssl:session_info(ListenSocket),
@@ -3370,7 +3445,7 @@ renegotiate_immediately(Socket) ->
end,
ok = ssl:renegotiate(Socket),
{error, renegotiation_rejected} = ssl:renegotiate(Socket),
- ct:sleep(?RENEGOTIATION_DISABLE_TIME +1),
+ ct:sleep(?RENEGOTIATION_DISABLE_TIME + ?SLEEP),
ok = ssl:renegotiate(Socket),
ct:log("Renegotiated again"),
ssl:send(Socket, "Hello world"),
@@ -3694,8 +3769,20 @@ run_suites(Ciphers, Version, Config, Type) ->
?config(server_ecdsa_opts, Config)};
ecdh_rsa ->
{?config(client_opts, Config),
- ?config(server_ecdh_rsa_opts, Config)}
- end,
+ ?config(server_ecdh_rsa_opts, Config)};
+ rc4_rsa ->
+ {?config(client_opts, Config),
+ [{ciphers, Ciphers} |
+ ?config(server_opts, Config)]};
+ rc4_ecdh_rsa ->
+ {?config(client_opts, Config),
+ [{ciphers, Ciphers} |
+ ?config(server_ecdh_rsa_opts, Config)]};
+ rc4_ecdsa ->
+ {?config(client_opts, Config),
+ [{ciphers, Ciphers} |
+ ?config(server_ecdsa_opts, Config)]}
+ end,
Result = lists:map(fun(Cipher) ->
cipher(Cipher, Version, Config, ClientOpts, ServerOpts) end,
@@ -3716,6 +3803,7 @@ erlang_cipher_suite(Suite) ->
cipher(CipherSuite, Version, Config, ClientOpts, ServerOpts) ->
%% process_flag(trap_exit, true),
ct:log("Testing CipherSuite ~p~n", [CipherSuite]),
+ ct:log("Server Opts ~p~n", [ServerOpts]),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
ErlangCipherSuite = erlang_cipher_suite(CipherSuite),
@@ -3748,10 +3836,10 @@ cipher(CipherSuite, Version, Config, ClientOpts, ServerOpts) ->
end.
connection_info_result(Socket) ->
- ssl:connection_info(Socket).
-
+ {ok, Info} = ssl:connection_information(Socket, [protocol, cipher_suite]),
+ {ok, {proplists:get_value(protocol, Info), proplists:get_value(cipher_suite, Info)}}.
version_info_result(Socket) ->
- {ok, {Version, _}} = ssl:connection_info(Socket),
+ {ok, [{version, Version}]} = ssl:connection_information(Socket, [version]),
{ok, Version}.
connect_dist_s(S) ->
diff --git a/lib/ssl/test/ssl_certificate_verify_SUITE.erl b/lib/ssl/test/ssl_certificate_verify_SUITE.erl
index b7864ba6e7..dab7a941db 100644
--- a/lib/ssl/test/ssl_certificate_verify_SUITE.erl
+++ b/lib/ssl/test/ssl_certificate_verify_SUITE.erl
@@ -443,7 +443,7 @@ verify_fun_always_run_client(Config) when is_list(Config) ->
{unknown, UserState};
(_, valid, [ChainLen]) ->
{valid, [ChainLen + 1]};
- (_, valid_peer, [2]) ->
+ (_, valid_peer, [1]) ->
{fail, "verify_fun_was_always_run"};
(_, valid_peer, UserState) ->
{valid, UserState}
@@ -482,7 +482,7 @@ verify_fun_always_run_server(Config) when is_list(Config) ->
{unknown, UserState};
(_, valid, [ChainLen]) ->
{valid, [ChainLen + 1]};
- (_, valid_peer, [2]) ->
+ (_, valid_peer, [1]) ->
{fail, "verify_fun_was_always_run"};
(_, valid_peer, UserState) ->
{valid, UserState}
diff --git a/lib/ssl/test/ssl_cipher_SUITE.erl b/lib/ssl/test/ssl_cipher_SUITE.erl
index f2dc1b52c1..3433f9a445 100644
--- a/lib/ssl/test/ssl_cipher_SUITE.erl
+++ b/lib/ssl/test/ssl_cipher_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,7 +38,7 @@
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [aes_decipher_good, aes_decipher_good_tls11, aes_decipher_fail, aes_decipher_fail_tls11].
+ [aes_decipher_good, aes_decipher_fail, padding_test].
groups() ->
[].
@@ -73,93 +73,122 @@ end_per_testcase(_TestCase, Config) ->
%% Test Cases --------------------------------------------------------
%%--------------------------------------------------------------------
aes_decipher_good() ->
- [{doc,"Decipher a known cryptotext."}].
+ [{doc,"Decipher a known cryptotext using a correct key"}].
aes_decipher_good(Config) when is_list(Config) ->
HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,148>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Content = <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56, "HELLO\n">>,
- Mac = <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>,
- Version = {3,0},
- {Content, Mac, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- Version1 = {3,1},
- {Content, Mac, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- ok.
+ CipherState = correct_cipher_state(),
+ decipher_check_good(HashSz, CipherState, {3,0}),
+ decipher_check_good(HashSz, CipherState, {3,1}),
+ decipher_check_good(HashSz, CipherState, {3,2}),
+ decipher_check_good(HashSz, CipherState, {3,3}).
%%--------------------------------------------------------------------
-
-aes_decipher_good_tls11() ->
- [{doc,"Decipher a known TLS 1.1 cryptotext."}].
-
-%% the fragment is actuall a TLS 1.1 record, with
-%% Version = TLS 1.1, we get the correct NextIV in #cipher_state
-aes_decipher_good_tls11(Config) when is_list(Config) ->
- HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,148>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Content = <<"HELLO\n">>,
- NextIV = <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>,
- Mac = <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>,
- Version = {3,2},
- {Content, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- Version1 = {3,2},
- {Content, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- ok.
-
-%%--------------------------------------------------------------------
-
aes_decipher_fail() ->
- [{doc,"Decipher a known cryptotext."}].
+ [{doc,"Decipher a known cryptotext using an incorrect key"}].
-%% same as above, last byte of key replaced
aes_decipher_fail(Config) when is_list(Config) ->
HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,254>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Version = {3,0},
- {Content, Mac, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- 32 = byte_size(Content),
- 32 = byte_size(Mac),
- Version1 = {3,1},
- {Content1, Mac1, _} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- 32 = byte_size(Content1),
- 32 = byte_size(Mac1),
- ok.
+ CipherState = incorrect_cipher_state(),
+ decipher_check_fail(HashSz, CipherState, {3,0}),
+ decipher_check_fail(HashSz, CipherState, {3,1}),
+ decipher_check_fail(HashSz, CipherState, {3,2}),
+ decipher_check_fail(HashSz, CipherState, {3,3}).
%%--------------------------------------------------------------------
-
-aes_decipher_fail_tls11() ->
- [{doc,"Decipher a known TLS 1.1 cryptotext."}].
-
-%% same as above, last byte of key replaced
-%% stricter padding checks in TLS 1.1 mean we get an alert instead
-aes_decipher_fail_tls11(Config) when is_list(Config) ->
- HashSz = 32,
- CipherState = #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
- key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,254>>},
- Fragment = <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
- 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
- 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
- 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>,
- Version = {3,2},
- #alert{level = ?FATAL, description = ?BAD_RECORD_MAC} =
- ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version),
- Version1 = {3,3},
- #alert{level = ?FATAL, description = ?BAD_RECORD_MAC} =
- ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, Fragment, Version1),
- ok.
-
+padding_test(Config) when is_list(Config) ->
+ HashSz = 16,
+ CipherState = correct_cipher_state(),
+ pad_test(HashSz, CipherState, {3,0}),
+ pad_test(HashSz, CipherState, {3,1}),
+ pad_test(HashSz, CipherState, {3,2}),
+ pad_test(HashSz, CipherState, {3,3}).
+
+%%--------------------------------------------------------------------
+% Internal functions --------------------------------------------------------
%%--------------------------------------------------------------------
+decipher_check_good(HashSz, CipherState, Version) ->
+ {Content, NextIV, Mac} = content_nextiv_mac(Version),
+ {Content, Mac, _} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, aes_fragment(Version), Version, true).
+
+decipher_check_fail(HashSz, CipherState, Version) ->
+ {Content, NextIV, Mac} = content_nextiv_mac(Version),
+ true = {Content, Mac, #cipher_state{iv = NextIV}} =/=
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, aes_fragment(Version), Version, true).
+
+pad_test(HashSz, CipherState, {3,0} = Version) ->
+ %% 3.0 does not have padding test
+ {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version),
+ {Content, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,0}), {3,0}, true),
+ {Content, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,0}), {3,0}, false);
+pad_test(HashSz, CipherState, {3,1} = Version) ->
+ %% 3.1 should have padding test, but may be disabled
+ {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version),
+ BadCont = badpad_content(Content),
+ {Content, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,1}) , {3,1}, false),
+ {BadCont, Mac, #cipher_state{iv = NextIV}} =
+ ssl_cipher:decipher(?AES_CBC, HashSz, CipherState, badpad_aes_fragment({3,1}), {3,1}, true);
+pad_test(HashSz, CipherState, Version) ->
+ %% 3.2 and 3.3 must have padding test
+ {Content, NextIV, Mac} = badpad_content_nextiv_mac(Version),
+ BadCont = badpad_content(Content),
+ {BadCont, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState,
+ badpad_aes_fragment(Version), Version, false),
+ {BadCont, Mac, #cipher_state{iv = NextIV}} = ssl_cipher:decipher(?AES_CBC, HashSz, CipherState,
+ badpad_aes_fragment(Version), Version, true).
+
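+%% Pre-computed test vectors: AES-CBC fragments (well-formed and with bad
+%% padding) and the matching {Content, NextIV, MAC} triples for each
+%% protocol version.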
+aes_fragment({3,N}) when N == 0; N == 1 ->
+ <<197,9,6,109,242,87,80,154,85,250,110,81,119,95,65,185,53,206,216,153,246,169,
+ 119,177,178,238,248,174,253,220,242,81,33,0,177,251,91,44,247,53,183,198,165,
+ 63,20,194,159,107>>;
+
+aes_fragment(_) ->
+ <<220,193,179,139,171,33,143,245,202,47,123,251,13,232,114,8,
+ 190,162,74,31,186,227,119,155,94,74,119,79,169,193,240,160,
+ 198,181,81,19,98,162,213,228,74,224,253,168,156,59,195,122,
+ 108,101,107,242,20,15,169,150,163,107,101,94,93,104,241,165>>.
+
+badpad_aes_fragment({3,N}) when N == 0; N == 1 ->
+ <<186,139,125,10,118,21,26,248,120,108,193,104,87,118,145,79,225,55,228,10,105,
+ 30,190,37,1,88,139,243,210,99,65,41>>;
+badpad_aes_fragment(_) ->
+ <<137,31,14,77,228,80,76,103,183,125,55,250,68,190,123,131,117,23,229,180,207,
+ 94,121,137,117,157,109,99,113,61,190,138,131,229,201,120,142,179,172,48,77,
+ 234,19,240,33,38,91,93>>.
+
+content_nextiv_mac({3,N}) when N == 0; N == 1 ->
+ {<<"HELLO\n">>,
+ <<72,196,247,97,62,213,222,109,210,204,217,186,172,184, 197,148>>,
+ <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>};
+content_nextiv_mac(_) ->
+ {<<"HELLO\n">>,
+ <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>,
+ <<71,136,212,107,223,200,70,232,127,116,148,205,232,35,158,113,237,174,15,217,192,168,35,8,6,107,107,233,25,174,90,111>>}.
+
+badpad_content_nextiv_mac({3,N}) when N == 0; N == 1 ->
+ {<<"HELLO\n">>,
+ <<225,55,228,10,105,30,190,37,1,88,139,243,210,99,65,41>>,
+ <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>
+ };
+badpad_content_nextiv_mac(_) ->
+ {<<"HELLO\n">>,
+ <<133,211,45,189,179,229,56,86,11,178,239,159,14,160,253,140>>,
+ <<183,139,16,132,10,209,67,86,168,100,61,217,145,57,36,56>>
+ }.
+
+badpad_content(Content) ->
+ %% BadContent will fail mac test
+ <<16#F0, Content/binary>>.
+
+correct_cipher_state() ->
+ #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
+ key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,148>>}.
+
+incorrect_cipher_state() ->
+ #cipher_state{iv = <<59,201,85,117,188,206,224,136,5,109,46,70,104,79,4,9>>,
+ key = <<72,196,247,97,62,213,222,109,210,204,217,186,172,184,197,254>>}.
+
diff --git a/lib/ssl/test/ssl_crl_SUITE.erl b/lib/ssl/test/ssl_crl_SUITE.erl
index bad0949ec4..c6bf8898ad 100644
--- a/lib/ssl/test/ssl_crl_SUITE.erl
+++ b/lib/ssl/test/ssl_crl_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -26,43 +26,40 @@
-include_lib("common_test/include/ct.hrl").
-include_lib("public_key/include/public_key.hrl").
--define(TIMEOUT, 120000).
-define(LONG_TIMEOUT, 600000).
--define(SLEEP, 1000).
--define(OPENSSL_RENEGOTIATE, "R\n").
--define(OPENSSL_QUIT, "Q\n").
--define(OPENSSL_GARBAGE, "P\n").
--define(EXPIRE, 10).
%%--------------------------------------------------------------------
%% Common Test interface functions -----------------------------------
%%--------------------------------------------------------------------
-suite() -> [{ct_hooks,[ts_install_cth]}].
+suite() ->
+ [{ct_hooks,[ts_install_cth]}].
all() ->
[
- {group, basic},
- {group, v1_crl},
- {group, idp_crl}
+ {group, check_true},
+ {group, check_peer},
+ {group, check_best_effort}
].
groups() ->
- [{basic, [], basic_tests()},
- {v1_crl, [], v1_crl_tests()},
- {idp_crl, [], idp_crl_tests()}].
+ [
+ {check_true, [], [{group, v2_crl},
+ {group, v1_crl},
+ {group, idp_crl}]},
+ {check_peer, [], [{group, v2_crl},
+ {group, v1_crl},
+ {group, idp_crl}]},
+ {check_best_effort, [], [{group, v2_crl},
+ {group, v1_crl},
+ {group, idp_crl}]},
+ {v2_crl, [], basic_tests()},
+ {v1_crl, [], basic_tests()},
+ {idp_crl, [], basic_tests()}].
basic_tests() ->
[crl_verify_valid, crl_verify_revoked].
-v1_crl_tests() ->
- [crl_verify_valid, crl_verify_revoked].
-
-idp_crl_tests() ->
- [crl_verify_valid, crl_verify_revoked].
-
-%%%================================================================
-%%% Suite init/end
init_per_suite(Config0) ->
Dog = ct:timetrap(?LONG_TIMEOUT *2),
@@ -70,10 +67,7 @@ init_per_suite(Config0) ->
false ->
{skip, "Openssl not found"};
_ ->
- TLSVersion = ?config(tls_version, Config0),
OpenSSL_version = (catch os:cmd("openssl version")),
- ct:log("TLS version: ~p~nOpenSSL version: ~p~n~n~p:module_info(): ~p~n~nssl:module_info(): ~p~n",
- [TLSVersion, OpenSSL_version, ?MODULE, ?MODULE:module_info(), ssl:module_info()]),
case ssl_test_lib:enough_openssl_crl_support(OpenSSL_version) of
false ->
{skip, io_lib:format("Bad openssl version: ~p",[OpenSSL_version])};
@@ -81,7 +75,6 @@ init_per_suite(Config0) ->
catch crypto:stop(),
try crypto:start() of
ok ->
- ssl:start(),
{ok, Hostname0} = inet:gethostname(),
IPfamily =
case lists:member(list_to_atom(Hostname0), ct:get_config(ipv6_hosts,[])) of
@@ -89,8 +82,7 @@ init_per_suite(Config0) ->
false -> inet
end,
[{ipfamily,IPfamily}, {watchdog, Dog}, {openssl_version,OpenSSL_version} | Config0]
- catch _C:_E ->
- ct:log("crypto:start() caught ~p:~p",[_C,_E]),
+ catch _:_ ->
{skip, "Crypto did not start"}
end
end
@@ -100,443 +92,175 @@ end_per_suite(_Config) ->
ssl:stop(),
application:stop(crypto).
-%%%================================================================
-%%% Group init/end
-
-init_per_group(Group, Config) ->
- ssl:start(),
- inets:start(),
- CertDir = filename:join(?config(priv_dir, Config), Group),
- DataDir = ?config(data_dir, Config),
- ServerRoot = make_dir_path([?config(priv_dir,Config), Group, tmp]),
- %% start a HTTP server to serve the CRLs
- {ok, Httpd} = inets:start(httpd, [{ipfamily, ?config(ipfamily,Config)},
- {server_name, "localhost"}, {port, 0},
- {server_root, ServerRoot},
- {document_root, CertDir},
- {modules, [mod_get]}
- ]),
- [{port,Port}] = httpd:info(Httpd, [port]),
- ct:log("~p:~p~nHTTPD IP family=~p, port=~p~n", [?MODULE, ?LINE, ?config(ipfamily,Config), Port]),
- CertOpts = [{crl_port,Port}|cert_opts(Group)],
- Result = make_certs:all(DataDir, CertDir, CertOpts),
- ct:log("~p:~p~nmake_certs:all(~n DataDir=~p,~n CertDir=~p,~n ServerRoot=~p~n Opts=~p~n) returned ~p~n", [?MODULE,?LINE,DataDir, CertDir, ServerRoot, CertOpts, Result]),
- [{make_cert_result, Result}, {cert_dir, CertDir}, {httpd, Httpd} | Config].
-
-cert_opts(v1_crl) -> [{v2_crls, false}];
-cert_opts(idp_crl) -> [{issuing_distribution_point, true}];
-cert_opts(_) -> [].
-
-make_dir_path(PathComponents) ->
- lists:foldl(fun(F,P0) -> file:make_dir(P=filename:join(P0,F)), P end,
- "",
- PathComponents).
-
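+%% The crl_check variant (true | peer | best_effort) is stored in Config by
+%% the top-level group; for the idp_crl groups certificate generation is
+%% deferred to init_per_testcase/2, where an HTTP server is started to serve
+%% the CRLs.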
+init_per_group(check_true, Config) ->
+ [{crl_check, true} | Config];
+init_per_group(check_peer, Config) ->
+ [{crl_check, peer} | Config];
+init_per_group(check_best_effort, Config) ->
+ [{crl_check, best_effort} | Config];
+init_per_group(Group, Config0) ->
+ case is_idp(Group) of
+ true ->
+ [{idp_crl, true} | Config0];
+ false ->
+ DataDir = ?config(data_dir, Config0),
+ CertDir = filename:join(?config(priv_dir, Config0), Group),
+ {CertOpts, Config} = init_certs(CertDir, Group, Config0),
+ Result = make_certs:all(DataDir, CertDir, CertOpts),
+ [{make_cert_result, Result}, {cert_dir, CertDir}, {idp_crl, false} | Config]
+ end.
end_per_group(_GroupName, Config) ->
- case ?config(httpd, Config) of
- undefined -> ok;
- Pid ->
- ct:log("Stop httpd ~p",[Pid]),
- ok = inets:stop(httpd, Pid)
- ,ct:log("Stopped",[])
- end,
- inets:stop(),
+
Config.
+init_per_testcase(Case, Config0) ->
+ case ?config(idp_crl, Config0) of
+ true ->
+ end_per_testcase(Case, Config0),
+ inets:start(),
+ ssl:start(),
+ ServerRoot = make_dir_path([?config(priv_dir, Config0), idp_crl, tmp]),
+ %% start a HTTP server to serve the CRLs
+ {ok, Httpd} = inets:start(httpd, [{ipfamily, ?config(ipfamily, Config0)},
+ {server_name, "localhost"}, {port, 0},
+ {server_root, ServerRoot},
+ {document_root,
+ filename:join(?config(priv_dir, Config0), idp_crl)}
+ ]),
+ [{port,Port}] = httpd:info(Httpd, [port]),
+ Config = [{httpd_port, Port} | Config0],
+ DataDir = ?config(data_dir, Config),
+ CertDir = filename:join(?config(priv_dir, Config0), idp_crl),
+ {CertOpts, Config} = init_certs(CertDir, idp_crl, Config),
+ Result = make_certs:all(DataDir, CertDir, CertOpts),
+ [{make_cert_result, Result}, {cert_dir, CertDir} | Config];
+ false ->
+ end_per_testcase(Case, Config0),
+ ssl:start(),
+ Config0
+ end.
+
+end_per_testcase(_, Config) ->
+ case ?config(idp_crl, Config) of
+ true ->
+ ssl:stop(),
+ inets:stop();
+ false ->
+ ssl:stop()
+ end.
+
%%%================================================================
%%% Test cases
+%%%================================================================
crl_verify_valid() ->
[{doc,"Verify a simple valid CRL chain"}].
crl_verify_valid(Config) when is_list(Config) ->
- process_flag(trap_exit, true),
PrivDir = ?config(cert_dir, Config),
- ServerOpts = [{keyfile, filename:join([PrivDir, "server", "key.pem"])},
- {certfile, filename:join([PrivDir, "server", "cert.pem"])},
- {cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}],
-
+ Check = ?config(crl_check, Config),
+ ServerOpts = [{keyfile, filename:join([PrivDir, "server", "key.pem"])},
+ {certfile, filename:join([PrivDir, "server", "cert.pem"])},
+ {cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])}],
+ ClientOpts = case ?config(idp_crl, Config) of
+ true ->
+ [{cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])},
+ {crl_check, Check},
+ {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}},
+ {verify, verify_peer}];
+ false ->
+ [{cacertfile, filename:join([PrivDir, "server", "cacerts.pem"])},
+ {crl_check, Check},
+ {verify, verify_peer}]
+ end,
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
- Data = "From openssl to erlang",
- Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
- {from, self()},
- {mfa, {?MODULE, erlang_ssl_receive, [Data]}},
- %{mfa, {ssl_test_lib, no_result, []}},
- {options, ServerOpts}]),
- ct:log("~p:~p~nreturn from ssl_test_lib:start_server:~n~p",[?MODULE,?LINE,Server]),
- Port = ssl_test_lib:inet_port(Server),
-
- CACerts = load_cert(filename:join([PrivDir, "erlangCA", "cacerts.pem"])),
-
- ClientOpts = [{cacerts, CACerts},
- {verify, verify_peer},
- {verify_fun, {fun validate_function/3, {CACerts, []}}}],
-
-
- ct:log("~p:~p~ncalling ssl_test_lib:start_client",[?MODULE,?LINE]),
- Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
- {host, Hostname},
- {from, self()},
- {mfa, {?MODULE,
- erlang_ssl_send, [Data]}},
- %{mfa, {ssl_test_lib, no_result, []}},
- {options, ClientOpts}]),
- ct:log("~p:~p~nreturn from ssl_test_lib:start_client:~n~p",[?MODULE,?LINE,Client]),
-
- ssl_test_lib:check_result(Client, ok, Server, ok),
-
- %% Clean close down! Server needs to be closed first !!
- ssl_test_lib:close(Server),
- ssl_test_lib:close(Client),
- process_flag(trap_exit, false).
+ ssl_crl_cache:insert({file, filename:join([PrivDir, "erlangCA", "crl.pem"])}),
+ ssl_crl_cache:insert({file, filename:join([PrivDir, "otpCA", "crl.pem"])}),
+
+ crl_verify_valid(Hostname, ServerNode, ServerOpts, ClientNode, ClientOpts).
crl_verify_revoked() ->
- [{doc,"Verify a simple valid CRL chain"}].
-crl_verify_revoked(Config) when is_list(Config) ->
- process_flag(trap_exit, true),
+    [{doc,"Verify a simple CRL chain when peer cert is revoked"}].
+crl_verify_revoked(Config) when is_list(Config) ->
PrivDir = ?config(cert_dir, Config),
+ Check = ?config(crl_check, Config),
ServerOpts = [{keyfile, filename:join([PrivDir, "revoked", "key.pem"])},
- {certfile, filename:join([PrivDir, "revoked", "cert.pem"])},
- {cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}],
- ct:log("~p:~p~nserver opts ~p~n", [?MODULE,?LINE, ServerOpts]),
+ {certfile, filename:join([PrivDir, "revoked", "cert.pem"])},
+ {cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])}],
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
- Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
- {from, self()},
- %{mfa, {?MODULE, erlang_ssl_receive, [Data]}},
- {mfa, {ssl_test_lib, no_result, []}},
- {options, ServerOpts}]),
+ Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {options, ServerOpts}]),
Port = ssl_test_lib:inet_port(Server),
+
+ ssl_crl_cache:insert({file, filename:join([PrivDir, "erlangCA", "crl.pem"])}),
+ ssl_crl_cache:insert({file, filename:join([PrivDir, "otpCA", "crl.pem"])}),
+
+ ClientOpts = case ?config(idp_crl, Config) of
+ true ->
+ [{cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])},
+ {crl_cache, {ssl_crl_cache, {internal, [{http, 5000}]}}},
+ {crl_check, Check},
+ {verify, verify_peer}];
+ false ->
+ [{cacertfile, filename:join([PrivDir, "revoked", "cacerts.pem"])},
+ {crl_check, Check},
+ {verify, verify_peer}]
+ end,
+
+ Client = ssl_test_lib:start_client_error([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {options, ClientOpts}]),
+ receive
+	{Server, AlertOrClose} ->
+	    ct:pal("Server Alert or Close ~p", [AlertOrClose])
+ end,
+ ssl_test_lib:check_result(Client, {error, {tls_alert, "certificate revoked"}}).
- CACerts = load_cert(filename:join([PrivDir, "erlangCA", "cacerts.pem"])),
- ClientOpts = [{cacerts, CACerts},
- {verify, verify_peer},
- {verify_fun, {fun validate_function/3, {CACerts, []}}}],
- {connect_failed, _} = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+crl_verify_valid(Hostname, ServerNode, ServerOpts, ClientNode, ClientOpts) ->
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {ssl_test_lib,
+ send_recv_result_active, []}},
+ {options, ServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
{from, self()},
- %{mfa, {?MODULE,
- %erlang_ssl_receive, [Data]}},
- {mfa, {ssl_test_lib, no_result, []}},
+ {mfa, {ssl_test_lib,
+ send_recv_result_active, []}},
{options, ClientOpts}]),
+
+ ssl_test_lib:check_result(Client, ok, Server, ok),
- %% Clean close down! Server needs to be closed first !!
ssl_test_lib:close(Server),
- process_flag(trap_exit, false).
-
-%%%================================================================
-%%% Lib
-
-erlang_ssl_receive(Socket, Data) ->
- ct:log("~p:~p~nConnection info: ~p~n",
- [?MODULE,?LINE, ssl:connection_info(Socket)]),
- receive
- {ssl, Socket, Data} ->
- ct:log("~p:~p~nReceived ~p~n",[?MODULE,?LINE, Data]),
- %% open_ssl server sometimes hangs waiting in blocking read
- ssl:send(Socket, "Got it"),
- ok;
- {ssl, Socket, Byte} when length(Byte) == 1 ->
- erlang_ssl_receive(Socket, tl(Data));
- {Port, {data,Debug}} when is_port(Port) ->
- ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]),
- erlang_ssl_receive(Socket,Data);
- Other ->
- ct:fail({unexpected_message, Other})
- after 4000 ->
- ct:fail({did_not_get, Data})
- end.
-
-
-erlang_ssl_send(Socket, Data) ->
- ct:log("~p:~p~nConnection info: ~p~n",
- [?MODULE,?LINE, ssl:connection_info(Socket)]),
- ssl:send(Socket, Data),
- ok.
-
-load_certs(undefined) ->
- undefined;
-load_certs(CertDir) ->
- case file:list_dir(CertDir) of
- {ok, Certs} ->
- load_certs(lists:map(fun(Cert) -> filename:join(CertDir, Cert)
- end, Certs), []);
- {error, _} ->
- undefined
- end.
-
-load_certs([], Acc) ->
- ct:log("~p:~p~nSuccessfully loaded ~p CA certificates~n", [?MODULE,?LINE, length(Acc)]),
- Acc;
-load_certs([Cert|Certs], Acc) ->
- case filelib:is_dir(Cert) of
- true ->
- load_certs(Certs, Acc);
- _ ->
- %ct:log("~p:~p~nLoading certificate ~p~n", [?MODULE,?LINE, Cert]),
- load_certs(Certs, load_cert(Cert) ++ Acc)
- end.
-
-load_cert(Cert) ->
- {ok, Bin} = file:read_file(Cert),
- case filename:extension(Cert) of
- ".der" ->
- %% no decoding necessary
- [Bin];
- _ ->
- %% assume PEM otherwise
- Contents = public_key:pem_decode(Bin),
- [DER || {Type, DER, Cipher} <- Contents, Type == 'Certificate', Cipher == 'not_encrypted']
- end.
-
-%% @doc Validator function for SSL negotiation.
-%%
-validate_function(Cert, valid_peer, State) ->
- ct:log("~p:~p~nvaliding peer ~p with ~p intermediate certs~n",
- [?MODULE,?LINE, get_common_name(Cert),
- length(element(2, State))]),
- %% peer certificate validated, now check the CRL
- Res = (catch check_crl(Cert, State)),
- ct:log("~p:~p~nCRL validate result for ~p: ~p~n",
- [?MODULE,?LINE, get_common_name(Cert), Res]),
- {Res, State};
-validate_function(Cert, valid, {TrustedCAs, IntermediateCerts}=State) ->
- case public_key:pkix_is_self_signed(Cert) of
- true ->
- ct:log("~p:~p~nroot certificate~n",[?MODULE,?LINE]),
- %% this is a root cert, no CRL
- {valid, {TrustedCAs, [Cert|IntermediateCerts]}};
- false ->
- %% check is valid CA certificate, add to the list of
- %% intermediates
- Res = (catch check_crl(Cert, State)),
- ct:log("~p:~p~nCRL intermediate CA validate result for ~p: ~p~n",
- [?MODULE,?LINE, get_common_name(Cert), Res]),
- {Res, {TrustedCAs, [Cert|IntermediateCerts]}}
- end;
-validate_function(_Cert, _Event, State) ->
- %ct:log("~p:~p~nignoring event ~p~n", [?MODULE,?LINE, _Event]),
- {valid, State}.
+ ssl_test_lib:close(Client).
-%% @doc Given a certificate, find CRL distribution points for the given
-%% certificate, fetch, and attempt to validate each CRL through
-%% issuer_function/4.
-%%
-check_crl(Cert, State) ->
- %% pull the CRL distribution point(s) out of the certificate, if any
- ct:log("~p:~p~ncheck_crl(~n Cert=~p,~nState=~p~n)",[?MODULE,?LINE,Cert,State]),
- case pubkey_cert:select_extension(
- ?'id-ce-cRLDistributionPoints',
- pubkey_cert:extensions_list(Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.extensions)) of
- undefined ->
- ct:log("~p:~p~nno CRL distribution points for ~p~n",
- [?MODULE,?LINE, get_common_name(Cert)]),
- %% fail; we can't validate if there's no CRL
- no_crl;
- CRLExtension ->
- ct:log("~p:~p~nCRLExtension=~p)",[?MODULE,?LINE,CRLExtension]),
- CRLDistPoints = CRLExtension#'Extension'.extnValue,
- DPointsAndCRLs = lists:foldl(fun(Point, Acc) ->
- %% try to read the CRL over http or from a
- %% local file
- case fetch_point(Point) of
- not_available ->
- ct:log("~p:~p~nfetch_point returned~n~p~n)",[?MODULE,?LINE,not_available]),
- Acc;
- Res ->
- ct:log("~p:~p~nfetch_point returned~n~p~n)",[?MODULE,?LINE,Res]),
- [{Point, Res} | Acc]
- end
- end, [], CRLDistPoints),
- public_key:pkix_crls_validate(Cert,
- DPointsAndCRLs,
- [{issuer_fun,
- {fun issuer_function/4, State}}])
- end.
-
-%% @doc Given a list of distribution points for CRLs, certificates and
-%% both trusted and intermediary certificates, attempt to build and
-%% authority chain back via build_chain to verify that it is valid.
-%%
-issuer_function(_DP, CRL, _Issuer, {TrustedCAs, IntermediateCerts}) ->
- %% XXX the 'Issuer' we get passed here is the AuthorityKeyIdentifier,
- %% which we are not currently smart enough to understand
- %% Read the CA certs out of the file
- ct:log("~p:~p~nissuer_function(~nCRL=~p,~nLast param=~p)",[?MODULE,?LINE,CRL, {TrustedCAs, IntermediateCerts}]),
- Certs = [public_key:pkix_decode_cert(DER, otp) || DER <- TrustedCAs],
- %% get the real issuer out of the CRL
- Issuer = public_key:pkix_normalize_name(
- pubkey_cert_records:transform(
- CRL#'CertificateList'.tbsCertList#'TBSCertList'.issuer, decode)),
- %% assume certificates are ordered from root to tip
- case find_issuer(Issuer, IntermediateCerts ++ Certs) of
- undefined ->
- ct:log("~p:~p~nunable to find certificate matching CRL issuer ~p~n",
- [?MODULE,?LINE, Issuer]),
- error;
- IssuerCert ->
- ct:log("~p:~p~nIssuerCert=~p~n)",[?MODULE,?LINE,IssuerCert]),
- case build_chain({public_key:pkix_encode('OTPCertificate',
- IssuerCert,
- otp),
- IssuerCert}, IntermediateCerts, Certs, []) of
- undefined ->
- error;
- {OTPCert, Path} ->
- {ok, OTPCert, Path}
- end
- end.
-
-%% @doc Attempt to build authority chain back using intermediary
-%% certificates, falling back on trusted certificates if the
-%% intermediary chain of certificates does not fully extend to the
-%% root.
-%%
-%% Returns: {RootCA :: #OTPCertificate{}, Chain :: [der_encoded()]}
-%%
-build_chain({DER, Cert}, IntCerts, TrustedCerts, Acc) ->
- %% check if this cert is self-signed, if it is, we've reached the
- %% root of the chain
- Issuer = public_key:pkix_normalize_name(
- Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.issuer),
- Subject = public_key:pkix_normalize_name(
- Cert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject),
- case Issuer == Subject of
- true ->
- case find_issuer(Issuer, TrustedCerts) of
- undefined ->
- ct:log("~p:~p~nself-signed certificate is NOT trusted~n",[?MODULE,?LINE]),
- undefined;
- TrustedCert ->
- %% return the cert from the trusted list, to prevent
- %% issuer spoofing
- {TrustedCert,
- [public_key:pkix_encode(
- 'OTPCertificate', TrustedCert, otp)|Acc]}
- end;
- false ->
- Match = lists:foldl(
- fun(C, undefined) ->
- S = public_key:pkix_normalize_name(C#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject),
- %% compare the subject to the current issuer
- case Issuer == S of
- true ->
- %% we've found our man
- {public_key:pkix_encode('OTPCertificate', C, otp), C};
- false ->
- undefined
- end;
- (_E, A) ->
- %% already matched
- A
- end, undefined, IntCerts),
- case Match of
- undefined when IntCerts /= TrustedCerts ->
- %% continue the chain by using the trusted CAs
- ct:log("~p:~p~nRan out of intermediate certs, switching to trusted certs~n",[?MODULE,?LINE]),
- build_chain({DER, Cert}, TrustedCerts, TrustedCerts, Acc);
- undefined ->
- ct:log("Can't construct chain of trust beyond ~p~n",
- [?MODULE,?LINE, get_common_name(Cert)]),
- %% can't find the current cert's issuer
- undefined;
- Match ->
- build_chain(Match, IntCerts, TrustedCerts, [DER|Acc])
- end
- end.
-
-%% @doc Given a certificate and a list of trusted or intermediary
-%% certificates, attempt to find a match in the list or bail with
-%% undefined.
-find_issuer(Issuer, Certs) ->
- lists:foldl(
- fun(OTPCert, undefined) ->
- %% check if this certificate matches the issuer
- Normal = public_key:pkix_normalize_name(
- OTPCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject),
- case Normal == Issuer of
- true ->
- OTPCert;
- false ->
- undefined
- end;
- (_E, Acc) ->
- %% already found a match
- Acc
- end, undefined, Certs).
-
-%% @doc Find distribution points for a given CRL and then attempt to
-%% fetch the CRL from the first available.
-fetch_point(#'DistributionPoint'{distributionPoint={fullName, Names}}) ->
- Decoded = [{NameType,
- pubkey_cert_records:transform(Name, decode)}
- || {NameType, Name} <- Names],
- ct:log("~p:~p~ncall fetch(~nDecoded=~p~n)",[?MODULE,?LINE,Decoded]),
- fetch(Decoded).
-
-%% @doc Given a list of locations to retrieve a CRL from, attempt to
-%% retrieve either from a file or http resource and bail as soon as
-%% it can be found.
-%%
-%% Currently, only hand a armored PEM or DER encoded file, with
-%% defaulting to DER.
-%%
-fetch([]) ->
- not_available;
-fetch([{uniformResourceIdentifier, "http"++_=URL}|Rest]) ->
- ct:log("~p:~p~ngetting CRL from ~p~n", [?MODULE,?LINE, URL]),
- case httpc:request(get, {URL, []}, [], [{body_format, binary}]) of
- {ok, {_Status, _Headers, Body}} ->
- case Body of
- <<"-----BEGIN", _/binary>> ->
- ct:log("~p:~p~npublic_key:pem_decode,~nBody=~p~n)",[?MODULE,?LINE,Body]),
- [{'CertificateList',
- DER, _}=CertList] = public_key:pem_decode(Body),
- ct:log("~p:~p~npublic_key:pem_entry_decode,~nCertList=~p~n)",[?MODULE,?LINE,CertList]),
- {DER, public_key:pem_entry_decode(CertList)};
- _ ->
- ct:log("~p:~p~npublic_key:pem_entry_decode,~nBody=~p~n)",[?MODULE,?LINE,{'CertificateList', Body, not_encrypted}]),
- %% assume DER encoded
- try
- public_key:pem_entry_decode({'CertificateList', Body, not_encrypted})
- of
- CertList -> {Body, CertList}
- catch
- _C:_E ->
- ct:log("~p:~p~nfailed DER assumption~nRest=~p", [?MODULE,?LINE,Rest]),
- fetch(Rest)
- end
- end;
- {error, _Reason} ->
- ct:log("~p:~p~nfailed to get CRL ~p~n", [?MODULE,?LINE, _Reason]),
- fetch(Rest);
- Other ->
- ct:log("~p:~p~nreally failed to get CRL ~p~n", [?MODULE,?LINE, Other]),
- fetch(Rest)
- end;
-fetch([Loc|Rest]) ->
- %% unsupported CRL location
- ct:log("~p:~p~nunable to fetch CRL from unsupported location ~p~n",
- [?MODULE,?LINE, Loc]),
- fetch(Rest).
+%%--------------------------------------------------------------------
+%% Internal functions ------------------------------------------------
+%%--------------------------------------------------------------------
+is_idp(idp_crl) ->
+ true;
+is_idp(_) ->
+ false.
+
+init_certs(_, v1_crl, Config) ->
+ {[{v2_crls, false}], Config};
+init_certs(_, idp_crl, Config) ->
+ Port = ?config(httpd_port, Config),
+ {[{crl_port,Port},
+ {issuing_distribution_point, true}], Config
+ };
+init_certs(_, _, Config) ->
+ {[], Config}.
-%% get the common name attribute out of an OTPCertificate record
-get_common_name(OTPCert) ->
- %% You'd think there'd be an easier way than this giant mess, but I
- %% couldn't find one.
- {rdnSequence, Subject} = OTPCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.subject,
- case [Attribute#'AttributeTypeAndValue'.value || [Attribute] <- Subject,
- Attribute#'AttributeTypeAndValue'.type == ?'id-at-commonName'] of
- [Att] ->
- case Att of
- {teletexString, Str} -> Str;
- {printableString, Str} -> Str;
- {utf8String, Bin} -> binary_to_list(Bin)
- end;
- _ ->
- unknown
- end.
+make_dir_path(PathComponents) ->
+ lists:foldl(fun(F,P0) -> file:make_dir(P=filename:join(P0,F)), P end,
+ "",
+ PathComponents).
diff --git a/lib/ssl/test/ssl_handshake_SUITE.erl b/lib/ssl/test/ssl_handshake_SUITE.erl
index 8dca733526..d4433393a1 100644
--- a/lib/ssl/test/ssl_handshake_SUITE.erl
+++ b/lib/ssl/test/ssl_handshake_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -40,7 +40,47 @@ all() -> [decode_hello_handshake,
encode_single_hello_sni_extension_correctly,
decode_single_hello_sni_extension_correctly,
decode_empty_server_sni_correctly,
- select_proper_tls_1_2_rsa_default_hashsign].
+ select_proper_tls_1_2_rsa_default_hashsign,
+ ignore_hassign_extension_pre_tls_1_2].
+
+%%--------------------------------------------------------------------
+init_per_suite(Config) ->
+ Config.
+end_per_suite(Config) ->
+ Config.
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_,Config) ->
+ Config.
+
+init_per_testcase(ignore_hassign_extension_pre_tls_1_2, Config0) ->
+ catch crypto:stop(),
+ try crypto:start() of
+ ok ->
+ case is_supported(sha512) of
+ true ->
+ ssl:start(),
+	     %% make rsa certs using openssl
+ Result =
+ (catch make_certs:all(?config(data_dir, Config0),
+ ?config(priv_dir, Config0))),
+ ct:log("Make certs ~p~n", [Result]),
+ ssl_test_lib:cert_options(Config0);
+ false ->
+ {skip, "Crypto did not support sha512"}
+ end
+ catch _:_ ->
+ {skip, "Crypto did not start"}
+ end;
+init_per_testcase(_, Config0) ->
+ Config0.
+
+end_per_testcase(ignore_hassign_extension_pre_tls_1_2, _) ->
+ crypto:stop();
+end_per_testcase(_TestCase, Config) ->
+ Config.
%%--------------------------------------------------------------------
%% Test Cases --------------------------------------------------------
@@ -121,3 +161,18 @@ select_proper_tls_1_2_rsa_default_hashsign(_Config) ->
{md5sha, rsa} = ssl_handshake:select_hashsign_algs(undefined, ?rsaEncryption, {3,2}),
{md5sha, rsa} = ssl_handshake:select_hashsign_algs(undefined, ?rsaEncryption, {3,0}).
+
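+%% For TLS 1.2 the advertised hash/signature algorithms are honoured
+%% ({sha512, rsa} is selected); pre-1.2 versions ignore the extension and
+%% fall back to {md5sha, rsa}.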
+ignore_hassign_extension_pre_tls_1_2(Config) ->
+ Opts = ?config(server_opts, Config),
+ CertFile = proplists:get_value(certfile, Opts),
+ [{_, Cert, _}] = ssl_test_lib:pem_to_der(CertFile),
+ HashSigns = #hash_sign_algos{hash_sign_algos = [{sha512, rsa}, {sha, dsa}]},
+ {sha512, rsa} = ssl_handshake:select_hashsign(HashSigns, Cert, {3,3}),
+ %%% Ignore
+ {md5sha, rsa} = ssl_handshake:select_hashsign(HashSigns, Cert, {3,2}),
+ {md5sha, rsa} = ssl_handshake:select_hashsign(HashSigns, Cert, {3,0}).
+
+is_supported(Hash) ->
+ Algos = crypto:supports(),
+ Hashs = proplists:get_value(hashs, Algos),
+ lists:member(Hash, Hashs).
diff --git a/lib/ssl/test/ssl_npn_handshake_SUITE.erl b/lib/ssl/test/ssl_npn_handshake_SUITE.erl
index 30c0a67a36..8e95679306 100644
--- a/lib/ssl/test/ssl_npn_handshake_SUITE.erl
+++ b/lib/ssl/test/ssl_npn_handshake_SUITE.erl
@@ -172,7 +172,7 @@ no_client_negotiate_but_server_supports_npn(Config) when is_list(Config) ->
run_npn_handshake(Config,
[],
[{next_protocols_advertised, [<<"spdy/1">>, <<"http/1.1">>, <<"http/1.0">>]}],
- {error, next_protocol_not_negotiated}).
+ {error, protocol_not_negotiated}).
%--------------------------------------------------------------------------------
@@ -180,7 +180,7 @@ client_negotiate_server_does_not_support(Config) when is_list(Config) ->
run_npn_handshake(Config,
[{client_preferred_next_protocols, {client, [<<"spdy/2">>], <<"http/1.1">>}}],
[],
- {error, next_protocol_not_negotiated}).
+ {error, protocol_not_negotiated}).
%--------------------------------------------------------------------------------
renegotiate_from_client_after_npn_handshake(Config) when is_list(Config) ->
@@ -311,8 +311,8 @@ run_npn_handshake(Config, ClientExtraOpts, ServerExtraOpts, ExpectedProtocol) ->
assert_npn(Socket, Protocol) ->
ct:log("Negotiated Protocol ~p, Expecting: ~p ~n",
- [ssl:negotiated_next_protocol(Socket), Protocol]),
- Protocol = ssl:negotiated_next_protocol(Socket).
+ [ssl:negotiated_protocol(Socket), Protocol]),
+ Protocol = ssl:negotiated_protocol(Socket).
assert_npn_and_renegotiate_and_send_data(Socket, Protocol, Data) ->
assert_npn(Socket, Protocol),
@@ -332,7 +332,7 @@ ssl_receive_and_assert_npn(Socket, Protocol, Data) ->
ssl_send(Socket, Data) ->
ct:log("Connection info: ~p~n",
- [ssl:connection_info(Socket)]),
+ [ssl:connection_information(Socket)]),
ssl:send(Socket, Data).
ssl_receive(Socket, Data) ->
@@ -340,7 +340,7 @@ ssl_receive(Socket, Data) ->
ssl_receive(Socket, Data, Buffer) ->
ct:log("Connection info: ~p~n",
- [ssl:connection_info(Socket)]),
+ [ssl:connection_information(Socket)]),
receive
{ssl, Socket, MoreData} ->
ct:log("Received ~p~n",[MoreData]),
@@ -360,4 +360,4 @@ ssl_receive(Socket, Data, Buffer) ->
connection_info_result(Socket) ->
- ssl:connection_info(Socket).
+ ssl:connection_information(Socket).
diff --git a/lib/ssl/test/ssl_pem_cache_SUITE.erl b/lib/ssl/test/ssl_pem_cache_SUITE.erl
new file mode 100644
index 0000000000..23584dfcdf
--- /dev/null
+++ b/lib/ssl/test/ssl_pem_cache_SUITE.erl
@@ -0,0 +1,127 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_pem_cache_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-define(CLEANUP_INTERVAL, 5000).
+
+%%--------------------------------------------------------------------
+%% Common Test interface functions -----------------------------------
+%%--------------------------------------------------------------------
+all() ->
+ [pem_cleanup].
+
+groups() ->
+ [].
+
+init_per_suite(Config0) ->
+ catch crypto:stop(),
+ try crypto:start() of
+ ok ->
+ ssl:start(),
+	     %% make rsa certs using openssl
+ Result =
+ (catch make_certs:all(?config(data_dir, Config0),
+ ?config(priv_dir, Config0))),
+ ct:log("Make certs ~p~n", [Result]),
+
+ Config1 = ssl_test_lib:make_dsa_cert(Config0),
+ ssl_test_lib:cert_options(Config1)
+ catch _:_ ->
+ {skip, "Crypto did not start"}
+ end.
+
+end_per_suite(_Config) ->
+ application:stop(crypto).
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, Config) ->
+ Config.
+
+init_per_testcase(pem_cleanup, Config) ->
+ ssl:stop(),
+ application:load(ssl),
+ application:set_env(ssl, ssl_pem_cache_clean, ?CLEANUP_INTERVAL),
+ ssl:start(),
+ Config.
+
+end_per_testcase(_TestCase, Config) ->
+ %%ssl:stop(),
+ Config.
+
+%%--------------------------------------------------------------------
+%% Test Cases --------------------------------------------------------
+%%--------------------------------------------------------------------
+pem_cleanup() ->
+    [{doc, "Test pem cache invalidation mechanism"}].
+pem_cleanup(Config) when is_list(Config) ->
+ process_flag(trap_exit, true),
+ ClientOpts = ?config(client_opts, Config),
+ ServerOpts = ?config(server_opts, Config),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+
+ Server =
+ ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {ssl_test_lib, no_result, []}},
+ {options, ServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client =
+ ssl_test_lib:start_client([{node, ClientNode},
+ {port, Port}, {host, Hostname},
+ {mfa, {ssl_test_lib, no_result, []}},
+ {from, self()}, {options, ClientOpts}]),
+
+ Size = ssl_pkix_db:db_size(get_pem_cache()),
+ Certfile = proplists:get_value(certfile, ServerOpts),
+ {ok, FileInfo} = file:read_file_info(Certfile),
+ Time = later(),
+ ok = file:write_file_info(Certfile, FileInfo#file_info{mtime = Time}),
+ ct:sleep(2 * ?CLEANUP_INTERVAL),
+ Size1 = ssl_pkix_db:db_size(get_pem_cache()),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ false = Size == Size1.
+
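+%% get_pem_cache/0 digs the PEM cache table out of the ssl_manager state via
+%% sys:get_status/1; later/0 returns a timestamp two cleanup intervals ahead,
+%% used above to make the certificate file look modified.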
+get_pem_cache() ->
+ {status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)),
+ [_, _,_, _, Prop] = StatusInfo,
+ State = ssl_test_lib:state(Prop),
+ case element(6, State) of
+ [_CertDb, _FileRefDb, PemCache| _] ->
+ PemCache;
+ _ ->
+ undefined
+ end.
+
+later() ->
+ DateTime = calendar:now_to_local_time(os:timestamp()),
+ Gregorian = calendar:datetime_to_gregorian_seconds(DateTime),
+ calendar:gregorian_seconds_to_datetime(Gregorian + (2 * ?CLEANUP_INTERVAL)).
+
diff --git a/lib/ssl/test/ssl_session_cache_SUITE.erl b/lib/ssl/test/ssl_session_cache_SUITE.erl
index 06a41f1260..36d086338e 100644
--- a/lib/ssl/test/ssl_session_cache_SUITE.erl
+++ b/lib/ssl/test/ssl_session_cache_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
diff --git a/lib/ssl/test/ssl_sni_SUITE.erl b/lib/ssl/test/ssl_sni_SUITE.erl
new file mode 100644
index 0000000000..b059ff991b
--- /dev/null
+++ b/lib/ssl/test/ssl_sni_SUITE.erl
@@ -0,0 +1,179 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+
+-module(ssl_sni_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+-include_lib("public_key/include/public_key.hrl").
+
+%%--------------------------------------------------------------------
+%% Common Test interface functions -----------------------------------
+%%--------------------------------------------------------------------
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() -> [no_sni_header,
+ sni_match,
+ sni_no_match,
+ no_sni_header_fun,
+ sni_match_fun,
+ sni_no_match_fun].
+
+init_per_suite(Config0) ->
+ catch crypto:stop(),
+ try crypto:start() of
+ ok ->
+ ssl:start(),
+ Result =
+ (catch make_certs:all(?config(data_dir, Config0),
+ ?config(priv_dir, Config0))),
+ ct:log("Make certs ~p~n", [Result]),
+ ssl_test_lib:cert_options(Config0)
+ catch _:_ ->
+ {skip, "Crypto did not start"}
+ end.
+
+end_per_suite(_) ->
+ ssl:stop(),
+ application:stop(crypto).
+
+%%--------------------------------------------------------------------
+%% Test Cases --------------------------------------------------------
+%%--------------------------------------------------------------------
+no_sni_header(Config) ->
+ run_handshake(Config, undefined, undefined, "server").
+
+no_sni_header_fun(Config) ->
+ run_sni_fun_handshake(Config, undefined, undefined, "server").
+
+sni_match(Config) ->
+ run_handshake(Config, "a.server", "a.server", "a.server").
+
+sni_match_fun(Config) ->
+ run_sni_fun_handshake(Config, "a.server", "a.server", "a.server").
+
+sni_no_match(Config) ->
+ run_handshake(Config, "c.server", undefined, "server").
+
+sni_no_match_fun(Config) ->
+ run_sni_fun_handshake(Config, "c.server", undefined, "server").
+
+
+%%--------------------------------------------------------------------
+%% Internal Functions ------------------------------------------------
+%%--------------------------------------------------------------------
+ssl_recv(SSLSocket, Expect) ->
+ ssl_recv(SSLSocket, "", Expect).
+
+ssl_recv(SSLSocket, CurrentData, ExpectedData) ->
+ receive
+ {ssl, SSLSocket, Data} ->
+	    NewData = CurrentData ++ Data,
+	    case NewData of
+ ExpectedData ->
+ ok;
+ _ ->
+		    ssl_recv(SSLSocket, NewData, ExpectedData)
+ end;
+ Other ->
+ ct:fail({unexpected_message, Other})
+ after 4000 ->
+ ct:fail({timeout, CurrentData, ExpectedData})
+ end.
+
+send_and_hostname(SSLSocket) ->
+ ssl:send(SSLSocket, "OK"),
+ {ok, [{sni_hostname, Hostname}]} = ssl:connection_information(SSLSocket, [sni_hostname]),
+ Hostname.
+
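+%% rdnPart/2 walks an RDN sequence until it finds the attribute of the given
+%% type; rdn_to_string/1 turns the attribute value into a plain string.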
+rdnPart([[#'AttributeTypeAndValue'{type=Type, value=Value} | _] | _], Type) ->
+ Value;
+rdnPart([_ | Tail], Type) ->
+ rdnPart(Tail, Type);
+rdnPart([], _) ->
+ unknown.
+
+rdn_to_string({utf8String, Binary}) ->
+ erlang:binary_to_list(Binary);
+rdn_to_string({printableString, String}) ->
+ String.
+
+recv_and_certificate(SSLSocket) ->
+ ssl_recv(SSLSocket, "OK"),
+ {ok, PeerCert} = ssl:peercert(SSLSocket),
+ #'OTPCertificate'{tbsCertificate = #'OTPTBSCertificate'{subject = {rdnSequence, Subject}}}
+ = public_key:pkix_decode_cert(PeerCert, otp),
+ ct:log("Subject of certificate received from server: ~p", [Subject]),
+ rdn_to_string(rdnPart(Subject, ?'id-at-commonName')).
+
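+%% Both handshake drivers below start an Erlang server and client; the server
+%% reports the SNI hostname it saw (send_and_hostname/1) and the client
+%% reports the CN of the server certificate, both checked against the
+%% expected values.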
+run_sni_fun_handshake(Config, SNIHostname, ExpectedSNIHostname, ExpectedCN) ->
+ ct:log("Start running handshake for sni_fun, Config: ~p, SNIHostname: ~p, "
+ "ExpectedSNIHostname: ~p, ExpectedCN: ~p",
+ [Config, SNIHostname, ExpectedSNIHostname, ExpectedCN]),
+ [{sni_hosts, ServerSNIConf}] = ?config(sni_server_opts, Config),
+ SNIFun = fun(Domain) -> proplists:get_value(Domain, ServerSNIConf, undefined) end,
+ ServerOptions = ?config(server_opts, Config) ++ [{sni_fun, SNIFun}],
+ ClientOptions =
+ case SNIHostname of
+ undefined ->
+ ?config(client_opts, Config);
+ _ ->
+ [{server_name_indication, SNIHostname}] ++ ?config(client_opts, Config)
+ end,
+ ct:log("Options: ~p", [[ServerOptions, ClientOptions]]),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()}, {mfa, {?MODULE, send_and_hostname, []}},
+ {options, ServerOptions}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname}, {from, self()},
+ {mfa, {?MODULE, recv_and_certificate, []}},
+ {options, ClientOptions}]),
+ ssl_test_lib:check_result(Server, ExpectedSNIHostname, Client, ExpectedCN),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client).
+
+run_handshake(Config, SNIHostname, ExpectedSNIHostname, ExpectedCN) ->
+ ct:log("Start running handshake, Config: ~p, SNIHostname: ~p, "
+ "ExpectedSNIHostname: ~p, ExpectedCN: ~p",
+ [Config, SNIHostname, ExpectedSNIHostname, ExpectedCN]),
+ ServerOptions = ?config(sni_server_opts, Config) ++ ?config(server_opts, Config),
+ ClientOptions =
+ case SNIHostname of
+ undefined ->
+ ?config(client_opts, Config);
+ _ ->
+ [{server_name_indication, SNIHostname}] ++ ?config(client_opts, Config)
+ end,
+ ct:log("Options: ~p", [[ServerOptions, ClientOptions]]),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()}, {mfa, {?MODULE, send_and_hostname, []}},
+ {options, ServerOptions}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname}, {from, self()},
+ {mfa, {?MODULE, recv_and_certificate, []}},
+ {options, ClientOptions}]),
+ ssl_test_lib:check_result(Server, ExpectedSNIHostname, Client, ExpectedCN),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client).
diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl
index d2e6e41482..a3bfdf8893 100644
--- a/lib/ssl/test/ssl_test_lib.erl
+++ b/lib/ssl/test/ssl_test_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -187,6 +187,7 @@ run_client(Opts) ->
Transport = proplists:get_value(transport, Opts, ssl),
Options = proplists:get_value(options, Opts),
ct:log("~p:~p~n~p:connect(~p, ~p)@~p~n", [?MODULE,?LINE, Transport, Host, Port, Node]),
+ ct:log("SSLOpts: ~p", [Options]),
case rpc:call(Node, Transport, connect, [Host, Port, Options]) of
{ok, Socket} ->
Pid ! {connected, Socket},
@@ -253,7 +254,6 @@ check_result(Server, ServerMsg, Client, ClientMsg) ->
{Port, {data,Debug}} when is_port(Port) ->
ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]),
check_result(Server, ServerMsg, Client, ClientMsg);
-
Unexpected ->
Reason = {{expected, {Client, ClientMsg}},
{expected, {Server, ServerMsg}}, {got, Unexpected}},
@@ -267,6 +267,9 @@ check_result(Pid, Msg) ->
{Port, {data,Debug}} when is_port(Port) ->
ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]),
check_result(Pid,Msg);
+ %% {Port, {exit_status, Status}} when is_port(Port) ->
+ %% ct:log("~p:~p Exit status: ~p~n",[?MODULE,?LINE, Status]),
+ %% check_result(Pid, Msg);
Unexpected ->
Reason = {{expected, {Pid, Msg}},
{got, Unexpected}},
@@ -351,6 +354,11 @@ cert_options(Config) ->
BadKeyFile = filename:join([?config(priv_dir, Config),
"badkey.pem"]),
PskSharedSecret = <<1,2,3,4,5,6,7,8,9,10,11,12,13,14,15>>,
+
+ SNIServerACertFile = filename:join([?config(priv_dir, Config), "a.server", "cert.pem"]),
+ SNIServerAKeyFile = filename:join([?config(priv_dir, Config), "a.server", "key.pem"]),
+ SNIServerBCertFile = filename:join([?config(priv_dir, Config), "b.server", "cert.pem"]),
+ SNIServerBKeyFile = filename:join([?config(priv_dir, Config), "b.server", "key.pem"]),
[{client_opts, [{ssl_imp, new},{reuseaddr, true}]},
{client_verification_opts, [{cacertfile, ClientCaCertFile},
{certfile, ClientCertFile},
@@ -411,7 +419,17 @@ cert_options(Config) ->
{server_bad_cert, [{ssl_imp, new},{cacertfile, ServerCaCertFile},
{certfile, BadCertFile}, {keyfile, ServerKeyFile}]},
{server_bad_key, [{ssl_imp, new},{cacertfile, ServerCaCertFile},
- {certfile, ServerCertFile}, {keyfile, BadKeyFile}]}
+ {certfile, ServerCertFile}, {keyfile, BadKeyFile}]},
+ {sni_server_opts, [{sni_hosts, [
+ {"a.server", [
+ {certfile, SNIServerACertFile},
+ {keyfile, SNIServerAKeyFile}
+ ]},
+ {"b.server", [
+ {certfile, SNIServerBCertFile},
+ {keyfile, SNIServerBKeyFile}
+ ]}
+ ]}]}
| Config].
@@ -836,7 +854,7 @@ string_regex_filter(Str, Search) when is_list(Str) ->
_ ->
true
end;
-string_regex_filter(Str, _Search) ->
+string_regex_filter(_Str, _Search) ->
false.
anonymous_suites() ->
@@ -918,6 +936,10 @@ srp_dss_suites() ->
{srp_dss, aes_256_cbc, sha}],
ssl_cipher:filter_suites(Suites).
+rc4_suites(Version) ->
+ Suites = ssl_cipher:rc4_suites(Version),
+ ssl_cipher:filter_suites(Suites).
+
pem_to_der(File) ->
{ok, PemBin} = file:read_file(File),
public_key:pem_decode(PemBin).
@@ -927,7 +949,8 @@ der_to_pem(File, Entries) ->
file:write_file(File, PemBin).
cipher_result(Socket, Result) ->
- Result = ssl:connection_info(Socket),
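+    %% connection_information/1 returns a property list; rebuild the old
+    %% {ok, {Protocol, CipherSuite}} shape so it can be matched against the
+    %% expected Result.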
+ {ok, Info} = ssl:connection_information(Socket),
+ Result = {ok, {proplists:get_value(protocol, Info), proplists:get_value(cipher_suite, Info)}},
     ct:log("~p:~p~nSuccessful connect: ~p~n", [?MODULE,?LINE, Result]),
     %% Important to send two packets here
%% to properly test "cipher state" handling
@@ -1083,6 +1106,8 @@ cipher_restriction(Config0) ->
check_sane_openssl_version(Version) ->
case {Version, os:cmd("openssl version")} of
+ {_, "OpenSSL 1.0.2" ++ _} ->
+ true;
{_, "OpenSSL 1.0.1" ++ _} ->
true;
{'tlsv1.2', "OpenSSL 1.0" ++ _} ->
@@ -1125,7 +1150,8 @@ filter_suites(Ciphers0) ->
Supported0 = ssl_cipher:suites(Version)
++ ssl_cipher:anonymous_suites(Version)
++ ssl_cipher:psk_suites(Version)
- ++ ssl_cipher:srp_suites(),
+ ++ ssl_cipher:srp_suites()
+ ++ ssl_cipher:rc4_suites(Version),
Supported1 = ssl_cipher:filter_suites(Supported0),
Supported2 = [ssl:suite_definition(S) || S <- Supported1],
[Cipher || Cipher <- Ciphers0, lists:member(Cipher, Supported2)].
diff --git a/lib/ssl/test/ssl_to_openssl_SUITE.erl b/lib/ssl/test/ssl_to_openssl_SUITE.erl
index 942c446ec4..aca34cb6e9 100644
--- a/lib/ssl/test/ssl_to_openssl_SUITE.erl
+++ b/lib/ssl/test/ssl_to_openssl_SUITE.erl
@@ -50,9 +50,9 @@ all() ->
groups() ->
[{basic, [], basic_tests()},
- {'tlsv1.2', [], all_versions_tests() ++ npn_tests()},
- {'tlsv1.1', [], all_versions_tests() ++ npn_tests()},
- {'tlsv1', [], all_versions_tests()++ npn_tests()},
+ {'tlsv1.2', [], all_versions_tests() ++ alpn_tests() ++ npn_tests() ++ sni_server_tests()},
+ {'tlsv1.1', [], all_versions_tests() ++ alpn_tests() ++ npn_tests() ++ sni_server_tests()},
+ {'tlsv1', [], all_versions_tests()++ alpn_tests() ++ npn_tests() ++ sni_server_tests()},
{'sslv3', [], all_versions_tests()}].
basic_tests() ->
@@ -79,6 +79,18 @@ all_versions_tests() ->
expired_session,
ssl2_erlang_server_openssl_client].
+alpn_tests() ->
+ [erlang_client_alpn_openssl_server_alpn,
+ erlang_server_alpn_openssl_client_alpn,
+ erlang_client_alpn_openssl_server,
+ erlang_client_openssl_server_alpn,
+ erlang_server_alpn_openssl_client,
+ erlang_server_openssl_client_alpn,
+ erlang_client_alpn_openssl_server_alpn_renegotiate,
+ erlang_server_alpn_openssl_client_alpn_renegotiate,
+ erlang_client_alpn_npn_openssl_server_alpn_npn,
+ erlang_server_alpn_npn_openssl_client_alpn_npn].
+
npn_tests() ->
[erlang_client_openssl_server_npn,
erlang_server_openssl_client_npn,
@@ -89,6 +101,14 @@ npn_tests() ->
erlang_client_openssl_server_npn_only_client,
erlang_client_openssl_server_npn_only_server].
+sni_server_tests() ->
+ [erlang_server_openssl_client_sni_match,
+ erlang_server_openssl_client_sni_match_fun,
+ erlang_server_openssl_client_sni_no_match,
+ erlang_server_openssl_client_sni_no_match_fun,
+ erlang_server_openssl_client_sni_no_header,
+ erlang_server_openssl_client_sni_no_header_fun].
+
init_per_suite(Config0) ->
Dog = ct:timetrap(?LONG_TIMEOUT *2),
@@ -161,6 +181,36 @@ special_init(ssl2_erlang_server_openssl_client, Config) ->
check_sane_openssl_sslv2(Config);
special_init(TestCase, Config)
+ when TestCase == erlang_client_alpn_openssl_server_alpn;
+ TestCase == erlang_server_alpn_openssl_client_alpn;
+ TestCase == erlang_client_alpn_openssl_server;
+ TestCase == erlang_client_openssl_server_alpn;
+ TestCase == erlang_server_alpn_openssl_client;
+ TestCase == erlang_server_openssl_client_alpn ->
+ check_openssl_alpn_support(Config);
+
+special_init(TestCase, Config)
+ when TestCase == erlang_client_alpn_openssl_server_alpn_renegotiate;
+ TestCase == erlang_server_alpn_openssl_client_alpn_renegotiate ->
+ {ok, Version} = application:get_env(ssl, protocol_version),
+ case check_sane_openssl_renegotaite(Config, Version) of
+ {skip, _} = Skip ->
+ Skip;
+ _ ->
+ check_openssl_alpn_support(Config)
+ end;
+
+special_init(TestCase, Config)
+ when TestCase == erlang_client_alpn_npn_openssl_server_alpn_npn;
+ TestCase == erlang_server_alpn_npn_openssl_client_alpn_npn ->
+ case check_openssl_alpn_support(Config) of
+ {skip, _} = Skip ->
+ Skip;
+ _ ->
+ check_openssl_npn_support(Config)
+ end;
+
+special_init(TestCase, Config)
when TestCase == erlang_client_openssl_server_npn;
TestCase == erlang_server_openssl_client_npn;
TestCase == erlang_server_openssl_client_npn_only_server;
@@ -179,6 +229,16 @@ special_init(TestCase, Config)
_ ->
check_openssl_npn_support(Config)
end;
+
+special_init(TestCase, Config)
+ when TestCase == erlang_server_openssl_client_sni_match;
+ TestCase == erlang_server_openssl_client_sni_no_match;
+ TestCase == erlang_server_openssl_client_sni_no_header;
+ TestCase == erlang_server_openssl_client_sni_match_fun;
+ TestCase == erlang_server_openssl_client_sni_no_match_fun;
+ TestCase == erlang_server_openssl_client_sni_no_header_fun ->
+ check_openssl_sni_support(Config);
+
special_init(_, Config) ->
Config.
@@ -924,6 +984,128 @@ ssl2_erlang_server_openssl_client(Config) when is_list(Config) ->
process_flag(trap_exit, false).
%%--------------------------------------------------------------------
+
+erlang_client_alpn_openssl_server_alpn(Config) when is_list(Config) ->
+ Data = "From openssl to erlang",
+ start_erlang_client_and_openssl_server_for_alpn_negotiation(Config, Data, fun(Client, OpensslPort) ->
+ true = port_command(OpensslPort, Data),
+
+ ssl_test_lib:check_result(Client, ok)
+ end),
+ ok.
+
+%%--------------------------------------------------------------------
+
+erlang_server_alpn_openssl_client_alpn(Config) when is_list(Config) ->
+ Data = "From openssl to erlang",
+ start_erlang_server_and_openssl_client_for_alpn_negotiation(Config, Data, fun(Client, OpensslPort) ->
+ true = port_command(OpensslPort, Data),
+
+ ssl_test_lib:check_result(Client, ok)
+ end),
+ ok.
+
+%%--------------------------------------------------------------------------
+
+erlang_client_alpn_openssl_server(Config) when is_list(Config) ->
+ Data = "From openssl to erlang",
+ start_erlang_client_and_openssl_server_with_opts(Config,
+ [{alpn_advertised_protocols, [<<"spdy/2">>]}],
+ "",
+ Data, fun(Server, OpensslPort) ->
+ true = port_command(OpensslPort, Data),
+ ssl_test_lib:check_result(Server, ok)
+ end),
+ ok.
+
+%%--------------------------------------------------------------------------
+
+erlang_client_openssl_server_alpn(Config) when is_list(Config) ->
+ Data = "From openssl to erlang",
+ start_erlang_client_and_openssl_server_with_opts(Config,
+ [],
+ "-alpn spdy/2",
+ Data, fun(Server, OpensslPort) ->
+ true = port_command(OpensslPort, Data),
+ ssl_test_lib:check_result(Server, ok)
+ end),
+ ok.
+
+%%--------------------------------------------------------------------------
+
+erlang_server_alpn_openssl_client(Config) when is_list(Config) ->
+ Data = "From openssl to erlang",
+ start_erlang_server_and_openssl_client_with_opts(Config,
+ [{alpn_advertised_protocols, [<<"spdy/2">>]}],
+ "",
+ Data, fun(Server, OpensslPort) ->
+ true = port_command(OpensslPort, Data),
+ ssl_test_lib:check_result(Server, ok)
+ end),
+ ok.
+
+%%--------------------------------------------------------------------------
+
+erlang_server_openssl_client_alpn(Config) when is_list(Config) ->
+ Data = "From openssl to erlang",
+ start_erlang_server_and_openssl_client_with_opts(Config,
+ [],
+ "-alpn spdy/2",
+ Data, fun(Server, OpensslPort) ->
+ true = port_command(OpensslPort, Data),
+ ssl_test_lib:check_result(Server, ok)
+ end),
+ ok.
+
+%%--------------------------------------------------------------------
+
+erlang_client_alpn_openssl_server_alpn_renegotiate(Config) when is_list(Config) ->
+ Data = "From openssl to erlang",
+ start_erlang_client_and_openssl_server_for_alpn_negotiation(Config, Data, fun(Client, OpensslPort) ->
+ true = port_command(OpensslPort, ?OPENSSL_RENEGOTIATE),
+ ct:sleep(?SLEEP),
+ true = port_command(OpensslPort, Data),
+
+ ssl_test_lib:check_result(Client, ok)
+ end),
+ ok.
+
+%%--------------------------------------------------------------------
+
+erlang_server_alpn_openssl_client_alpn_renegotiate(Config) when is_list(Config) ->
+ Data = "From openssl to erlang",
+ start_erlang_server_and_openssl_client_for_alpn_negotiation(Config, Data, fun(Client, OpensslPort) ->
+ true = port_command(OpensslPort, ?OPENSSL_RENEGOTIATE),
+ ct:sleep(?SLEEP),
+ true = port_command(OpensslPort, Data),
+
+ ssl_test_lib:check_result(Client, ok)
+ end),
+ ok.
+
+%%--------------------------------------------------------------------
+
+erlang_client_alpn_npn_openssl_server_alpn_npn(Config) when is_list(Config) ->
+ Data = "From openssl to erlang",
+ start_erlang_client_and_openssl_server_for_alpn_npn_negotiation(Config, Data, fun(Client, OpensslPort) ->
+ true = port_command(OpensslPort, Data),
+
+ ssl_test_lib:check_result(Client, ok)
+ end),
+ ok.
+
+%%--------------------------------------------------------------------
+
+erlang_server_alpn_npn_openssl_client_alpn_npn(Config) when is_list(Config) ->
+ Data = "From openssl to erlang",
+ start_erlang_server_and_openssl_client_for_alpn_npn_negotiation(Config, Data, fun(Client, OpensslPort) ->
+ true = port_command(OpensslPort, Data),
+
+ ssl_test_lib:check_result(Client, ok)
+ end),
+ ok.
+
+%%--------------------------------------------------------------------
erlang_client_openssl_server_npn() ->
[{doc,"Test erlang client with openssl server doing npn negotiation"}].
@@ -1016,6 +1198,25 @@ erlang_server_openssl_client_npn_only_client(Config) when is_list(Config) ->
ssl_test_lib:check_result(Server, ok)
end),
ok.
+%--------------------------------------------------------------------------
+erlang_server_openssl_client_sni_no_header(Config) when is_list(Config) ->
+ erlang_server_openssl_client_sni_test(Config, undefined, undefined, "server").
+
+erlang_server_openssl_client_sni_no_header_fun(Config) when is_list(Config) ->
+ erlang_server_openssl_client_sni_test_sni_fun(Config, undefined, undefined, "server").
+
+erlang_server_openssl_client_sni_match(Config) when is_list(Config) ->
+ erlang_server_openssl_client_sni_test(Config, "a.server", "a.server", "a.server").
+
+erlang_server_openssl_client_sni_match_fun(Config) when is_list(Config) ->
+ erlang_server_openssl_client_sni_test_sni_fun(Config, "a.server", "a.server", "a.server").
+
+erlang_server_openssl_client_sni_no_match(Config) when is_list(Config) ->
+ erlang_server_openssl_client_sni_test(Config, "c.server", undefined, "server").
+
+erlang_server_openssl_client_sni_no_match_fun(Config) when is_list(Config) ->
+ erlang_server_openssl_client_sni_test_sni_fun(Config, "c.server", undefined, "server").
+
%%--------------------------------------------------------------------
%% Internal functions ------------------------------------------------
@@ -1042,6 +1243,94 @@ run_suites(Ciphers, Version, Config, Type) ->
ct:fail(cipher_suite_failed_see_test_case_log)
end.
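+%% client_read_check/2 verifies that every expected string occurs in the data
+%% collected from the openssl port; client_check_result/2,3 accumulates the
+%% port output until all expected strings are seen or a timeout hits.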
+client_read_check([], _Data) ->
+ ok;
+client_read_check([Hd | T], Data) ->
+ case binary:match(Data, list_to_binary(Hd)) of
+ nomatch ->
+ nomatch;
+ _ ->
+ client_read_check(T, Data)
+ end.
+client_check_result(Port, DataExpected, DataReceived) ->
+ receive
+ {Port, {data, TheData}} ->
+ Data = list_to_binary(TheData),
+ NewData = <<DataReceived/binary, Data/binary>>,
+ ct:log("New Data: ~p", [NewData]),
+ case client_read_check(DataExpected, NewData) of
+ ok ->
+ ok;
+ _ ->
+ client_check_result(Port, DataExpected, NewData)
+ end
+ after 3000 ->
+ ct:fail({"Time out on opensssl Client", {expected, DataExpected},
+ {got, DataReceived}})
+ end.
+client_check_result(Port, DataExpected) ->
+ client_check_result(Port, DataExpected, <<"">>).
+
+send_and_hostname(SSLSocket) ->
+ ssl:send(SSLSocket, "OK"),
+ {ok, [{sni_hostname, Hostname}]} = ssl:connection_information(SSLSocket, [sni_hostname]),
+ Hostname.
+
+erlang_server_openssl_client_sni_test(Config, SNIHostname, ExpectedSNIHostname, ExpectedCN) ->
+ ct:log("Start running handshake, Config: ~p, SNIHostname: ~p, ExpectedSNIHostname: ~p, ExpectedCN: ~p", [Config, SNIHostname, ExpectedSNIHostname, ExpectedCN]),
+ ServerOptions = ?config(sni_server_opts, Config) ++ ?config(server_opts, Config),
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()}, {mfa, {?MODULE, send_and_hostname, []}},
+ {options, ServerOptions}]),
+ Port = ssl_test_lib:inet_port(Server),
+ ClientCommand = case SNIHostname of
+ undefined ->
+ "openssl s_client -connect " ++ Hostname ++ ":" ++ integer_to_list(Port);
+ _ ->
+ "openssl s_client -connect " ++ Hostname ++ ":" ++ integer_to_list(Port) ++ " -servername " ++ SNIHostname
+ end,
+ ct:log("Options: ~p", [[ServerOptions, ClientCommand]]),
+ ClientPort = open_port({spawn, ClientCommand}, [stderr_to_stdout]),
+
+ %% Client check needs to be done before server check,
+ %% or server check might consume client messages
+ ExpectedClientOutput = ["OK", "/CN=" ++ ExpectedCN ++ "/"],
+ client_check_result(ClientPort, ExpectedClientOutput),
+ ssl_test_lib:check_result(Server, ExpectedSNIHostname),
+ ssl_test_lib:close_port(ClientPort),
+ ssl_test_lib:close(Server),
+ ok.
+
+
+erlang_server_openssl_client_sni_test_sni_fun(Config, SNIHostname, ExpectedSNIHostname, ExpectedCN) ->
+ ct:log("Start running handshake for sni_fun, Config: ~p, SNIHostname: ~p, ExpectedSNIHostname: ~p, ExpectedCN: ~p", [Config, SNIHostname, ExpectedSNIHostname, ExpectedCN]),
+ [{sni_hosts, ServerSNIConf}] = ?config(sni_server_opts, Config),
+ SNIFun = fun(Domain) -> proplists:get_value(Domain, ServerSNIConf, undefined) end,
+ ServerOptions = ?config(server_opts, Config) ++ [{sni_fun, SNIFun}],
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()}, {mfa, {?MODULE, send_and_hostname, []}},
+ {options, ServerOptions}]),
+ Port = ssl_test_lib:inet_port(Server),
+ ClientCommand = case SNIHostname of
+ undefined ->
+ "openssl s_client -connect " ++ Hostname ++ ":" ++ integer_to_list(Port);
+ _ ->
+ "openssl s_client -connect " ++ Hostname ++ ":" ++ integer_to_list(Port) ++ " -servername " ++ SNIHostname
+ end,
+ ct:log("Options: ~p", [[ServerOptions, ClientCommand]]),
+ ClientPort = open_port({spawn, ClientCommand}, [stderr_to_stdout]),
+
+ %% Client check needs to be done before server check,
+ %% or server check might consume client messages
+ ExpectedClientOutput = ["OK", "/CN=" ++ ExpectedCN ++ "/"],
+ client_check_result(ClientPort, ExpectedClientOutput),
+ ssl_test_lib:check_result(Server, ExpectedSNIHostname),
+ ssl_test_lib:close_port(ClientPort),
+ ssl_test_lib:close(Server).
+
+
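The two helpers above exercise the two server-side SNI configuration styles. As a rough
illustration only (host names, certificate paths and the fallback value are made up), the
option lists they consume look roughly like this:

    sni_option_sketch() ->
        %% Per-host options selected via SNI; the paths are hypothetical.
        SNIOpts = [{certfile, "a.server.pem"}, {keyfile, "a.server.key"}],
        %% Static table, as used by erlang_server_openssl_client_sni_test/4:
        StaticConf = [{sni_hosts, [{"a.server", SNIOpts}]}],
        %% Callback form, as used by erlang_server_openssl_client_sni_test_sni_fun/4:
        FunConf = [{sni_fun,
                    fun(Host) ->
                            proplists:get_value(Host, [{"a.server", SNIOpts}], undefined)
                    end}],
        {StaticConf, FunConf}.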
cipher(CipherSuite, Version, Config, ClientOpts, ServerOpts) ->
process_flag(trap_exit, true),
ct:log("Testing CipherSuite ~p~n", [CipherSuite]),
@@ -1139,6 +1428,142 @@ start_erlang_client_and_openssl_server_with_opts(Config, ErlangClientOpts, Opens
ssl_test_lib:close(Client),
process_flag(trap_exit, false).
+start_erlang_client_and_openssl_server_for_alpn_negotiation(Config, Data, Callback) ->
+ process_flag(trap_exit, true),
+ ServerOpts = ?config(server_opts, Config),
+ ClientOpts0 = ?config(client_opts, Config),
+ ClientOpts = [{alpn_advertised_protocols, [<<"spdy/2">>]} | ClientOpts0],
+
+ {ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
+
+ Data = "From openssl to erlang",
+
+ Port = ssl_test_lib:inet_port(node()),
+ CertFile = proplists:get_value(certfile, ServerOpts),
+ KeyFile = proplists:get_value(keyfile, ServerOpts),
+ Version = tls_record:protocol_version(tls_record:highest_protocol_version([])),
+
+ Cmd = "openssl s_server -msg -alpn http/1.1,spdy/2 -accept " ++ integer_to_list(Port) ++ ssl_test_lib:version_flag(Version) ++
+ " -cert " ++ CertFile ++ " -key " ++ KeyFile,
+
+ ct:log("openssl cmd: ~p~n", [Cmd]),
+
+ OpensslPort = open_port({spawn, Cmd}, [stderr_to_stdout]),
+
+ ssl_test_lib:wait_for_openssl_server(),
+
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE,
+ erlang_ssl_receive_and_assert_negotiated_protocol, [<<"spdy/2">>, Data]}},
+ {options, ClientOpts}]),
+
+ Callback(Client, OpensslPort),
+
+ %% Clean close down! Server needs to be closed first !!
+ ssl_test_lib:close_port(OpensslPort),
+
+ ssl_test_lib:close(Client),
+ process_flag(trap_exit, false).
+
+start_erlang_server_and_openssl_client_for_alpn_negotiation(Config, Data, Callback) ->
+ process_flag(trap_exit, true),
+ ServerOpts0 = ?config(server_opts, Config),
+ ServerOpts = [{alpn_preferred_protocols, [<<"spdy/2">>]} | ServerOpts0],
+
+ {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+
+
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, erlang_ssl_receive_and_assert_negotiated_protocol, [<<"spdy/2">>, Data]}},
+ {options, ServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Version = tls_record:protocol_version(tls_record:highest_protocol_version([])),
+ Cmd = "openssl s_client -alpn http/1.0,spdy/2 -msg -port " ++ integer_to_list(Port) ++ ssl_test_lib:version_flag(Version) ++
+ " -host localhost",
+
+ ct:log("openssl cmd: ~p~n", [Cmd]),
+
+ OpenSslPort = open_port({spawn, Cmd}, [stderr_to_stdout]),
+
+ Callback(Server, OpenSslPort),
+
+ ssl_test_lib:close(Server),
+
+ ssl_test_lib:close_port(OpenSslPort),
+ process_flag(trap_exit, false).
+
+start_erlang_client_and_openssl_server_for_alpn_npn_negotiation(Config, Data, Callback) ->
+ process_flag(trap_exit, true),
+ ServerOpts = ?config(server_opts, Config),
+ ClientOpts0 = ?config(client_opts, Config),
+ ClientOpts = [{alpn_advertised_protocols, [<<"spdy/2">>]},
+ {client_preferred_next_protocols, {client, [<<"spdy/3">>, <<"http/1.1">>]}} | ClientOpts0],
+
+ {ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
+
+ Data = "From openssl to erlang",
+
+ Port = ssl_test_lib:inet_port(node()),
+ CertFile = proplists:get_value(certfile, ServerOpts),
+ KeyFile = proplists:get_value(keyfile, ServerOpts),
+ Version = tls_record:protocol_version(tls_record:highest_protocol_version([])),
+
+ Cmd = "openssl s_server -msg -alpn http/1.1,spdy/2 -nextprotoneg spdy/3 -accept " ++ integer_to_list(Port) ++ ssl_test_lib:version_flag(Version) ++
+ " -cert " ++ CertFile ++ " -key " ++ KeyFile,
+
+ ct:log("openssl cmd: ~p~n", [Cmd]),
+
+ OpensslPort = open_port({spawn, Cmd}, [stderr_to_stdout]),
+
+ ssl_test_lib:wait_for_openssl_server(),
+
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE,
+ erlang_ssl_receive_and_assert_negotiated_protocol, [<<"spdy/2">>, Data]}},
+ {options, ClientOpts}]),
+
+ Callback(Client, OpensslPort),
+
+ %% Clean close down! Server needs to be closed first !!
+ ssl_test_lib:close_port(OpensslPort),
+
+ ssl_test_lib:close(Client),
+ process_flag(trap_exit, false).
+
+start_erlang_server_and_openssl_client_for_alpn_npn_negotiation(Config, Data, Callback) ->
+ process_flag(trap_exit, true),
+ ServerOpts0 = ?config(server_opts, Config),
+ ServerOpts = [{alpn_preferred_protocols, [<<"spdy/2">>]},
+ {next_protocols_advertised, [<<"spdy/3">>, <<"http/1.1">>]} | ServerOpts0],
+
+ {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+
+
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, erlang_ssl_receive_and_assert_negotiated_protocol, [<<"spdy/2">>, Data]}},
+ {options, ServerOpts}]),
+ Port = ssl_test_lib:inet_port(Server),
+ Version = tls_record:protocol_version(tls_record:highest_protocol_version([])),
+ Cmd = "openssl s_client -alpn http/1.1,spdy/2 -nextprotoneg spdy/3 -msg -port " ++ integer_to_list(Port) ++ ssl_test_lib:version_flag(Version) ++
+ " -host localhost",
+
+ ct:log("openssl cmd: ~p~n", [Cmd]),
+
+ OpenSslPort = open_port({spawn, Cmd}, [stderr_to_stdout]),
+
+ Callback(Server, OpenSslPort),
+
+ ssl_test_lib:close(Server),
+
+ ssl_test_lib:close_port(OpenSslPort),
+ process_flag(trap_exit, false).
+
start_erlang_client_and_openssl_server_for_npn_negotiation(Config, Data, Callback) ->
process_flag(trap_exit, true),
ServerOpts = ?config(server_opts, Config),
@@ -1167,7 +1592,7 @@ start_erlang_client_and_openssl_server_for_npn_negotiation(Config, Data, Callbac
{host, Hostname},
{from, self()},
{mfa, {?MODULE,
- erlang_ssl_receive_and_assert_npn, [<<"spdy/2">>, Data]}},
+ erlang_ssl_receive_and_assert_negotiated_protocol, [<<"spdy/2">>, Data]}},
{options, ClientOpts}]),
Callback(Client, OpensslPort),
@@ -1188,7 +1613,7 @@ start_erlang_server_and_openssl_client_for_npn_negotiation(Config, Data, Callbac
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
{from, self()},
- {mfa, {?MODULE, erlang_ssl_receive_and_assert_npn, [<<"spdy/2">>, Data]}},
+ {mfa, {?MODULE, erlang_ssl_receive_and_assert_negotiated_protocol, [<<"spdy/2">>, Data]}},
{options, ServerOpts}]),
Port = ssl_test_lib:inet_port(Server),
Version = tls_record:protocol_version(tls_record:highest_protocol_version([])),
@@ -1236,15 +1661,15 @@ start_erlang_server_and_openssl_client_with_opts(Config, ErlangServerOpts, OpenS
process_flag(trap_exit, false).
-erlang_ssl_receive_and_assert_npn(Socket, Protocol, Data) ->
- {ok, Protocol} = ssl:negotiated_next_protocol(Socket),
+erlang_ssl_receive_and_assert_negotiated_protocol(Socket, Protocol, Data) ->
+ {ok, Protocol} = ssl:negotiated_protocol(Socket),
erlang_ssl_receive(Socket, Data),
- {ok, Protocol} = ssl:negotiated_next_protocol(Socket),
+ {ok, Protocol} = ssl:negotiated_protocol(Socket),
ok.
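Outside the test harness, the negotiated-protocol check above boils down to the following
client-side sketch; the host, port and advertised protocol list are assumptions for
illustration, not part of the suite:

    alpn_client_sketch() ->
        {ok, Sock} = ssl:connect("localhost", 4433,
                                 [binary,
                                  {alpn_advertised_protocols,
                                   [<<"spdy/2">>, <<"http/1.1">>]}]),
        %% Which protocol comes back depends on the server's preference list.
        {ok, Protocol} = ssl:negotiated_protocol(Sock),
        ok = ssl:close(Sock),
        Protocol.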
erlang_ssl_receive(Socket, Data) ->
ct:log("Connection info: ~p~n",
- [ssl:connection_info(Socket)]),
+ [ssl:connection_information(Socket)]),
receive
{ssl, Socket, Data} ->
io:format("Received ~p~n",[Data]),
@@ -1263,16 +1688,16 @@ erlang_ssl_receive(Socket, Data) ->
end.
connection_info(Socket, Version) ->
- case ssl:connection_info(Socket) of
- {ok, {Version, _} = Info} ->
+ case ssl:connection_information(Socket, [version]) of
+ {ok, [{version, Version}] = Info} ->
ct:log("Connection info: ~p~n", [Info]),
ok;
- {ok, {OtherVersion, _}} ->
+ {ok, [{version, OtherVersion}]} ->
{wrong_version, OtherVersion}
end.
connection_info_result(Socket) ->
- ssl:connection_info(Socket).
+ ssl:connection_information(Socket).
delayed_send(Socket, [ErlData, OpenSslData]) ->
@@ -1287,6 +1712,14 @@ server_sent_garbage(Socket) ->
end.
+check_openssl_sni_support(Config) ->
+ HelpText = os:cmd("openssl s_client --help"),
+ case string:str(HelpText, "-servername") of
+ 0 ->
+ {skip, "Current openssl doesn't support SNI"};
+ _ ->
+ Config
+ end.
check_openssl_npn_support(Config) ->
HelpText = os:cmd("openssl s_client --help"),
@@ -1297,6 +1730,15 @@ check_openssl_npn_support(Config) ->
Config
end.
+check_openssl_alpn_support(Config) ->
+ HelpText = os:cmd("openssl s_client --help"),
+ case string:str(HelpText, "alpn") of
+ 0 ->
+ {skip, "Openssl not compiled with alpn support"};
+ _ ->
+ Config
+ end.
+
check_sane_openssl_renegotaite(Config, Version) when Version == 'tlsv1.1';
Version == 'tlsv1.2' ->
case os:cmd("openssl version") of
diff --git a/lib/ssl/test/ssl_upgrade_SUITE.erl b/lib/ssl/test/ssl_upgrade_SUITE.erl
new file mode 100644
index 0000000000..c83fb367dc
--- /dev/null
+++ b/lib/ssl/test/ssl_upgrade_SUITE.erl
@@ -0,0 +1,164 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2014-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(ssl_upgrade_SUITE).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+-record(state, {
+ config,
+ server,
+ client,
+ soft
+ }).
+
+all() ->
+ [
+ minor_upgrade,
+ major_upgrade
+ ].
+
+init_per_suite(Config0) ->
+ catch crypto:stop(),
+ try {crypto:start(), erlang:system_info({wordsize, internal}) == erlang:system_info({wordsize, external})} of
+ {ok, true} ->
+ case ct_release_test:init(Config0) of
+ {skip, Reason} ->
+ {skip, Reason};
+ Config ->
+ Result =
+ (catch make_certs:all(?config(data_dir, Config),
+ ?config(priv_dir, Config))),
+ ct:log("Make certs ~p~n", [Result]),
+ ssl_test_lib:cert_options(Config)
+ end;
+ {ok, false} ->
+ {skip, "Test server will not handle halfwordemulator correctly. Skip as halfwordemulator is deprecated"}
+ catch _:_ ->
+ {skip, "Crypto did not start"}
+ end.
+
+end_per_suite(Config) ->
+ ct_release_test:cleanup(Config),
+ crypto:stop().
+
+init_per_testcase(_TestCase, Config) ->
+ Config.
+end_per_testcase(_TestCase, Config) ->
+ Config.
+
+major_upgrade(Config) when is_list(Config) ->
+ ct_release_test:upgrade(ssl, major,{?MODULE, #state{config = Config}}, Config).
+
+minor_upgrade(Config) when is_list(Config) ->
+ ct_release_test:upgrade(ssl, minor,{?MODULE, #state{config = Config}}, Config).
+
+upgrade_init(CTData, #state{config = Config} = State) ->
+ {ok, {_, _, Up, _Down}} = ct_release_test:get_appup(CTData, ssl),
+ ct:pal("Up: ~p", [Up]),
+ Soft = is_soft(Up), %% Symmetrical: if the upgrade is soft, so is the downgrade
+ case Soft of
+ true ->
+ {Server, Client} = soft_start_connection(Config),
+ State#state{server = Server, client = Client,
+ soft = Soft};
+ false ->
+ State#state{soft = Soft}
+ end.
+
+upgrade_upgraded(_, #state{soft = false, config = Config} = State) ->
+ {Server, Client} = restart_start_connection(Config),
+ ssl_test_lib:check_result(Server, ok, Client, ok),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ State;
+
+upgrade_upgraded(_, #state{server = Server0, client = Client0,
+ config = Config, soft = true} = State) ->
+ Server0 ! changed_version,
+ Client0 ! changed_version,
+ ssl_test_lib:check_result(Server0, ok, Client0, ok),
+ ssl_test_lib:close(Server0),
+ ssl_test_lib:close(Client0),
+ {Server, Client} = soft_start_connection(Config),
+ State#state{server = Server, client = Client}.
+
+upgrade_downgraded(_, #state{soft = false, config = Config} = State) ->
+ {Server, Client} = restart_start_connection(Config),
+ ssl_test_lib:check_result(Server, ok, Client, ok),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ State;
+
+upgrade_downgraded(_, #state{server = Server, client = Client, soft = true} = State) ->
+ Server ! changed_version,
+ Client ! changed_version,
+ ssl_test_lib:check_result(Server, ok, Client, ok),
+ ssl_test_lib:close(Server),
+ ssl_test_lib:close(Client),
+ State.
+
+use_connection(Socket) ->
+ ssl_test_lib:send_recv_result_active(Socket),
+ receive
+ changed_version ->
+ ssl_test_lib:send_recv_result_active(Socket)
+ end.
+
+soft_start_connection(Config) ->
+ ClientOpts = ?config(client_verification_opts, Config),
+ ServerOpts = ?config(server_verification_opts, Config),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {?MODULE, use_connection, []}},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {?MODULE, use_connection, []}},
+ {options, ClientOpts}]),
+ {Server, Client}.
+
+restart_start_connection(Config) ->
+ ClientOpts = ?config(client_verification_opts, Config),
+ ServerOpts = ?config(server_verification_opts, Config),
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {ssl_test_lib, send_recv_result_active, []}},
+ {options, ServerOpts}]),
+
+ Port = ssl_test_lib:inet_port(Server),
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {ssl_test_lib, send_recv_result_active, []}},
+ {options, ClientOpts}]),
+ {Server, Client}.
+
+is_soft([{restart_application, ssl}]) ->
+ false;
+is_soft(_) ->
+ true.
+
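is_soft/1 only inspects the upgrade instructions returned by ct_release_test:get_appup/2.
A sketch of the two cases it distinguishes (the load_module instruction is a hypothetical
example of a soft upgrade):

    %% Soft upgrade: new code is loaded into the running system, so existing
    %% connections are kept and later notified about the changed version.
    true = is_soft([{load_module, ssl_connection}]),
    %% Full restart of the application: connections must be re-established.
    false = is_soft([{restart_application, ssl}]).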
diff --git a/lib/ssl/vsn.mk b/lib/ssl/vsn.mk
index bda974da0e..171147adf2 100644
--- a/lib/ssl/vsn.mk
+++ b/lib/ssl/vsn.mk
@@ -1 +1 @@
-SSL_VSN = 5.3.8
+SSL_VSN = 7.0
diff --git a/lib/stdlib/doc/src/Makefile b/lib/stdlib/doc/src/Makefile
index f5d8b2072a..a4a2ed9931 100644
--- a/lib/stdlib/doc/src/Makefile
+++ b/lib/stdlib/doc/src/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1997-2012. All Rights Reserved.
+# Copyright Ericsson AB 1997-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -48,6 +48,7 @@ XML_REF3_FILES = \
digraph.xml \
digraph_utils.xml \
epp.xml \
+ erl_anno.xml \
erl_eval.xml \
erl_expand_records.xml \
erl_id_trans.xml \
@@ -81,6 +82,7 @@ XML_REF3_FILES = \
proplists.xml \
qlc.xml \
queue.xml \
+ rand.xml \
random.xml \
re.xml \
sets.xml \
diff --git a/lib/stdlib/doc/src/c.xml b/lib/stdlib/doc/src/c.xml
index b49fa6ad67..b43d4786ae 100644
--- a/lib/stdlib/doc/src/c.xml
+++ b/lib/stdlib/doc/src/c.xml
@@ -232,6 +232,14 @@ compile:file(<anno>File</anno>, <anno>Options</anno> ++ [report_errors, report_w
</desc>
</func>
<func>
+ <name name="uptime" arity="0"/>
+ <fsummary>Print node uptime</fsummary>
+ <desc>
+ <p>Prints the node uptime (as given by
+ <c>erlang:statistics(wall_clock)</c>), in human-readable form.</p>
+ </desc>
+ </func>
+ <func>
<name>xm(ModSpec) -> void()</name>
<fsummary>Cross reference check a module</fsummary>
<type>
diff --git a/lib/stdlib/doc/src/calendar.xml b/lib/stdlib/doc/src/calendar.xml
index e32a639b81..d8193a9ec2 100644
--- a/lib/stdlib/doc/src/calendar.xml
+++ b/lib/stdlib/doc/src/calendar.xml
@@ -270,7 +270,8 @@
<fsummary>Convert now to local date and time</fsummary>
<desc>
<p>This function returns local date and time converted from
- the return value from <c>erlang:now()</c>.</p>
+ the return value from
+ <seealso marker="erts:erlang#timestamp/0"><c>erlang:timestamp/0</c></seealso>.</p>
</desc>
</func>
<func>
@@ -279,7 +280,8 @@
<fsummary>Convert now to date and time</fsummary>
<desc>
<p>This function returns Universal Coordinated Time (UTC)
- converted from the return value from <c>erlang:now()</c>.</p>
+ converted from the return value from
+ <seealso marker="erts:erlang#timestamp/0"><c>erlang:timestamp/0</c></seealso>.</p>
</desc>
</func>
<func>
diff --git a/lib/stdlib/doc/src/erl_anno.xml b/lib/stdlib/doc/src/erl_anno.xml
new file mode 100644
index 0000000000..281feacdc4
--- /dev/null
+++ b/lib/stdlib/doc/src/erl_anno.xml
@@ -0,0 +1,308 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2015</year>
+ <year>2015</year>
+ <holder>Ericsson AB, All Rights Reserved</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved on line at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ The Initial Developer of the Original Code is Ericsson AB.
+ </legalnotice>
+
+ <title>erl_anno</title>
+ <prepared>Hans Bolinder</prepared>
+ <responsible>Kenneth Lundin</responsible>
+ <docno>1</docno>
+ <approved></approved>
+ <checked></checked>
+ <date>2015-02-26</date>
+ <rev>A</rev>
+ <file>erl_anno.xml</file>
+ </header>
+ <module>erl_anno</module>
+
+ <modulesummary>
+ Abstract Datatype for the Annotations of the Erlang Compiler
+ </modulesummary>
+
+ <description>
+ <p>This module implements an abstract type that is used by the
+ Erlang Compiler and its helper modules for holding data such as
+ column, line number, and text. The data type is a collection of
+ <marker id="annotations"><em>annotations</em></marker> as
+ described in the following.</p>
+ <p>The Erlang Token Scanner returns tokens with a subset of
+ the following annotations, depending on the options:</p>
+ <taglist>
+ <tag><c>column</c></tag>
+ <item><p>The column where the token begins.</p></item>
+ <tag><c>location</c></tag>
+ <item><p>The line and column where the token begins, or
+ just the line if the column is unknown.</p>
+ </item>
+ <tag><c>text</c></tag>
+ <item><p>The token's text.</p></item>
+ </taglist>
+ <p>From the above the following annotation is derived:</p>
+ <taglist>
+ <tag><c>line</c></tag>
+ <item><p>The line where the token begins.</p></item>
+ </taglist>
+ <p>Furthermore, the following annotations are supported by
+ this module, and used by various modules:</p>
+ <taglist>
+ <tag><c>file</c></tag>
+ <item><p>A filename.</p></item>
+ <tag><c>generated</c></tag>
+ <item><p>A Boolean indicating if the abstract code is
+ compiler generated. The Erlang Compiler does not emit warnings
+ for such code.</p>
+ </item>
+ <tag><c>record</c></tag>
+ <item><p>A Boolean indicating if the origin of the abstract
+ code is a record. Used by Dialyzer to assign types to tuple
+ elements.</p>
+ </item>
+ </taglist>
+ <p>The functions
+ <seealso marker="erl_scan#column/1">column()</seealso>,
+ <seealso marker="erl_scan#end_location/1">end_location()</seealso>,
+ <seealso marker="erl_scan#line/1">line()</seealso>,
+ <seealso marker="erl_scan#location/1">location()</seealso>, and
+ <seealso marker="erl_scan#text/1">text()</seealso>
+ in the <c>erl_scan</c> module can be used for inspecting
+ annotations in tokens.</p>
+ <p>The functions
+ <seealso marker="erl_parse#map_anno/2">map_anno()</seealso>,
+ <seealso marker="erl_parse#fold_anno/3">fold_anno()</seealso>,
+ <seealso marker="erl_parse#mapfold_anno/3">mapfold_anno()</seealso>,
+ <seealso marker="erl_parse#new_anno/1">new_anno()</seealso>,
+ <seealso marker="erl_parse#anno_from_term/1">
+ anno_from_term()</seealso>, and
+ <seealso marker="erl_parse#anno_to_term/1">
+ anno_to_term()</seealso> in the <c>erl_parse</c> module can be
+ used for manipulating annotations in abstract code.
+ </p>
+ </description>
+
+ <datatypes>
+ <datatype>
+ <name><marker id="type-anno">anno()</marker></name>
+ <desc><p>A collection of annotations.</p>
+ </desc>
+ </datatype>
+ <datatype>
+ <name name="anno_term"></name>
+ <desc>
+ <p>The term representing a collection of annotations. It is
+ either a <c>location()</c> or a list of key-value pairs.</p>
+ </desc>
+ </datatype>
+ <datatype>
+ <name name="column"></name>
+ </datatype>
+ <datatype>
+ <name name="line"></name>
+ <desc>
+ <p>To be changed to a non-negative integer in Erlang/OTP 19.0.</p>
+ </desc>
+ </datatype>
+ <datatype>
+ <name name="location"></name>
+ </datatype>
+ <datatype>
+ <name name="text"></name>
+ </datatype>
+ </datatypes>
+
+ <funcs>
+ <func>
+ <name name="column" arity="1"/>
+ <type name="column"></type>
+ <fsummary>Return the column</fsummary>
+ <desc>
+ <p>Returns the column of the annotations <anno>Anno</anno>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="end_location" arity="1"/>
+ <type name="location"></type>
+ <fsummary>Return the end location of the text</fsummary>
+ <desc>
+ <p>Returns the end location of the text of the
+ annotations <anno>Anno</anno>. If there is no text,
+ <c>undefined</c> is returned.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="file" arity="1"/>
+ <type name="filename"></type>
+ <fsummary>Return the filename</fsummary>
+ <desc>
+ <p>Returns the filename of the annotations <anno>Anno</anno>.
+ If there is no filename, <c>undefined</c> is returned.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="from_term" arity="1"/>
+ <fsummary>Return annotations given a term</fsummary>
+ <desc>
+ <p>Returns annotations with the representation <anno>Term</anno>.
+ </p>
+ <!--
+ <p>Although it is possible to create new annotations by calling
+ <c>from_term/1</c>, the intention is that one should not do
+ so - the proper way to create annotations is to call
+ <c>new/1</c> and then modify the annotations
+ by calling the <c>set_*</c> functions.</p>
+ -->
+ <p>See also <seealso marker="#to_term/1">to_term()</seealso>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="generated" arity="1"/>
+ <type name="generated"></type>
+ <fsummary>Return the generated Boolean</fsummary>
+ <desc>
+ <p>Returns <c>true</c> if the annotations <anno>Anno</anno>
+ has been marked as generated. The default is to return
+ <c>false</c>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="is_anno" arity="1"/>
+ <fsummary>Test for a collection of annotations</fsummary>
+ <desc>
+ <p>Returns <c>true</c> if <anno>Term</anno> is a collection of
+ annotations, <c>false</c> otherwise.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="line" arity="1"/>
+ <type name="line"></type>
+ <fsummary>Return the line</fsummary>
+ <desc>
+ <p>Returns the line of the annotations <anno>Anno</anno>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="location" arity="1"/>
+ <type name="location"></type>
+ <fsummary>Return the location</fsummary>
+ <desc>
+ <p>Returns the location of the annotations <anno>Anno</anno>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="new" arity="1"/>
+ <type name="location"></type>
+ <fsummary>Create a new collection of annotations</fsummary>
+ <desc>
+ <p>Creates a new collection of annotations given a location.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="set_file" arity="2"/>
+ <type name="filename"></type>
+ <fsummary>Modify the filename</fsummary>
+ <desc>
+ <p>Modifies the filename of the annotations <anno>Anno</anno>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="set_generated" arity="2"/>
+ <type name="generated"></type>
+ <fsummary>Modify the generated marker</fsummary>
+ <desc>
+ <p>Modifies the generated marker of the annotations
+ <anno>Anno</anno>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="set_line" arity="2"/>
+ <type name="line"></type>
+ <fsummary>Modify the line</fsummary>
+ <desc>
+ <p>Modifies the line of the annotations <anno>Anno</anno>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="set_location" arity="2"/>
+ <type name="location"></type>
+ <fsummary>Modify the location</fsummary>
+ <desc>
+ <p>Modifies the location of the annotations <anno>Anno</anno>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="set_record" arity="2"/>
+ <type name="record"></type>
+ <fsummary>Modify the record marker</fsummary>
+ <desc>
+ <p>Modifies the record marker of the annotations <anno>Anno</anno>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="set_text" arity="2"/>
+ <type name="text"></type>
+ <fsummary>Modify the text</fsummary>
+ <desc>
+ <p>Modifies the text of the annotations <anno>Anno</anno>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="text" arity="1"/>
+ <type name="text"></type>
+ <fsummary>Return the text</fsummary>
+ <desc>
+ <p>Returns the text of the annotations <anno>Anno</anno>.
+ If there is no text, <c>undefined</c> is returned.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="to_term" arity="1"/>
+ <fsummary>Return the term representing a collection of
+ annotations</fsummary>
+ <desc>
+ <p>Returns the term representing the annotations <anno>Anno</anno>.
+ </p>
+ <p>See also <seealso marker="#from_term/1">from_term()</seealso>.
+ </p>
+ </desc>
+ </func>
+ </funcs>
+ <section>
+ <title>See Also</title>
+ <p><seealso marker="erl_scan">erl_scan(3)</seealso>,
+ <seealso marker="erl_parse">erl_parse(3)</seealso>
+ </p>
+ </section>
+</erlref>
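A short sketch of the API documented above; the file name and location are arbitrary:

    anno_sketch() ->
        Anno0 = erl_anno:new({17, 5}),                 %% line 17, column 5
        17 = erl_anno:line(Anno0),
        5 = erl_anno:column(Anno0),
        {17, 5} = erl_anno:location(Anno0),
        Anno1 = erl_anno:set_file("t.erl", Anno0),
        "t.erl" = erl_anno:file(Anno1),
        true = erl_anno:is_anno(Anno1),
        %% The underlying representation, e.g. for serialization.
        erl_anno:to_term(Anno1).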
diff --git a/lib/stdlib/doc/src/erl_parse.xml b/lib/stdlib/doc/src/erl_parse.xml
index cf0bff48cd..b97d06e919 100644
--- a/lib/stdlib/doc/src/erl_parse.xml
+++ b/lib/stdlib/doc/src/erl_parse.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1996</year><year>2014</year>
+ <year>1996</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -192,6 +192,97 @@
considered a string.</p>
</desc>
</func>
+ <func>
+ <name name="map_anno" arity="2"/>
+ <fsummary>
+ Map a function over the annotations of an abstract form
+ </fsummary>
+ <desc>
+ <p>Modifies the abstract form <anno>Abstr</anno> by applying
+ <anno>Fun</anno> on every collection of annotations of the
+ abstract form. The abstract form is traversed in a
+ depth-first, left-to-right, fashion.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="fold_anno" arity="3"/>
+ <fsummary>
+ Fold a function over the annotations of an abstract form
+ </fsummary>
+ <desc>
+ <p>Updates an accumulator by applying <anno>Fun</anno> on
+ every collection of annotations of the abstract form
+ <anno>Abstr</anno>. The first call to <anno>Fun</anno> has
+ <anno>AccIn</anno> as argument, and the returned accumulator
+ <anno>AccOut</anno> is passed to the next call, and so on.
+ The final value of the accumulator is returned. The abstract
+ form is traversed in a depth-first, left-to-right, fashion.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="mapfold_anno" arity="3"/>
+ <fsummary>
+ Map and fold a function over the annotations of an abstract form
+ </fsummary>
+ <desc>
+ <p>Modifies the abstract form <anno>Abstr</anno> by applying
+ <anno>Fun</anno> on every collection of annotations of the
+ abstract form, while at the same time updating an
+ accumulator. The first call to <anno>Fun</anno> has
+ <anno>AccIn</anno> as second argument, and the returned
+ accumulator <anno>AccOut</anno> is passed to the next call,
+ and so on. The modified abstract form as well as the
+ final value of the accumulator are returned. The abstract
+ form is traversed in a depth-first, left-to-right, fashion.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="new_anno" arity="1"/>
+ <fsummary>
+ Create new annotations
+ </fsummary>
+ <desc>
+ <p>Creates an abstract form from a term which has the same
+ structure as an abstract form, but with <seealso
+ marker="erl_anno#type-location">locations</seealso> where the
+ abstract form has annotations. For each location, <seealso
+ marker="erl_anno#new/1"><c>erl_anno:new/1</c></seealso> is
+ called, and the annotations replace the location.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="anno_from_term" arity="1"/>
+ <fsummary>
+ Return annotations as terms
+ </fsummary>
+ <desc>
+ <p>Assumes that <anno>Term</anno> is a term with the same
+ structure as an abstract form, but with terms, T say, on
+ those places where an abstract form has annotations. Returns
+ an abstract form where every term T has been replaced by the
+ value returned by calling <c>erl_anno:from_term(T)</c>. The
+ term <anno>Term</anno> is traversed in a depth-first,
+ left-to-right, fashion.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="anno_to_term" arity="1"/>
+ <fsummary>
+ Return the representation of annotations
+ </fsummary>
+ <desc>
+ <p>Returns a term where every collection of annotations Anno of
+ <anno>Abstr</anno> has been replaced by the term returned by
+ calling <c>erl_anno:to_term(Anno)</c>. The abstract form is
+ traversed in a depth-first, left-to-right, fashion.
+ </p>
+ </desc>
+ </func>
</funcs>
<section>
@@ -211,8 +302,9 @@
<section>
<title>See Also</title>
<p><seealso marker="io">io(3)</seealso>,
- <seealso marker="erl_scan">erl_scan(3)</seealso>,
- ERTS User's Guide</p>
+ <seealso marker="erl_anno">erl_anno(3)</seealso>,
+ <seealso marker="erl_scan">erl_scan(3)</seealso>,
+ <seealso marker="erts:absform">ERTS User's Guide</seealso></p>
</section>
</erlref>
diff --git a/lib/stdlib/doc/src/erl_scan.xml b/lib/stdlib/doc/src/erl_scan.xml
index 855c8fc195..8f9c1db25b 100644
--- a/lib/stdlib/doc/src/erl_scan.xml
+++ b/lib/stdlib/doc/src/erl_scan.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1996</year><year>2013</year>
+ <year>1996</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -120,7 +120,7 @@
<c>string(<anno>String</anno>,
<anno>StartLocation</anno>, [])</c>.</p>
<p><c><anno>StartLocation</anno></c> indicates the initial location
- when scanning starts. If <c><anno>StartLocation</anno></c> is a line
+ when scanning starts. If <c><anno>StartLocation</anno></c> is a line,
<c>attributes()</c> as well as <c><anno>EndLocation</anno></c> and
<c><anno>ErrorLocation</anno></c> will be lines. If
<c><anno>StartLocation</anno></c> is a pair of a line and a column
@@ -132,8 +132,12 @@
line where the token begins, as well as the text of the
token (if the <c>text</c> option is given), all of which can
be accessed by calling <seealso
- marker="#token_info/1">token_info/1,2</seealso> or <seealso
- marker="#attributes_info/1">attributes_info/1,2</seealso>.</p>
+ marker="#token_info/1">token_info/1,2</seealso>, <seealso
+ marker="#attributes_info/1">attributes_info/1,2</seealso>,
+ <seealso marker="#column/1">column/1</seealso>,
+ <seealso marker="#line/1">line/1</seealso>,
+ <seealso marker="#location/1">location/1</seealso>, and
+ <seealso marker="#text/1">text/1</seealso>.</p>
<p>A <em>token</em> is a tuple containing information about
syntactic category, the token attributes, and the actual
terminal symbol. For punctuation characters (e.g. <c>;</c>,
@@ -237,6 +241,70 @@
</desc>
</func>
<func>
+ <name name="category" arity="1"/>
+ <fsummary>Return the category</fsummary>
+ <desc>
+ <p>Returns the category of <c><anno>Token</anno></c>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="symbol" arity="1"/>
+ <fsummary>Return the symbol</fsummary>
+ <desc>
+ <p>Returns the symbol of <c><anno>Token</anno></c>.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="column" arity="1"/>
+ <fsummary>Return the column</fsummary>
+ <desc>
+ <p>Returns the column of <c><anno>Token</anno></c>'s
+ collection of annotations.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="end_location" arity="1"/>
+ <fsummary>Return the end location of the text</fsummary>
+ <desc>
+ <p>Returns the end location of the text of
+ <c><anno>Token</anno></c>'s collection of annotations. If
+ there is no text,
+ <c>undefined</c> is returned.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="line" arity="1"/>
+ <fsummary>Return the line</fsummary>
+ <desc>
+ <p>Returns the line of <c><anno>Token</anno></c>'s collection
+ of annotations.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="location" arity="1"/>
+ <fsummary>Return the location</fsummary>
+ <desc>
+ <p>Returns the location of <c><anno>Token</anno></c>'s
+ collection of annotations.
+ </p>
+ </desc>
+ </func>
+ <func>
+ <name name="text" arity="1"/>
+ <fsummary>Return the text</fsummary>
+ <desc>
+ <p>Returns the text of <c><anno>Token</anno></c>'s collection
+ of annotations. If there is no text, <c>undefined</c> is
+ returned.
+ </p>
+ </desc>
+ </func>
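The new accessors replace the token_info/attributes_info style of lookup; a minimal
sketch:

    token_sketch() ->
        {ok, [Tok | _], _End} = erl_scan:string("foo bar", {1, 1}, [text]),
        atom  = erl_scan:category(Tok),
        foo   = erl_scan:symbol(Tok),
        1     = erl_scan:line(Tok),
        1     = erl_scan:column(Tok),
        "foo" = erl_scan:text(Tok),
        erl_scan:end_location(Tok).    %% {1, 4}, since the text is known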
+ <func>
<name name="token_info" arity="1"/>
<fsummary>Return information about a token</fsummary>
<desc>
@@ -417,6 +485,7 @@ Module:format_error(ErrorDescriptor)</code>
<section>
<title>See Also</title>
<p><seealso marker="io">io(3)</seealso>,
- <seealso marker="erl_parse">erl_parse(3)</seealso></p>
+ <seealso marker="erl_anno">erl_anno(3)</seealso>,
+ <seealso marker="erl_parse">erl_parse(3)</seealso></p>
</section>
</erlref>
diff --git a/lib/stdlib/doc/src/ets.xml b/lib/stdlib/doc/src/ets.xml
index 3df24bf688..2bfe074c3e 100644
--- a/lib/stdlib/doc/src/ets.xml
+++ b/lib/stdlib/doc/src/ets.xml
@@ -456,6 +456,12 @@ Error: fun containing local Erlang function calls
<item><c>{type, <seealso marker="#type-type">type()</seealso>}</c> <br></br>
The table type.</item>
+ <item><c>{read_concurrency, boolean()}</c> <br></br>
+
+ Indicates whether the table uses read_concurrency or not.</item>
+ <item><c>{write_concurrency, boolean()}</c> <br></br>
+
+ Indicates whether the table uses write_concurrency or not.</item>
</list>
</desc>
</func>
@@ -1429,7 +1435,9 @@ is_integer(X), is_integer(Y), X + Y < 4711]]></code>
<p>Whenever the <c>extended_info</c> option is used, it
results in a file not readable by versions of ets prior to
that in stdlib-1.15.1</p>
-
+ <p>The <c>sync</c> option, if set to <c>true</c>, ensures that
+ the content of the file is actually written to the disk before
+ <c>tab2file</c> returns. Default is <c>{sync, false}</c>.</p>
</desc>
</func>
<func>
@@ -1587,6 +1595,21 @@ true</pre>
</desc>
</func>
<func>
+ <name name="take" arity="2"/>
+ <fsummary>Return and remove all objects with a given key from an ETS
+ table.</fsummary>
+ <desc>
+ <p>Returns and removes a list of all objects with the key <c><anno>Key</anno></c> in
+ the table <c><anno>Tab</anno></c>.</p>
+ <p>The given <c><anno>Key</anno></c> is used to identify the object by
+ either <em>comparing equal</em> the key of an object in an
+ <c>ordered_set</c> table, or <em>matching</em> in other types of
+ tables (see <seealso marker="#lookup/2">lookup/2</seealso> and
+ <seealso marker="#new/2">new/2</seealso> for details on the
+ difference).</p>
+ </desc>
+ </func>
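In other words, take/2 behaves like lookup/2 immediately followed by delete/2, in one
operation. A sketch:

    T = ets:new(fruit, [set]),
    true = ets:insert(T, {banana, yellow}),
    [{banana, yellow}] = ets:take(T, banana),   %% returned ...
    [] = ets:lookup(T, banana),                 %% ... and removed
    [] = ets:take(T, missing_key).              %% absent keys give []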
+ <func>
<name name="to_dets" arity="2"/>
<fsummary>Fill a Dets table with objects from an ETS table.</fsummary>
<desc>
@@ -1597,14 +1620,18 @@ true</pre>
</func>
<func>
<name name="update_counter" arity="3" clause_i="1"/>
+ <name name="update_counter" arity="4" clause_i="1"/>
<name name="update_counter" arity="3" clause_i="2"/>
+ <name name="update_counter" arity="4" clause_i="2"/>
<name name="update_counter" arity="3" clause_i="3"/>
+ <name name="update_counter" arity="4" clause_i="3"/>
<type variable="Tab"/>
<type variable="Key"/>
<type variable="UpdateOp" name_i="1"/>
<type variable="Pos" name_i="1"/>
<type variable="Threshold" name_i="1"/>
<type variable="SetValue" name_i="1"/>
+ <type variable="Default"/>
<fsummary>Update a counter object in an ETS table.</fsummary>
<desc>
<p>This function provides an efficient way to update one or more
@@ -1646,12 +1673,22 @@ true</pre>
<seealso marker="#lookup/2">lookup/2</seealso> and
<seealso marker="#new/2">new/2</seealso>
for details on the difference).</p>
+ <p>If a default object <c><anno>Default</anno></c> is given, it is used
+ as the object to be updated if the key is missing from the table. The
+ value in place of the key is ignored and replaced by the proper key
+ value. The return value is as if the default object had not been used,
+ that is a single updated element or a list of them.</p>
<p>The function will fail with reason <c>badarg</c> if:</p>
<list type="bulleted">
<item>the table is not of type <c>set</c> or
<c>ordered_set</c>,</item>
- <item>no object with the right key exists,</item>
+ <item>no object with the right key exists and no default object was
+ supplied,</item>
<item>the object has the wrong arity,</item>
+ <item>the default object arity is smaller than
+ <c><![CDATA[<keypos>]]></c>,</item>
+ <item>any field from the default object being updated is not an
+ integer,</item>
<item>the element to update is not an integer,</item>
<item>the element to update is also the key, or,</item>
<item>any of <c><anno>Pos</anno></c>, <c><anno>Incr</anno></c>, <c><anno>Threshold</anno></c> or
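A sketch of the new default-object form of update_counter; the table and key names are
hypothetical, and the counter sits in position 2:

    T = ets:new(counters, [set]),
    %% No object with key page_hits exists yet: {page_hits, 0} is inserted
    %% first and then updated, so the first call already returns 1.
    1 = ets:update_counter(T, page_hits, {2, 1}, {page_hits, 0}),
    2 = ets:update_counter(T, page_hits, {2, 1}, {page_hits, 0}).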
diff --git a/lib/stdlib/doc/src/file_sorter.xml b/lib/stdlib/doc/src/file_sorter.xml
index 16572df3c5..c069333c29 100644
--- a/lib/stdlib/doc/src/file_sorter.xml
+++ b/lib/stdlib/doc/src/file_sorter.xml
@@ -105,9 +105,9 @@
<c>file:get_cwd()</c> is used instead. The names of
temporary files are derived from the Erlang nodename
(<c>node()</c>), the process identifier of the current Erlang
- emulator (<c>os:getpid()</c>), and a timestamp
- (<c>erlang:now()</c>); a typical name would be
- <c>fs_mynode@myhost_1763_1043_337000_266005.17</c>, where
+ emulator (<c>os:getpid()</c>), and a unique integer
+ (<c>erlang:unique_integer([positive])</c>); a typical name would be
+ <c>fs_mynode@myhost_1763_4711.17</c>, where
<c>17</c> is a sequence number. Existing files will be
overwritten. Temporary files are deleted unless some
uncaught EXIT signal occurs.
diff --git a/lib/stdlib/doc/src/gb_sets.xml b/lib/stdlib/doc/src/gb_sets.xml
index ea96c14472..405bae5698 100644
--- a/lib/stdlib/doc/src/gb_sets.xml
+++ b/lib/stdlib/doc/src/gb_sets.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2001</year><year>2014</year>
+ <year>2001</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -306,6 +306,17 @@
</desc>
</func>
<func>
+ <name name="iterator_from" arity="2"/>
+ <fsummary>Return an iterator for a set starting from a specified element</fsummary>
+ <desc>
+ <p>Returns an iterator that can be used for traversing the
+ entries of <c><anno>Set</anno></c>; see <c>next/1</c>.
+ The difference as compared to the iterator returned by
+ <c>iterator/1</c> is that the first element greater than
+ or equal to <c><anno>Element</anno></c> is returned.</p>
+ </desc>
+ </func>
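A sketch of the partial traversal this enables:

    S = gb_sets:from_list([1, 3, 5, 7]),
    I0 = gb_sets:iterator_from(4, S),
    {5, I1} = gb_sets:next(I0),    %% first element >= 4
    {7, _I2} = gb_sets:next(I1).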
+ <func>
<name name="largest" arity="1"/>
<fsummary>Return largest element</fsummary>
<desc>
diff --git a/lib/stdlib/doc/src/gb_trees.xml b/lib/stdlib/doc/src/gb_trees.xml
index b2f237e1d7..82167e1083 100644
--- a/lib/stdlib/doc/src/gb_trees.xml
+++ b/lib/stdlib/doc/src/gb_trees.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2001</year><year>2014</year>
+ <year>2001</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -183,6 +183,17 @@
</desc>
</func>
<func>
+ <name name="iterator_from" arity="2"/>
+ <fsummary>Return an iterator for a tree starting from specified key</fsummary>
+ <desc>
+ <p>Returns an iterator that can be used for traversing the
+ entries of <c><anno>Tree</anno></c>; see <c>next/1</c>.
+ The difference as compared to the iterator returned by
+ <c>iterator/1</c> is that the first key greater than
+ or equal to <c><anno>Key</anno></c> is returned.</p>
+ </desc>
+ </func>
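As for gb_sets above, a sketch of starting the traversal at a given key:

    T = gb_trees:from_orddict([{1, a}, {2, b}, {5, c}, {8, d}]),
    I0 = gb_trees:iterator_from(3, T),
    {5, c, I1} = gb_trees:next(I0),    %% first key >= 3
    {8, d, _I2} = gb_trees:next(I1).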
+ <func>
<name name="keys" arity="1"/>
<fsummary>Return a list of the keys in a tree</fsummary>
<desc>
diff --git a/lib/stdlib/doc/src/gen_server.xml b/lib/stdlib/doc/src/gen_server.xml
index a915e567a5..3c92de59b9 100644
--- a/lib/stdlib/doc/src/gen_server.xml
+++ b/lib/stdlib/doc/src/gen_server.xml
@@ -321,7 +321,7 @@ gen_server:abcast -----> Module:handle_cast/2
which may be infinity.</p>
<p>This problem does not exist if all nodes are Erlang nodes.</p>
</warning>
- <p>To avoid that late answers (after the timeout) pollutes
+ <p>To prevent late answers (after the timeout) from polluting
the caller's message queue, a middleman process is used to
do the actual calls. Late answers will then be discarded
when they arrive to a terminated process.</p>
diff --git a/lib/stdlib/doc/src/io.xml b/lib/stdlib/doc/src/io.xml
index a28180b42a..8ebfdb2e7f 100644
--- a/lib/stdlib/doc/src/io.xml
+++ b/lib/stdlib/doc/src/io.xml
@@ -505,7 +505,8 @@ ok
<p>Writes the data with standard syntax in the same way as
<c>~w</c>, but breaks terms whose printed representation
is longer than one line into many lines and indents each
- line sensibly. It also tries to detect lists of
+ line sensibly. Left justification is not supported.
+ It also tries to detect lists of
printable characters and to output these as strings. The
Unicode translation modifier is used for determining
what characters are printable. For example:</p>
diff --git a/lib/stdlib/doc/src/io_lib.xml b/lib/stdlib/doc/src/io_lib.xml
index 3312b08064..2117d66381 100644
--- a/lib/stdlib/doc/src/io_lib.xml
+++ b/lib/stdlib/doc/src/io_lib.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1996</year><year>2013</year>
+ <year>1996</year><year>2014</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -59,6 +59,35 @@
<datatype>
<name name="latin1_string"/>
</datatype>
+ <datatype>
+ <name name="format_spec"/>
+ <desc><p>Description:</p>
+ <list type="bulleted">
+ <item><p><c>control_char</c> is the type of control
+ sequence: <c>$P</c>, <c>$w</c>, and so on;</p>
+ </item>
+ <item><p><c>args</c> is a list of the arguments used by the
+ control sequence, or an empty list if the control sequence
+ does not take any arguments;</p>
+ </item>
+ <item><p><c>width</c> is the field width;</p>
+ </item>
+ <item><p><c>adjust</c> is the adjustment;</p>
+ </item>
+ <item><p><c>precision</c> is the precision of the printed
+ argument;</p>
+ </item>
+ <item><p><c>pad_char</c> is the padding character;</p>
+ </item>
+ <item><p><c>encoding</c> is set to <c>true</c> if the translation
+ modifier <c>t</c> is present;</p>
+ </item>
+ <item><p><c>strings</c> is set to <c>false</c> if the modifier
+ <c>l</c> is present.</p>
+ </item>
+ </list>
+ </desc>
+ </datatype>
</datatypes>
<funcs>
<func>
@@ -260,6 +289,45 @@
</desc>
</func>
<func>
+ <name name="scan_format" arity="2"/>
+ <fsummary>Parse all control sequences in the format string</fsummary>
+ <desc>
+ <p>Returns a list corresponding to the given format string,
+ where control sequences have been replaced with
+ corresponding tuples. This list can be passed to <seealso
+ marker="#build_text/1">io_lib:build_text/1</seealso> to have
+ the same effect as <c>io_lib:format(Format, Args)</c>, or to
+ <seealso
+ marker="#unscan_format/1">io_lib:unscan_format/1</seealso>
+ in order to get the corresponding pair of <c>Format</c> and
+ <c>Args</c> (with every <c>*</c> and corresponding argument
+ expanded to numeric values).</p>
+ <p>A typical use of this function is to replace unbounded-size
+ control sequences like <c>~w</c> and <c>~p</c> with the
+ depth-limited variants <c>~W</c> and <c>~P</c> before
+ formatting to text, e.g. in a logger.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="unscan_format" arity="1"/>
+ <fsummary>Revert a pre-parsed format list to a plain character list
+ and a list of arguments</fsummary>
+ <desc>
+ <p>See <seealso
+ marker="#scan_format/2">io_lib:scan_format/2</seealso> for
+ details.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="build_text" arity="1"/>
+ <fsummary>Build the output text for a pre-parsed format list</fsummary>
+ <desc>
+ <p>See <seealso
+ marker="#scan_format/2">io_lib:scan_format/2</seealso> for
+ details.</p>
+ </desc>
+ </func>
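A round-trip sketch of the three functions above. To get the depth-limiting effect
mentioned for scan_format/2 one would additionally rewrite the control_char of each
format_spec() element from $p/$w to $P/$W before building the text; that step is not
shown here:

    FormatList = io_lib:scan_format("~s: ~p~n", ["result", {ok, [1, 2, 3]}]),
    %% Same output as io_lib:format("~s: ~p~n", ["result", {ok, [1, 2, 3]}]).
    Text = io_lib:build_text(FormatList),
    %% ...and back to a plain format/argument pair.
    {Format, Args} = io_lib:unscan_format(FormatList).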
+ <func>
<name name="indentation" arity="2"/>
<fsummary>Indentation after printing string</fsummary>
<desc>
diff --git a/lib/stdlib/doc/src/lists.xml b/lib/stdlib/doc/src/lists.xml
index ee3c51c62c..dcc08d008b 100644
--- a/lib/stdlib/doc/src/lists.xml
+++ b/lib/stdlib/doc/src/lists.xml
@@ -176,7 +176,7 @@ filtermap(Fun, List1) ->
false -> Acc;
true -> [Elem|Acc];
{true,Value} -> [Value|Acc]
- end,
+ end
end, [], List1).</code>
<p>Example:</p>
<pre>
diff --git a/lib/stdlib/doc/src/maps.xml b/lib/stdlib/doc/src/maps.xml
index f766c843be..7345a9357a 100644
--- a/lib/stdlib/doc/src/maps.xml
+++ b/lib/stdlib/doc/src/maps.xml
@@ -33,6 +33,28 @@
<funcs>
<func>
+ <name name="filter" arity="2"/>
+ <fsummary>Choose pairs which satisfy a predicate</fsummary>
+ <desc>
+ <p>
+ Returns a map <c><anno>Map2</anno></c> containing the key-value pairs of
+ <c><anno>Map1</anno></c> for which the predicate <c><anno>Pred</anno></c> holds true.
+ </p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if
+ <c><anno>Map1</anno></c> is not a map or with <c>badarg</c> if
+ <c><anno>Pred</anno></c> is not a function of arity 2.
+ </p>
+ <p>Example:</p>
+ <code type="none">
+> M = #{a => 2, b => 3, c=> 4, "a" => 1, "b" => 2, "c" => 4},
+ Pred = fun(K,V) -> is_atom(K) andalso (V rem 2) =:= 0 end,
+ maps:filter(Pred,M).
+#{a => 2,c => 4} </code>
+ </desc>
+ </func>
+
+ <func>
<name name="find" arity="2"/>
<fsummary></fsummary>
<desc>
@@ -40,6 +62,9 @@
Returns a tuple <c>{ok, Value}</c> where <c><anno>Value</anno></c> is the value associated with <c><anno>Key</anno></c>,
or <c>error</c> if no value is associated with <c><anno>Key</anno></c> in <c><anno>Map</anno></c>.
</p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if <c><anno>Map</anno></c> is not a map.
+ </p>
<p>Example:</p>
<code type="none">
> Map = #{"hi" => 42},
@@ -95,8 +120,10 @@
<p>
Returns the value <c><anno>Value</anno></c> associated with <c><anno>Key</anno></c> if
<c><anno>Map</anno></c> contains <c><anno>Key</anno></c>.
- If no value is associated with <c><anno>Key</anno></c> then the call will
- fail with an exception.
+ </p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if <c><anno>Map</anno></c> is not a map,
+ or with a <c>{badkey,Key}</c> exception if no value is associated with <c><anno>Key</anno></c>.
</p>
<p>Example:</p>
<code type="none">
@@ -116,6 +143,10 @@
<c><anno>Map</anno></c> contains <c><anno>Key</anno></c>.
If no value is associated with <c><anno>Key</anno></c> then returns <c><anno>Default</anno></c>.
</p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if <c><anno>Map</anno></c> is not a map.
+
+ </p>
<p>Example:</p>
<code type="none">
> Map = #{ key1 => val1, key2 => val2 }.
@@ -134,7 +165,9 @@ val1
<p>
Returns <c>true</c> if map <c><anno>Map</anno></c> contains <c><anno>Key</anno></c> and returns
<c>false</c> if it does not contain the <c><anno>Key</anno></c>.
- The function will fail with an exception if <c><anno>Map</anno></c> is not a Map.
+ </p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if <c><anno>Map</anno></c> is not a map.
</p>
<p>Example:</p>
<code type="none">
@@ -154,6 +187,9 @@ false</code>
<p>
 Returns a complete list of keys, in arbitrary order, that reside within <c><anno>Map</anno></c>.
</p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if <c><anno>Map</anno></c> is not a map.
+ </p>
<p>Example:</p>
<code type="none">
> Map = #{42 => value_three,1337 => "value two","a" => 1},
@@ -189,6 +225,10 @@ false</code>
Merges two maps into a single map <c><anno>Map3</anno></c>. If two keys exists in both maps the
value in <c><anno>Map1</anno></c> will be superseded by the value in <c><anno>Map2</anno></c>.
</p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if <c><anno>Map1</anno></c> or
+ <c><anno>Map2</anno></c> is not a map.
+ </p>
<p>Example:</p>
<code type="none">
> Map1 = #{a => "value_one", b => "value_two"},
@@ -222,6 +262,10 @@ false</code>
replaced by value <c><anno>Value</anno></c>. The function returns a new map <c><anno>Map2</anno></c> containing the new association and
the old associations in <c><anno>Map1</anno></c>.
</p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if <c><anno>Map1</anno></c> is not a map.
+ </p>
+
<p>Example:</p>
<code type="none">
> Map = #{"a" => 1}.
@@ -241,6 +285,9 @@ false</code>
The function removes the <c><anno>Key</anno></c>, if it exists, and its associated value from
<c><anno>Map1</anno></c> and returns a new map <c><anno>Map2</anno></c> without key <c><anno>Key</anno></c>.
</p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if <c><anno>Map1</anno></c> is not a map.
+ </p>
<p>Example:</p>
<code type="none">
> Map = #{"a" => 1}.
@@ -276,6 +323,9 @@ false</code>
 The function returns a list of pairs representing the key-value associations of <c><anno>Map</anno></c>,
where the pairs, <c>[{K1,V1}, ..., {Kn,Vn}]</c>, are returned in arbitrary order.
</p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if <c><anno>Map</anno></c> is not a map.
+ </p>
<p>Example:</p>
<code type="none">
> Map = #{42 => value_three,1337 => "value two","a" => 1},
@@ -291,8 +341,11 @@ false</code>
<p>
If <c><anno>Key</anno></c> exists in <c><anno>Map1</anno></c> the old associated value is
replaced by value <c><anno>Value</anno></c>. The function returns a new map <c><anno>Map2</anno></c> containing
- the new associated value. If <c><anno>Key</anno></c> does not exist in <c><anno>Map1</anno></c> an exception is
- generated.
+ the new associated value.
+ </p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if <c><anno>Map1</anno></c> is not a map,
+ or with a <c>{badkey,Key}</c> exception if no value is associated with <c><anno>Key</anno></c>.
</p>
<p>Example:</p>
<code type="none">
@@ -308,7 +361,10 @@ false</code>
<fsummary></fsummary>
<desc>
<p>
- Returns a complete list of values, in arbitrary order, contained in map <c>M</c>.
+ Returns a complete list of values, in arbitrary order, contained in map <c>Map</c>.
+ </p>
+ <p>
+ The call will fail with a <c>{badmap,Map}</c> exception if <c><anno>Map</anno></c> is not a map.
</p>
<p>Example:</p>
<code type="none">
diff --git a/lib/stdlib/doc/src/math.xml b/lib/stdlib/doc/src/math.xml
index 43cd20e726..7cfc8a1175 100644
--- a/lib/stdlib/doc/src/math.xml
+++ b/lib/stdlib/doc/src/math.xml
@@ -67,6 +67,7 @@
<name name="atanh" arity="1"/>
<name name="exp" arity="1"/>
<name name="log" arity="1"/>
+ <name name="log2" arity="1"/>
<name name="log10" arity="1"/>
<name name="pow" arity="2"/>
<name name="sqrt" arity="1"/>
diff --git a/lib/stdlib/doc/src/notes.xml b/lib/stdlib/doc/src/notes.xml
index 8582bfc9f9..301a5ee2e8 100644
--- a/lib/stdlib/doc/src/notes.xml
+++ b/lib/stdlib/doc/src/notes.xml
@@ -30,6 +30,41 @@
</header>
<p>This document describes the changes made to the STDLIB application.</p>
+<section><title>STDLIB 2.4</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Behaviour of character types \d, \w and \s has always
+ been to not match characters with value above 255, not
+ 128, i.e. they are limited to ISO-Latin-1 and not ASCII</p>
+ <p>
+ Own Id: OTP-12521</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ c:m/1 now displays the module's MD5 sum.</p>
+ <p>
+ Own Id: OTP-12500</p>
+ </item>
+ <item>
+ <p>
+ Make ets:i/1 handle binary input from IO server.</p>
+ <p>
+ Own Id: OTP-12550</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>STDLIB 2.3</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/stdlib/doc/src/orddict.xml b/lib/stdlib/doc/src/orddict.xml
index 6d1702bc59..c853b402d4 100644
--- a/lib/stdlib/doc/src/orddict.xml
+++ b/lib/stdlib/doc/src/orddict.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2000</year><year>2013</year>
+ <year>2000</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -48,8 +48,11 @@
<datatypes>
<datatype>
- <name name="orddict"/>
- <desc><p>As returned by new/0.</p></desc>
+ <name name="orddict" n_vars="2"/>
+ <desc><p>Dictionary as returned by <c>new/0</c>.</p></desc>
+ </datatype>
+ <datatype>
+ <name name="orddict" n_vars="0"/>
</datatype>
</datatypes>
@@ -125,8 +128,7 @@
<c><anno>Orddict</anno></c> together with an extra argument <c>Acc</c>
(short for accumulator). <c><anno>Fun</anno></c> must return a new
accumulator which is passed to the next call. <c><anno>Acc0</anno></c> is
- returned if the list is empty. The evaluation order is
- undefined.</p>
+ returned if the list is empty.</p>
</desc>
</func>
<func>
@@ -150,8 +152,7 @@
<fsummary>Map a function over a dictionary</fsummary>
<desc>
<p><c>map</c> calls <c><anno>Fun</anno></c> on successive keys and values
- of <c><anno>Orddict1</anno></c> to return a new value for each key.
- The evaluation order is undefined.</p>
+ of <c><anno>Orddict1</anno></c> to return a new value for each key.</p>
</desc>
</func>
<func>
diff --git a/lib/stdlib/doc/src/proc_lib.xml b/lib/stdlib/doc/src/proc_lib.xml
index f27a974242..9a0ff85038 100644
--- a/lib/stdlib/doc/src/proc_lib.xml
+++ b/lib/stdlib/doc/src/proc_lib.xml
@@ -173,7 +173,7 @@
<name name="init_ack" arity="2"/>
<fsummary>Used by a process when it has started.</fsummary>
<desc>
- <p>This function must used by a process that has been started by
+ <p>This function must be used by a process that has been started by
a <seealso marker="#start/3">start[_link]/3,4,5</seealso>
function. It tells <c><anno>Parent</anno></c> that the process has
initialized itself, has started, or has failed to initialize
diff --git a/lib/stdlib/doc/src/rand.xml b/lib/stdlib/doc/src/rand.xml
new file mode 100644
index 0000000000..178afda5a0
--- /dev/null
+++ b/lib/stdlib/doc/src/rand.xml
@@ -0,0 +1,246 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2015</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ The contents of this file are subject to the Erlang Public License,
+ Version 1.1, (the "License"); you may not use this file except in
+ compliance with the License. You should have received a copy of the
+ Erlang Public License along with this software. If not, it can be
+ retrieved online at http://www.erlang.org/.
+
+ Software distributed under the License is distributed on an "AS IS"
+ basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+ the License for the specific language governing rights and limitations
+ under the License.
+
+ </legalnotice>
+
+ <title>rand</title>
+ <prepared></prepared>
+ <responsible></responsible>
+ <docno>1</docno>
+ <approved></approved>
+ <checked></checked>
+ <date></date>
+ <rev>A</rev>
+ <file>rand.xml</file>
+ </header>
+ <module>rand</module>
+ <modulesummary>Pseudo random number generation</modulesummary>
+ <description>
+ <p>Random number generator.</p>
+
+ <p>The module contains several different algorithms and can be
+ extended with more in the future. The current uniform
+    distribution algorithms use the
+ <url href="http://xorshift.di.unimi.it">
+ scrambled Xorshift algorithms by Sebastiano Vigna</url> and the
+ normal distribution algorithm uses the
+ <url href="http://www.jstatsoft.org/v05/i08">
+ Ziggurat Method by Marsaglia and Tsang</url>.
+ </p>
+
+ <p>The implemented algorithms are:</p>
+ <taglist>
+ <tag><c>exsplus</c></tag> <item>Xorshift116+, 58 bits precision and period of 2^116-1.</item>
+ <tag><c>exs64</c></tag> <item>Xorshift64*, 64 bits precision and a period of 2^64-1.</item>
+ <tag><c>exs1024</c></tag> <item>Xorshift1024*, 64 bits precision and a period of 2^1024-1.</item>
+ </taglist>
+
+ <p>The current default algorithm is <c>exsplus</c>. The default
+    may change in the future. If a specific algorithm is required,
+    make sure to always use <seealso marker="#seed-1">seed/1</seealso>
+ to initialize the state.
+ </p>
+
+ <p>Every time a random number is requested, a state is used to
+    calculate it and a new state is produced. The state can either be
+ implicit or it can be an explicit argument and return value.
+ </p>
+
+ <p>The functions with implicit state use the process dictionary
+ variable <c>rand_seed</c> to remember the current state.</p>
+
+ <p>If a process calls <seealso marker="#uniform-0">uniform/0</seealso> or
+ <seealso marker="#uniform-1">uniform/1</seealso> without
+ setting a seed first, <seealso marker="#seed-1">seed/1</seealso>
+ is called automatically with the default algorithm and creates a
+ non-constant seed.</p>
+
+ <p>The functions with explicit state never use the process
+ dictionary.</p>
+
+ <p>Examples:</p>
+ <pre>
+ %% Simple usage. Creates and seeds the default algorithm
+ %% with a non-constant seed if not already done.
+ R0 = rand:uniform(),
+ R1 = rand:uniform(),
+
+ %% Use a given algorithm.
+ _ = rand:seed(exs1024),
+ R2 = rand:uniform(),
+
+ %% Use a given algorithm with a constant seed.
+ _ = rand:seed(exs1024, {123, 123534, 345345}),
+ R3 = rand:uniform(),
+
+    %% Use the functional API with a non-constant seed.
+ S0 = rand:seed_s(exsplus),
+ {R4, S1} = rand:uniform_s(S0),
+
+ %% Create a standard normal deviate.
+ {SND0, S2} = rand:normal_s(S1),
+ </pre>
+
+ <note><p>This random number generator is not cryptographically
+ strong. If a strong cryptographic random number generator is
+    needed, use one of the functions in the
+ <seealso marker="crypto:crypto">crypto</seealso>
+ module, for example <c>crypto:rand_bytes/1</c>.</p></note>
+ </description>
+ <datatypes>
+ <datatype>
+ <name name="alg"/>
+ </datatype>
+
+ <datatype>
+ <name name="state"/>
+ <desc><p>Algorithm dependent state.</p></desc>
+ </datatype>
+
+ <datatype>
+ <name name="export_state"/>
+ <desc><p>Algorithm dependent state which can be printed or saved to file.</p></desc>
+ </datatype>
+ </datatypes>
+
+ <funcs>
+ <func>
+ <name name="seed" arity="1"/>
+ <fsummary>Seed random number generator</fsummary>
+ <desc>
+ <marker id="seed-1"/>
+ <p>Seeds random number generation with the given algorithm and time dependent
+ data if <anno>AlgOrExpState</anno> is an algorithm.</p>
+ <p>Otherwise recreates the exported seed in the process
+ dictionary, and returns the state.
+ <em>See also:</em> <seealso marker="#export_seed-0">export_seed/0</seealso>.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="seed_s" arity="1"/>
+ <fsummary>Seed random number generator</fsummary>
+ <desc>
+ <p>Seeds random number generation with the given algorithm and time dependent
+ data if <anno>AlgOrExpState</anno> is an algorithm.</p>
+ <p>Otherwise recreates the exported seed and returns the state.
+ <em>See also:</em> <seealso marker="#export_seed-0">export_seed/0</seealso>.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="seed" arity="2"/>
+ <fsummary>Seed the random number generation</fsummary>
+ <desc>
+      <p>Seeds random number generation with the given algorithm and
+      integers, stores the state in the process dictionary, and
+      returns the state.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="seed_s" arity="2"/>
+ <fsummary>Seed the random number generation</fsummary>
+ <desc>
+ <p>Seeds random number generation with the given algorithm and
+ integers and returns the state.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="export_seed" arity="0"/>
+ <fsummary>Export the random number generation state</fsummary>
+ <desc><marker id="export_seed-0"/>
+ <p>Returns the random number state in an external format.
+ To be used with <seealso marker="#seed-1">seed/1</seealso>.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="export_seed_s" arity="1"/>
+ <fsummary>Export the random number generation state</fsummary>
+ <desc><marker id="export_seed_s-1"/>
+ <p>Returns the random number generator state in an external format.
+ To be used with <seealso marker="#seed-1">seed/1</seealso>.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="uniform" arity="0"/>
+ <fsummary>Return a random float</fsummary>
+ <desc>
+ <marker id="uniform-0"/>
+ <p>Returns a random float uniformly distributed in the value
+ range <c>0.0 &lt; <anno>X</anno> &lt; 1.0 </c> and
+ updates the state in the process dictionary.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="uniform_s" arity="1"/>
+ <fsummary>Return a random float</fsummary>
+ <desc>
+ <p>Given a state, <c>uniform_s/1</c> returns a random float
+ uniformly distributed in the value range <c>0.0 &lt;
+ <anno>X</anno> &lt; 1.0</c> and a new state.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="uniform" arity="1"/>
+ <fsummary>Return a random integer</fsummary>
+ <desc>
+ <marker id="uniform-1"/>
+ <p>Given an integer <c><anno>N</anno> >= 1</c>,
+ <c>uniform/1</c> returns a random integer uniformly
+ distributed in the value range
+ <c>1 &lt;= <anno>X</anno> &lt;= <anno>N</anno></c> and
+ updates the state in the process dictionary.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="uniform_s" arity="2"/>
+ <fsummary>Return a random integer</fsummary>
+ <desc>
+ <p>Given an integer <c><anno>N</anno> >= 1</c> and a state,
+ <c>uniform_s/2</c> returns a random integer uniformly
+ distributed in the value range <c>1 &lt;= <anno>X</anno> &lt;=
+ <anno>N</anno></c> and a new state.</p>
+ </desc>
+ </func>
+
+ <func>
+ <name name="normal" arity="0"/>
+ <fsummary>Return a standard normal distributed random float</fsummary>
+ <desc>
+ <p>Returns a standard normal deviate float (that is, the mean
+ is 0 and the standard deviation is 1) and updates the state in
+ the process dictionary.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="normal_s" arity="1"/>
+ <fsummary>Return a standard normal distributed random float</fsummary>
+ <desc>
+ <p>Given a state, <c>normal_s/1</c> returns a standard normal
+ deviate float (that is, the mean is 0 and the standard
+ deviation is 1) and a new state.</p>
+ </desc>
+ </func>
+
+ </funcs>
+</erlref>
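The export_seed/0 and seed/1 pair documented above allows a run to be reproduced later. A minimal sketch (illustrative, not part of the patch):

    %% Save the exported state, then replay the same sequence.
    _ = rand:uniform(),
    Exported = rand:export_seed(),
    R = rand:uniform(),
    _ = rand:seed(Exported),
    R = rand:uniform().   % same value as the one generated above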
diff --git a/lib/stdlib/doc/src/random.xml b/lib/stdlib/doc/src/random.xml
index e001058e19..e475cda23d 100644
--- a/lib/stdlib/doc/src/random.xml
+++ b/lib/stdlib/doc/src/random.xml
@@ -48,6 +48,9 @@
<p>It should be noted that this random number generator is not cryptographically
strong. If a strong cryptographic random number generator is needed for
example <c>crypto:rand_bytes/1</c> could be used instead.</p>
+ <note><p>The new and improved <seealso
+ marker="stdlib:rand">rand</seealso> module should be used
+ instead of this module.</p></note>
</description>
<datatypes>
<datatype>
@@ -70,12 +73,11 @@
<desc>
<p>Seeds random number generation with integer values in the process
dictionary, and returns the old state.</p>
- <p>One way of obtaining a seed is to use the BIF <c>now/0</c>:</p>
+      <p>One easy way of obtaining a unique value to seed with is:</p>
<code type="none">
- ...
- {A1,A2,A3} = now(),
- random:seed(A1, A2, A3),
- ...</code>
+ random:seed(<seealso marker="erts:erlang#phash2/1">erlang:phash2</seealso>([<seealso marker="erts:erlang#node/0">node()</seealso>]),
+ <seealso marker="erts:erlang#monotonic_time/0">erlang:monotonic_time()</seealso>,
+ <seealso marker="erts:erlang#unique_integer/0">erlang:unique_integer()</seealso>)</code>
</desc>
</func>
<func>
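Given the deprecation note added above, new code would typically call the rand module instead; a minimal migration sketch (illustrative only):

    %% rand seeds itself with a non-constant seed on first use.
    N = rand:uniform(100),   % integer in 1..100
    F = rand:uniform().      % float in (0.0, 1.0)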
diff --git a/lib/stdlib/doc/src/re.xml b/lib/stdlib/doc/src/re.xml
index a1833f6a51..5af1468e9b 100644
--- a/lib/stdlib/doc/src/re.xml
+++ b/lib/stdlib/doc/src/re.xml
@@ -150,7 +150,11 @@ This option makes it possible to include comments inside complicated patterns. N
<tag><c>no_start_optimize</c></tag>
<item>This option disables optimization that may malfunction if "Special start-of-pattern items" are present in the regular expression. A typical example would be when matching "DEFABC" against "(*COMMIT)ABC", where the start optimization of PCRE would skip the subject up to the "A" and would never realize that the (*COMMIT) instruction should have made the matching fail. This option is only relevant if you use "start-of-pattern items", as discussed in the section "PCRE regular expression details" below.</item>
<tag><c>ucp</c></tag>
- <item>Specifies that Unicode Character Properties should be used when resolving \B, \b, \D, \d, \S, \s, \Wand \w. Without this flag, only ISO-Latin-1 properties are used. Using Unicode properties hurts performance, but is semantically correct when working with Unicode characters beyond the ISO-Latin-1 range.</item>
+ <item>Specifies that Unicode Character Properties should be used when
+ resolving \B, \b, \D, \d, \S, \s, \W and \w. Without this flag, only
+ ISO-Latin-1 properties are used. Using Unicode properties hurts
+ performance, but is semantically correct when working with Unicode
+ characters beyond the ISO-Latin-1 range.</item>
<tag><c>never_utf</c></tag>
<item>Specifies that the (*UTF) and/or (*UTF8) "start-of-pattern items" are forbidden. This flag can not be combined with <c>unicode</c>. Useful if ISO-Latin-1 patterns from an external source are to be compiled.</item>
</taglist>
@@ -966,7 +970,7 @@ appearance causes an error.
</quote>
<p>This has the same effect as setting the <c>ucp</c> option: it causes sequences
such as \d and \w to use Unicode properties to determine character types,
-instead of recognizing only characters with codes less than 128 via a lookup
+instead of recognizing only characters with codes less than 256 via a lookup
table.
</p>
@@ -1307,7 +1311,8 @@ By default, the definition of letters and digits is controlled by PCRE's
low-valued character tables, in Erlang's case (and without the <c>unicode</c> option),
the ISO-Latin-1 character set.</p>
-<p>By default, in <c>unicode</c> mode, characters with values greater than 128 never match
+<p>By default, in <c>unicode</c> mode, characters with values greater than 255,
+i.e. all characters outside the ISO-Latin-1 character set, never match
\d, \s, or \w, and always match \D, \S, and \W. These sequences retain
their original meanings from before UTF support was available, mainly for
efficiency reasons. However, if the <c>ucp</c> option is set, the behaviour is changed so that Unicode
@@ -1954,10 +1959,10 @@ can be included in a class as a literal string of data units, or by using the
upper case and lower case versions, so for example, a caseless [aeiou] matches
"A" as well as "a", and a caseless [^aeiou] does not match "A", whereas a
caseful version would. In a UTF mode, PCRE always understands the concept of
-case for characters whose values are less than 128, so caseless matching is
+case for characters whose values are less than 256, so caseless matching is
always possible. For characters with higher values, the concept of case is
supported if PCRE is compiled with Unicode property support, but not otherwise.
-If you want to use caseless matching in a UTF mode for characters 128 and
+If you want to use caseless matching in a UTF mode for characters 256 and
above, you must ensure that PCRE is compiled with Unicode property support as
well as with UTF support.</p>
@@ -1989,7 +1994,7 @@ matches the letters in either case. For example, [W-c] is equivalent to
[][\\^_`wxyzabc], matched caselessly, and in a non-UTF mode, if character
tables for a French locale are in use, [\xc8-\xcb] matches accented E
characters in both cases. In UTF modes, PCRE supports the concept of case for
-characters with values greater than 128 only when it is compiled with Unicode
+characters with values greater than 255 only when it is compiled with Unicode
property support.</p>
<p>The character escape sequences \d, \D, \h, \H, \p, \P, \s, \S, \v,
@@ -2062,7 +2067,7 @@ by a ^ character after the colon. For example,</p>
syntax [.ch.] and [=ch=] where "ch" is a "collating element", but these are not
supported, and an error is given if they are encountered.</p>
-<p>By default, in UTF modes, characters with values greater than 128 do not match
+<p>By default, in UTF modes, characters with values greater than 255 do not match
any of the POSIX character classes. However, if the PCRE_UCP option is passed
to <b>pcre_compile()</b>, some of the classes are changed so that Unicode
character properties are used. This is achieved by replacing the POSIX classes
@@ -2081,7 +2086,7 @@ by other sequences, as follows:</p>
<p>Negated versions, such as [:^alpha:] use \P instead of \p. The other POSIX
classes are unchanged, and match only characters with code points less than
-128.</p>
+256.</p>
</section>
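As a concrete illustration of the ucp behaviour described above (a sketch assuming PCRE built with Unicode property support, as in standard OTP builds):

    %% U+03BB (Greek small letter lambda) has a value above 255, so \w
    %% only matches it when the ucp option is given.
    nomatch    = re:run([16#3BB], "\\w", [unicode]),
    {match, _} = re:run([16#3BB], "\\w", [unicode, ucp]).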
diff --git a/lib/stdlib/doc/src/ref_man.xml b/lib/stdlib/doc/src/ref_man.xml
index ea4009dc3e..eee4a68ca1 100644
--- a/lib/stdlib/doc/src/ref_man.xml
+++ b/lib/stdlib/doc/src/ref_man.xml
@@ -4,7 +4,7 @@
<application xmlns:xi="http://www.w3.org/2001/XInclude">
<header>
<copyright>
- <year>1996</year><year>2013</year>
+ <year>1996</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -45,6 +45,7 @@
<xi:include href="digraph.xml"/>
<xi:include href="digraph_utils.xml"/>
<xi:include href="epp.xml"/>
+ <xi:include href="erl_anno.xml"/>
<xi:include href="erl_eval.xml"/>
<xi:include href="erl_expand_records.xml"/>
<xi:include href="erl_id_trans.xml"/>
@@ -78,6 +79,7 @@
<xi:include href="proplists.xml"/>
<xi:include href="qlc.xml"/>
<xi:include href="queue.xml"/>
+ <xi:include href="rand.xml"/>
<xi:include href="random.xml"/>
<xi:include href="re.xml"/>
<xi:include href="sets.xml"/>
diff --git a/lib/stdlib/doc/src/sets.xml b/lib/stdlib/doc/src/sets.xml
index c5b8dce4b7..4a31648f8f 100644
--- a/lib/stdlib/doc/src/sets.xml
+++ b/lib/stdlib/doc/src/sets.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2000</year><year>2014</year>
+ <year>2000</year><year>2015</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -65,7 +65,7 @@
</func>
<func>
<name name="is_set" arity="1"/>
- <fsummary>Test for an <c>Set</c></fsummary>
+ <fsummary>Test for a <c>Set</c></fsummary>
<desc>
<p>Returns <c>true</c> if <c><anno>Set</anno></c> is a set of
elements, otherwise <c>false</c>.</p>
@@ -80,21 +80,22 @@
</func>
<func>
<name name="to_list" arity="1"/>
- <fsummary>Convert an <c>Set</c>into a list</fsummary>
+      <fsummary>Convert a <c>Set</c> into a list</fsummary>
<desc>
- <p>Returns the elements of <c><anno>Set</anno></c> as a list.</p>
+ <p>Returns the elements of <c><anno>Set</anno></c> as a list.
+ The order of the returned elements is undefined.</p>
</desc>
</func>
<func>
<name name="from_list" arity="1"/>
- <fsummary>Convert a list into an <c>Set</c></fsummary>
+ <fsummary>Convert a list into a <c>Set</c></fsummary>
<desc>
- <p>Returns an set of the elements in <c><anno>List</anno></c>.</p>
+ <p>Returns a set of the elements in <c><anno>List</anno></c>.</p>
</desc>
</func>
<func>
<name name="is_element" arity="2"/>
- <fsummary>Test for membership of an <c>Set</c></fsummary>
+ <fsummary>Test for membership of a <c>Set</c></fsummary>
<desc>
<p>Returns <c>true</c> if <c><anno>Element</anno></c> is an element of
<c><anno>Set</anno></c>, otherwise <c>false</c>.</p>
@@ -102,7 +103,7 @@
</func>
<func>
<name name="add_element" arity="2"/>
- <fsummary>Add an element to an <c>Set</c></fsummary>
+ <fsummary>Add an element to a <c>Set</c></fsummary>
<desc>
<p>Returns a new set formed from <c><anno>Set1</anno></c> with
<c><anno>Element</anno></c> inserted.</p>
@@ -110,7 +111,7 @@
</func>
<func>
<name name="del_element" arity="2"/>
- <fsummary>Remove an element from an <c>Set</c></fsummary>
+ <fsummary>Remove an element from a <c>Set</c></fsummary>
<desc>
<p>Returns <c><anno>Set1</anno></c>, but with <c><anno>Element</anno></c> removed.</p>
</desc>
@@ -175,7 +176,8 @@
<fsummary>Fold over set elements</fsummary>
<desc>
<p>Fold <c><anno>Function</anno></c> over every element in <c><anno>Set</anno></c>
- returning the final value of the accumulator.</p>
+ returning the final value of the accumulator.
+ The evaluation order is undefined.</p>
</desc>
</func>
<func>
diff --git a/lib/stdlib/doc/src/specs.xml b/lib/stdlib/doc/src/specs.xml
index fd77b52da6..0418bf7b22 100644
--- a/lib/stdlib/doc/src/specs.xml
+++ b/lib/stdlib/doc/src/specs.xml
@@ -11,6 +11,7 @@
<xi:include href="../specs/specs_digraph.xml"/>
<xi:include href="../specs/specs_digraph_utils.xml"/>
<xi:include href="../specs/specs_epp.xml"/>
+ <xi:include href="../specs/specs_erl_anno.xml"/>
<xi:include href="../specs/specs_erl_eval.xml"/>
<xi:include href="../specs/specs_erl_expand_records.xml"/>
<xi:include href="../specs/specs_erl_id_trans.xml"/>
@@ -44,6 +45,7 @@
<xi:include href="../specs/specs_proplists.xml"/>
<xi:include href="../specs/specs_qlc.xml"/>
<xi:include href="../specs/specs_queue.xml"/>
+ <xi:include href="../specs/specs_rand.xml"/>
<xi:include href="../specs/specs_random.xml"/>
<xi:include href="../specs/specs_re.xml"/>
<xi:include href="../specs/specs_sets.xml"/>
diff --git a/lib/stdlib/doc/src/supervisor.xml b/lib/stdlib/doc/src/supervisor.xml
index ffac1c0bd7..6ff477a42d 100644
--- a/lib/stdlib/doc/src/supervisor.xml
+++ b/lib/stdlib/doc/src/supervisor.xml
@@ -386,9 +386,15 @@
added to the supervisor and the function returns the same
value.</p>
<p>If the child process start function returns <c>ignore</c>,
- the child specification is added to the supervisor, the pid
- is set to <c>undefined</c>, and the function returns
- <c>{ok,undefined}</c>.</p>
+ the child specification is added to the supervisor (unless the
+ supervisor is a <c>simple_one_for_one</c> supervisor, see below),
+ the pid is set to <c>undefined</c> and the function returns
+ <c>{ok,undefined}</c>.
+ </p>
+ <p>In the case of a <c>simple_one_for_one</c> supervisor, when a child
+	  process start function returns <c>ignore</c>, the function returns
+ <c>{ok,undefined}</c> and no child is added to the supervisor.
+ </p>
<p>If the child process start function returns an error tuple or
an erroneous value, or if it fails, the child specification is
discarded, and the function returns <c>{error,Error}</c> where
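A hypothetical child start function illustrating the simple_one_for_one case described above (module and helper names are made up):

    %% Returning ignore makes supervisor:start_child/2 return
    %% {ok,undefined} without adding a child to a simple_one_for_one
    %% supervisor.
    start_link(Arg) ->
        case is_wanted(Arg) of
            true  -> gen_server:start_link(?MODULE, Arg, []);
            false -> ignore
        end.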
diff --git a/lib/stdlib/doc/src/timer.xml b/lib/stdlib/doc/src/timer.xml
index eca9a72d36..791a29689e 100644
--- a/lib/stdlib/doc/src/timer.xml
+++ b/lib/stdlib/doc/src/timer.xml
@@ -217,12 +217,14 @@
</func>
<func>
<name name="now_diff" arity="2"/>
- <fsummary>Calculate time difference between <c>now/0</c>timestamps</fsummary>
+ <fsummary>Calculate time difference between timestamps</fsummary>
<type_desc variable="Tdiff">In microseconds</type_desc>
<desc>
<p>Calculates the time difference <c><anno>Tdiff</anno> = <anno>T2</anno> - <anno>T1</anno></c> in
- <em>microseconds</em>, where <c><anno>T1</anno></c> and <c><anno>T2</anno></c> probably
- are timestamp tuples returned from <c>erlang:now/0</c>.</p>
+ <em>microseconds</em>, where <c><anno>T1</anno></c> and <c><anno>T2</anno></c>
+      are timestamp tuples in the same format as returned from
+ <seealso marker="erts:erlang#timestamp/0"><c>erlang:timestamp/0</c></seealso>,
+ or <seealso marker="kernel:os#timestamp/0"><c>os:timestamp/0</c></seealso>.</p>
</desc>
</func>
<func>
@@ -234,7 +236,7 @@
</func>
<func>
<name name="minutes" arity="1"/>
- <fsummary>Converts <c>Minutes</c>to <c>Milliseconds</c>.</fsummary>
+ <fsummary>Converts <c>Minutes</c> to <c>Milliseconds</c>.</fsummary>
<desc>
<p>Return the number of milliseconds in <c><anno>Minutes</anno></c>.</p>
</desc>
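A small usage sketch of now_diff/2 with the timestamp sources mentioned above:

    T0 = os:timestamp(),
    timer:sleep(100),
    Micros = timer:now_diff(os:timestamp(), T0).   % roughly 100000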
diff --git a/lib/stdlib/doc/src/zip.xml b/lib/stdlib/doc/src/zip.xml
index 48b376743d..d201e81a79 100644
--- a/lib/stdlib/doc/src/zip.xml
+++ b/lib/stdlib/doc/src/zip.xml
@@ -135,6 +135,12 @@
<p>These options are described in <seealso marker="#zip_options">create/3</seealso>.</p>
</desc>
</datatype>
+ <datatype>
+ <name name="handle"/>
+ <desc>
+ <p>As returned by <seealso marker="#zip_open/2">zip_open/2</seealso>.</p>
+ </desc>
+ </datatype>
</datatypes>
<funcs>
<func>
@@ -430,6 +436,8 @@
means that subsequently reading files from the archive will be
faster than unzipping files one at a time with <c>unzip</c>.</p>
<p>The archive must be closed with <c>zip_close/1</c>.</p>
+ <p>The <c><anno>ZipHandle</anno></c> will be closed if the
+ process which originally opened the archive dies.</p>
</desc>
</func>
<func>
diff --git a/lib/stdlib/src/Makefile b/lib/stdlib/src/Makefile
index 1b3744b6fb..55bda60da5 100644
--- a/lib/stdlib/src/Makefile
+++ b/lib/stdlib/src/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1996-2013. All Rights Reserved.
+# Copyright Ericsson AB 1996-2015. All Rights Reserved.
#
# The contents of this file are subject to the Erlang Public License,
# Version 1.1, (the "License"); you may not use this file except in
@@ -58,6 +58,7 @@ MODULES= \
edlin \
edlin_expand \
epp \
+ erl_anno \
erl_bits \
erl_compile \
erl_eval \
@@ -104,6 +105,7 @@ MODULES= \
qlc \
qlc_pt \
queue \
+ rand \
random \
sets \
shell \
@@ -168,6 +170,7 @@ docs:
# specifications.
primary_bootstrap_compiler: \
$(BOOTSTRAP_COMPILER)/ebin/epp.beam \
+ $(BOOTSTRAP_COMPILER)/ebin/erl_anno.beam \
$(BOOTSTRAP_COMPILER)/ebin/erl_scan.beam \
$(BOOTSTRAP_COMPILER)/ebin/erl_parse.beam \
$(BOOTSTRAP_COMPILER)/ebin/erl_lint.beam \
diff --git a/lib/stdlib/src/beam_lib.erl b/lib/stdlib/src/beam_lib.erl
index 1a7b7d5a5e..4a6b489204 100644
--- a/lib/stdlib/src/beam_lib.erl
+++ b/lib/stdlib/src/beam_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -652,7 +652,13 @@ chunk_to_data(abstract_code=Id, Chunk, File, _Cs, AtomTable, Mod) ->
{'EXIT', _} ->
error({invalid_chunk, File, chunk_name_to_id(Id, File)});
Term ->
- {AtomTable, {Id, Term}}
+ try
+ {AtomTable, {Id, anno_from_term(Term)}}
+ catch
+ _:_ ->
+ error({invalid_chunk, File,
+ chunk_name_to_id(Id, File)})
+ end
end
end;
chunk_to_data(atoms=Id, _Chunk, _File, Cs, AtomTable0, _Mod) ->
@@ -878,7 +884,22 @@ decrypt_abst(Type, Module, File, Id, AtomTable, Bin) ->
decrypt_abst_1({Type,Key,IVec,_BlockSize}, Bin) ->
ok = start_crypto(),
NewBin = crypto:block_decrypt(Type, Key, IVec, Bin),
- binary_to_term(NewBin).
+ Term = binary_to_term(NewBin),
+ anno_from_term(Term).
+
+anno_from_term({raw_abstract_v1, Forms}) ->
+ {raw_abstract_v1, anno_from_forms(Forms)};
+anno_from_term({Tag, Forms}) when Tag =:= abstract_v1; Tag =:= abstract_v2 ->
+ try {Tag, anno_from_forms(Forms)}
+ catch
+ _:_ ->
+ {Tag, Forms}
+ end;
+anno_from_term(T) ->
+ T.
+
+anno_from_forms(Forms) ->
+ [erl_parse:anno_from_term(Form) || Form <- Forms].
start_crypto() ->
case crypto:start() of
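The anno_from_term/1 conversion above is applied when the abstract_code chunk is read; a hedged sketch (the beam file name is hypothetical and the module is assumed to be compiled with debug_info):

    %% The second element of each form is now an erl_anno annotation.
    {ok, {_Mod, [{abstract_code, {raw_abstract_v1, Forms}}]}} =
        beam_lib:chunks("ebin/some_module.beam", [abstract_code]).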
diff --git a/lib/stdlib/src/binary.erl b/lib/stdlib/src/binary.erl
index b94829892d..de26784ead 100644
--- a/lib/stdlib/src/binary.erl
+++ b/lib/stdlib/src/binary.erl
@@ -89,9 +89,9 @@ copy(_, _) ->
decode_unsigned(_) ->
erlang:nif_error(undef).
--spec decode_unsigned(Subject, Endianess) -> Unsigned when
+-spec decode_unsigned(Subject, Endianness) -> Unsigned when
Subject :: binary(),
- Endianess :: big | little,
+ Endianness :: big | little,
Unsigned :: non_neg_integer().
decode_unsigned(_, _) ->
@@ -103,9 +103,9 @@ decode_unsigned(_, _) ->
encode_unsigned(_) ->
erlang:nif_error(undef).
--spec encode_unsigned(Unsigned, Endianess) -> binary() when
+-spec encode_unsigned(Unsigned, Endianness) -> binary() when
Unsigned :: non_neg_integer(),
- Endianess :: big | little.
+ Endianness :: big | little.
encode_unsigned(_, _) ->
erlang:nif_error(undef).
diff --git a/lib/stdlib/src/c.erl b/lib/stdlib/src/c.erl
index c2256c0cf9..d5b24d3c32 100644
--- a/lib/stdlib/src/c.erl
+++ b/lib/stdlib/src/c.erl
@@ -27,7 +27,7 @@
lc_batch/0, lc_batch/1,
i/3,pid/3,m/0,m/1,
bt/1, q/0,
- erlangrc/0,erlangrc/1,bi/1, flush/0, regs/0,
+ erlangrc/0,erlangrc/1,bi/1, flush/0, regs/0, uptime/0,
nregs/0,pwd/0,ls/0,ls/1,cd/1,memory/1,memory/0, xm/1]).
-export([display_info/1]).
@@ -65,6 +65,7 @@ help() ->
"q() -- quit - shorthand for init:stop()\n"
"regs() -- information about registered processes\n"
"nregs() -- information about all registered processes\n"
+ "uptime() -- print node uptime\n"
"xm(M) -- cross reference check a module\n"
"y(File) -- generate a Yecc parser\n">>).
@@ -509,9 +510,12 @@ m(M) ->
{exports,E} = lists:keyfind(exports, 1, L),
Time = get_compile_time(L),
COpts = get_compile_options(L),
- format("Module ~w compiled: ",[M]), print_time(Time),
- format("Compiler options: ~p~n", [COpts]),
+ format("Module: ~w~n", [M]),
+ print_md5(L),
+ format("Compiled: "),
+ print_time(Time),
print_object_file(M),
+ format("Compiler options: ~p~n", [COpts]),
format("Exports: ~n",[]), print_exports(keysort(1, E)).
print_object_file(Mod) ->
@@ -522,6 +526,12 @@ print_object_file(Mod) ->
ignore
end.
+print_md5(L) ->
+ case lists:keyfind(md5, 1, L) of
+ {md5,<<MD5:128>>} -> io:format("MD5: ~.16b~n",[MD5]);
+ _ -> ok
+ end.
+
get_compile_time(L) ->
case get_compile_info(L, time) of
{ok,Val} -> Val;
@@ -569,8 +579,8 @@ split_print_exports([{F1, A1}|T1], [{F2, A2} | T2]) ->
split_print_exports([], []) -> ok.
print_time({Year,Month,Day,Hour,Min,_Secs}) ->
- format("Date: ~s ~w ~w, ", [month(Month),Day,Year]),
- format("Time: ~.2.0w.~.2.0w~n", [Hour,Min]);
+ format("~s ~w ~w, ", [month(Month),Day,Year]),
+ format("~.2.0w:~.2.0w~n", [Hour,Min]);
print_time(notime) ->
format("No compile time info available~n",[]).
@@ -765,6 +775,26 @@ memory() -> erlang:memory().
memory(TypeSpec) -> erlang:memory(TypeSpec).
%%
+%% uptime/0
+%%
+
+-spec uptime() -> 'ok'.
+
+uptime() ->
+ io:format("~s~n", [uptime(get_uptime())]).
+
+uptime({D, {H, M, S}}) ->
+ lists:flatten(
+ [[ io_lib:format("~p days, ", [D]) || D > 0 ],
+ [ io_lib:format("~p hours, ", [H]) || D+H > 0 ],
+ [ io_lib:format("~p minutes and ", [M]) || D+H+M > 0 ],
+ io_lib:format("~p seconds", [S])]).
+
+get_uptime() ->
+ {UpTime, _} = erlang:statistics(wall_clock),
+ calendar:seconds_to_daystime(UpTime div 1000).
+
+%%
%% Cross Reference Check
%%
%%-spec xm(module() | file:filename()) -> xref:m/1 return
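For reference, the kind of shell output the new uptime/0 helper produces (values are illustrative):

    1> c:uptime().
    2 days, 5 hours, 12 minutes and 36 seconds
    ok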
diff --git a/lib/stdlib/src/calendar.erl b/lib/stdlib/src/calendar.erl
index 0320e0cd0e..d08001c933 100644
--- a/lib/stdlib/src/calendar.erl
+++ b/lib/stdlib/src/calendar.erl
@@ -299,7 +299,7 @@ local_time_to_universal_time_dst(DateTime) ->
%% now_to_universal_time(Now)
%% now_to_datetime(Now)
%%
-%% Convert from now() to UTC.
+%% Convert from erlang:timestamp() to UTC.
%%
%% Args: Now = now(); now() = {MegaSec, Sec, MilliSec}, MegaSec = Sec
%% = MilliSec = integer()
diff --git a/lib/stdlib/src/dets.erl b/lib/stdlib/src/dets.erl
index a4bd45ea19..5d365ac962 100644
--- a/lib/stdlib/src/dets.erl
+++ b/lib/stdlib/src/dets.erl
@@ -1963,7 +1963,7 @@ do_safe_fixtable(Head, Pid, true) ->
case Head#head.fixed of
false ->
link(Pid),
- Fixed = {erlang:now(), [{Pid, 1}]},
+ Fixed = {utime_now(), [{Pid, 1}]},
Ftab = dets_utils:get_freelists(Head),
Head#head{fixed = Fixed, freelists = {Ftab, Ftab}};
{TimeStamp, Counters} ->
@@ -3088,14 +3088,14 @@ update_cache(Head, ToAdd) ->
{Head1, Found, []};
Cache#cache.wrtime =:= undefined ->
%% Empty cache. Schedule a delayed write.
- Now = now(), Me = self(),
+ Now = time_now(), Me = self(),
Call = ?DETS_CALL(Me, {delayed_write, Now}),
erlang:send_after(Cache#cache.delay, Me, Call),
{Head1#head{cache = NewCache#cache{wrtime = Now}}, Found, []};
Size0 =:= 0 ->
%% Empty cache that has been written after the
%% currently scheduled delayed write.
- {Head1#head{cache = NewCache#cache{wrtime = now()}}, Found, []};
+ {Head1#head{cache = NewCache#cache{wrtime = time_now()}}, Found, []};
true ->
%% Cache is not empty, delayed write has been scheduled.
{Head1, Found, []}
@@ -3158,11 +3158,7 @@ delayed_write(Head, WrTime) ->
Head#head{cache = NewCache};
true ->
%% Yes, schedule a new delayed write.
- {MS1,S1,M1} = WrTime,
- {MS2,S2,M2} = LastWrTime,
- WrT = M1+1000000*(S1+1000000*MS1),
- LastWrT = M2+1000000*(S2+1000000*MS2),
- When = round((LastWrT - WrT)/1000), Me = self(),
+ When = round((LastWrTime - WrTime)/1000), Me = self(),
Call = ?DETS_CALL(Me, {delayed_write, LastWrTime}),
erlang:send_after(When, Me, Call),
Head
@@ -3274,6 +3270,16 @@ err(Error) ->
Error
end.
+-compile({inline, [time_now/0]}).
+time_now() ->
+ erlang:monotonic_time(1000000).
+
+-compile({inline, [utime_now/0]}).
+utime_now() ->
+ Time = time_now(),
+ UniqueCounter = erlang:unique_integer([monotonic]),
+ {Time, UniqueCounter}.
+
%%%%%%%%%%%%%%%%% DEBUG functions %%%%%%%%%%%%%%%%
file_info(FileName) ->
diff --git a/lib/stdlib/src/dets_utils.erl b/lib/stdlib/src/dets_utils.erl
index 6c176ad513..26e22dbd5b 100644
--- a/lib/stdlib/src/dets_utils.erl
+++ b/lib/stdlib/src/dets_utils.erl
@@ -447,7 +447,7 @@ reset_cache(C) ->
WrTime =:= undefined ->
WrTime;
true ->
- now()
+ erlang:monotonic_time(1000000)
end,
PK = family(C#cache.cache),
NewC = C#cache{cache = [], csize = 0, inserts = 0, wrtime = NewWrTime},
diff --git a/lib/stdlib/src/dict.erl b/lib/stdlib/src/dict.erl
index cf8fb3114a..5a9f63c5e2 100644
--- a/lib/stdlib/src/dict.erl
+++ b/lib/stdlib/src/dict.erl
@@ -417,6 +417,8 @@ on_bucket(F, T, Slot) ->
%% could have implemented map and filter using fold but these are
%% faster. We hope!
+fold_dict(F, Acc, #dict{size=0}) when is_function(F, 3) ->
+ Acc;
fold_dict(F, Acc, D) ->
Segs = D#dict.segs,
fold_segs(F, Acc, Segs, tuple_size(Segs)).
@@ -434,6 +436,8 @@ fold_bucket(F, Acc, [?kv(Key,Val)|Bkt]) ->
fold_bucket(F, F(Key, Val, Acc), Bkt);
fold_bucket(F, Acc, []) when is_function(F, 3) -> Acc.
+map_dict(F, #dict{size=0} = Dict) when is_function(F, 2) ->
+ Dict;
map_dict(F, D) ->
Segs0 = tuple_to_list(D#dict.segs),
Segs1 = map_seg_list(F, Segs0),
@@ -453,6 +457,8 @@ map_bucket(F, [?kv(Key,Val)|Bkt]) ->
[?kv(Key,F(Key, Val))|map_bucket(F, Bkt)];
map_bucket(F, []) when is_function(F, 2) -> [].
+filter_dict(F, #dict{size=0} = Dict) when is_function(F, 2) ->
+ Dict;
filter_dict(F, D) ->
Segs0 = tuple_to_list(D#dict.segs),
{Segs1,Fc} = filter_seg_list(F, Segs0, [], 0),
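The new first clauses above short-circuit fold, map, and filter for an empty dict while still checking the fun's arity; for example (illustrative):

    %% The fun is never applied when the dict is empty.
    0 = dict:fold(fun(_K, V, Acc) -> Acc + V end, 0, dict:new()).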
diff --git a/lib/stdlib/src/digraph.erl b/lib/stdlib/src/digraph.erl
index 0c21271529..1f8caa88a4 100644
--- a/lib/stdlib/src/digraph.erl
+++ b/lib/stdlib/src/digraph.erl
@@ -36,7 +36,7 @@
-export([get_short_path/3, get_short_cycle/2]).
--export_type([graph/0, d_type/0, vertex/0, edge/0]).
+-export_type([graph/0, d_type/0, vertex/0, edge/0, label/0]).
-record(digraph, {vtab = notable :: ets:tab(),
etab = notable :: ets:tab(),
diff --git a/lib/stdlib/src/edlin.erl b/lib/stdlib/src/edlin.erl
index b3bc5f6d92..362669545e 100644
--- a/lib/stdlib/src/edlin.erl
+++ b/lib/stdlib/src/edlin.erl
@@ -21,7 +21,7 @@
%% A simple Emacs-like line editor.
%% About Latin-1 characters: see the beginning of erl_scan.erl.
--export([init/0,start/1,start/2,edit_line/2,prefix_arg/1]).
+-export([init/0,init/1,start/1,start/2,edit_line/2,prefix_arg/1]).
-export([erase_line/1,erase_inp/1,redraw_line/1]).
-export([length_before/1,length_after/1,prompt/1]).
-export([current_line/1, current_chars/1]).
@@ -44,6 +44,20 @@
init() ->
put(kill_buffer, []).
+init(Pid) ->
+ %% copy the kill_buffer from the process Pid
+ CopiedKillBuf =
+ case erlang:process_info(Pid, dictionary) of
+ {dictionary,Dict} ->
+ case proplists:get_value(kill_buffer, Dict) of
+ undefined -> [];
+ Buf -> Buf
+ end;
+ undefined ->
+ []
+ end,
+ put(kill_buffer, CopiedKillBuf).
+
%% start(Prompt)
%% edit(Characters, Continuation)
%% Return
diff --git a/lib/stdlib/src/epp.erl b/lib/stdlib/src/epp.erl
index 5f8637c118..7866b5f792 100644
--- a/lib/stdlib/src/epp.erl
+++ b/lib/stdlib/src/epp.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -158,7 +158,7 @@ scan_erl_form(Epp) ->
{'ok', AbsForm} | {'eof', Line} | {error, ErrorInfo} when
Epp :: epp_handle(),
AbsForm :: erl_parse:abstract_form(),
- Line :: erl_scan:line(),
+ Line :: erl_anno:line(),
ErrorInfo :: erl_scan:error_info() | erl_parse:error_info().
parse_erl_form(Epp) ->
@@ -220,7 +220,7 @@ format_error(E) -> file:format_error(E).
IncludePath :: [DirectoryName :: file:name()],
Form :: erl_parse:abstract_form() | {'error', ErrorInfo} | {'eof',Line},
PredefMacros :: macros(),
- Line :: erl_scan:line(),
+ Line :: erl_anno:line(),
ErrorInfo :: erl_scan:error_info() | erl_parse:error_info(),
OpenError :: file:posix() | badarg | system_limit.
@@ -235,7 +235,7 @@ parse_file(Ifile, Path, Predefs) ->
{'default_encoding', DefEncoding :: source_encoding()} |
'extra'],
Form :: erl_parse:abstract_form() | {'error', ErrorInfo} | {'eof',Line},
- Line :: erl_scan:line(),
+ Line :: erl_anno:line(),
ErrorInfo :: erl_scan:error_info() | erl_parse:error_info(),
Extra :: [{'encoding', source_encoding() | 'none'}],
OpenError :: file:posix() | badarg | system_limit.
@@ -257,7 +257,7 @@ parse_file(Ifile, Options) ->
-spec parse_file(Epp) -> [Form] when
Epp :: epp_handle(),
Form :: erl_parse:abstract_form() | {'error', ErrorInfo} | {'eof',Line},
- Line :: erl_scan:line(),
+ Line :: erl_anno:line(),
ErrorInfo :: erl_scan:error_info() | erl_parse:error_info().
parse_file(Epp) ->
@@ -280,7 +280,7 @@ parse_file(Epp) ->
{error,E} ->
[{error,E}|parse_file(Epp)];
{eof,Location} ->
- [{eof,Location}]
+ [{eof,erl_anno:new(Location)}]
end.
-spec default_encoding() -> source_encoding().
@@ -547,7 +547,8 @@ init_server(Pid, Name, Options, St0) ->
path=Path, macs=Ms1,
default_encoding=DefEncoding},
From = wait_request(St),
- enter_file_reply(From, Name, AtLocation, AtLocation),
+ Anno = erl_anno:new(AtLocation),
+ enter_file_reply(From, Name, Anno, AtLocation, code),
wait_req_scan(St);
{error,E} ->
epp_reply(Pid, {error,E})
@@ -559,15 +560,16 @@ init_server(Pid, Name, Options, St0) ->
predef_macros(File) ->
Machine = list_to_atom(erlang:system_info(machine)),
+ Anno = line1(),
dict:from_list([
- {{atom,'FILE'}, {none,[{string,1,File}]}},
- {{atom,'LINE'}, {none,[{integer,1,1}]}},
+ {{atom,'FILE'}, {none,[{string,Anno,File}]}},
+ {{atom,'LINE'}, {none,[{integer,Anno,1}]}},
{{atom,'MODULE'}, undefined},
{{atom,'MODULE_STRING'}, undefined},
{{atom,'BASE_MODULE'}, undefined},
{{atom,'BASE_MODULE_STRING'}, undefined},
- {{atom,'MACHINE'}, {none,[{atom,1,Machine}]}},
- {{atom,Machine}, {none,[{atom,1,true}]}}
+ {{atom,'MACHINE'}, {none,[{atom,Anno,Machine}]}},
+ {{atom,Machine}, {none,[{atom,Anno,true}]}}
]).
%% user_predef(PreDefMacros, Macros) ->
@@ -595,8 +597,9 @@ user_predef([M|Pdm], Ms) when is_atom(M) ->
{ok,_Def} -> %% Predefined macros
{error,{redefine_predef,M}};
error ->
+ A = line1(),
user_predef(Pdm,
- dict:store({atom,M}, [{none, {none,[{atom,1,true}]}}], Ms))
+ dict:store({atom,M}, [{none, {none,[{atom,A,true}]}}], Ms))
end;
user_predef([Md|_Pdm], _Ms) -> {error,{bad,Md}};
user_predef([], Ms) -> {ok,Ms}.
@@ -645,7 +648,7 @@ wait_req_skip(St, Sis) ->
enter_file(_NewName, Inc, From, St)
when length(St#epp.sstk) >= 8 ->
- epp_reply(From, {error,{abs_loc(Inc),epp,{depth,"include"}}}),
+ epp_reply(From, {error,{loc(Inc),epp,{depth,"include"}}}),
wait_req_scan(St);
enter_file(NewName, Inc, From, St) ->
case file:path_open(St#epp.path, NewName, [read]) of
@@ -653,7 +656,7 @@ enter_file(NewName, Inc, From, St) ->
Loc = start_loc(St#epp.location),
wait_req_scan(enter_file2(NewF, Pname, From, St, Loc));
{error,_E} ->
- epp_reply(From, {error,{abs_loc(Inc),epp,{include,file,NewName}}}),
+ epp_reply(From, {error,{loc(Inc),epp,{include,file,NewName}}}),
wait_req_scan(St)
end.
@@ -661,9 +664,9 @@ enter_file(NewName, Inc, From, St) ->
%% Set epp to use this file and "enter" it.
enter_file2(NewF, Pname, From, St0, AtLocation) ->
- Loc = start_loc(AtLocation),
- enter_file_reply(From, Pname, Loc, AtLocation),
- Ms = dict:store({atom,'FILE'}, {none,[{string,Loc,Pname}]}, St0#epp.macs),
+ Anno = erl_anno:new(AtLocation),
+ enter_file_reply(From, Pname, Anno, AtLocation, code),
+ Ms = dict:store({atom,'FILE'}, {none,[{string,Anno,Pname}]}, St0#epp.macs),
%% update the head of the include path to be the directory of the new
%% source file, so that an included file can always include other files
%% relative to its current location (this is also how C does it); note
@@ -673,16 +676,20 @@ enter_file2(NewF, Pname, From, St0, AtLocation) ->
Path = [filename:dirname(Pname) | tl(St0#epp.path)],
DefEncoding = St0#epp.default_encoding,
_ = set_encoding(NewF, DefEncoding),
- #epp{file=NewF,location=Loc,name=Pname,name2=Pname,delta=0,
+ #epp{file=NewF,location=AtLocation,name=Pname,name2=Pname,delta=0,
sstk=[St0|St0#epp.sstk],path=Path,macs=Ms,
default_encoding=DefEncoding}.
-enter_file_reply(From, Name, Location, AtLocation) ->
- Attr = loc_attr(AtLocation),
- Rep = {ok, [{'-',Attr},{atom,Attr,file},{'(',Attr},
- {string,Attr,file_name(Name)},{',',Attr},
- {integer,Attr,get_line(Location)},{')',Location},
- {dot,Attr}]},
+enter_file_reply(From, Name, LocationAnno, AtLocation, Where) ->
+ Anno0 = loc_anno(AtLocation),
+ Anno = case Where of
+ code -> Anno0;
+ generated -> erl_anno:set_generated(true, Anno0)
+ end,
+ Rep = {ok, [{'-',Anno},{atom,Anno,file},{'(',Anno},
+ {string,Anno,file_name(Name)},{',',Anno},
+ {integer,Anno,get_line(LocationAnno)},{')',LocationAnno},
+ {dot,Anno}]},
epp_reply(From, Rep).
%% Flatten filename to a string. Must be a valid filename.
@@ -710,18 +717,20 @@ leave_file(From, St) ->
#epp{location=OldLoc, delta=Delta, name=OldName,
name2=OldName2} = OldSt,
CurrLoc = add_line(OldLoc, Delta),
+ Anno = erl_anno:new(CurrLoc),
Ms = dict:store({atom,'FILE'},
- {none,[{string,CurrLoc,OldName2}]},
+ {none,[{string,Anno,OldName2}]},
St#epp.macs),
NextSt = OldSt#epp{sstk=Sts,macs=Ms,uses=St#epp.uses},
- enter_file_reply(From, OldName, CurrLoc, CurrLoc),
+ enter_file_reply(From, OldName, Anno, CurrLoc, code),
case OldName2 =:= OldName of
true ->
ok;
false ->
NFrom = wait_request(NextSt),
- enter_file_reply(NFrom, OldName2, OldLoc,
- neg_line(CurrLoc))
+ OldAnno = erl_anno:new(OldLoc),
+ enter_file_reply(NFrom, OldName2, OldAnno,
+ CurrLoc, generated)
end,
wait_req_scan(NextSt);
[] ->
@@ -818,9 +827,9 @@ scan_extends(_Ts, _As, Ms) -> Ms.
%% scan_define(Tokens, DefineToken, From, EppState)
-scan_define([{'(',_Lp},{Type,_Lm,M}=Mac,{',',Lc}|Toks], _Def, From, St)
+scan_define([{'(',_Lp},{Type,_Lm,M}=Mac,{',',_}=Comma|Toks], _Def, From, St)
when Type =:= atom; Type =:= var ->
- case catch macro_expansion(Toks, Lc) of
+ case catch macro_expansion(Toks, Comma) of
Expansion when is_list(Expansion) ->
case dict:find({atom,M}, St#epp.macs) of
{ok, Defs} when is_list(Defs) ->
@@ -910,10 +919,12 @@ macro_ref([]) ->
[];
macro_ref([{'?', _}, {'?', _} | Rest]) ->
macro_ref(Rest);
-macro_ref([{'?', _}, {atom, Lm, A} | Rest]) ->
+macro_ref([{'?', _}, {atom, _, A}=Atom | Rest]) ->
+ Lm = loc(Atom),
Arity = count_args(Rest, Lm, A),
[{{atom, A}, Arity} | macro_ref(Rest)];
-macro_ref([{'?', _}, {var, Lm, A} | Rest]) ->
+macro_ref([{'?', _}, {var, _, A}=Var | Rest]) ->
+ Lm = loc(Var),
Arity = count_args(Rest, Lm, A),
[{{atom, A}, Arity} | macro_ref(Rest)];
macro_ref([_Token | Rest]) ->
@@ -940,7 +951,7 @@ scan_include([{'(',_Llp},{string,_Lf,NewName0},{')',_Lrp},{dot,_Ld}], Inc,
NewName = expand_var(NewName0),
enter_file(NewName, Inc, From, St);
scan_include(_Toks, Inc, From, St) ->
- epp_reply(From, {error,{abs_loc(Inc),epp,{bad,include}}}),
+ epp_reply(From, {error,{loc(Inc),epp,{bad,include}}}),
wait_req_scan(St).
%% scan_include_lib(Tokens, IncludeToken, From, EppState)
@@ -955,7 +966,7 @@ find_lib_dir(NewName) ->
scan_include_lib([{'(',_Llp},{string,_Lf,_NewName0},{')',_Lrp},{dot,_Ld}],
Inc, From, St)
when length(St#epp.sstk) >= 8 ->
- epp_reply(From, {error,{abs_loc(Inc),epp,{depth,"include_lib"}}}),
+ epp_reply(From, {error,{loc(Inc),epp,{depth,"include_lib"}}}),
wait_req_scan(St);
scan_include_lib([{'(',_Llp},{string,_Lf,NewName0},{')',_Lrp},{dot,_Ld}],
Inc, From, St) ->
@@ -974,18 +985,18 @@ scan_include_lib([{'(',_Llp},{string,_Lf,NewName0},{')',_Lrp},{dot,_Ld}],
St, Loc));
{error,_E2} ->
epp_reply(From,
- {error,{abs_loc(Inc),epp,
+ {error,{loc(Inc),epp,
{include,lib,NewName}}}),
wait_req_scan(St)
end;
_Error ->
- epp_reply(From, {error,{abs_loc(Inc),epp,
+ epp_reply(From, {error,{loc(Inc),epp,
{include,lib,NewName}}}),
wait_req_scan(St)
end
end;
scan_include_lib(_Toks, Inc, From, St) ->
- epp_reply(From, {error,{abs_loc(Inc),epp,{bad,include_lib}}}),
+ epp_reply(From, {error,{loc(Inc),epp,{bad,include_lib}}}),
wait_req_scan(St).
%% scan_ifdef(Tokens, IfdefToken, From, EppState)
@@ -1088,11 +1099,12 @@ scan_endif(_Toks, Endif, From, St) ->
scan_file([{'(',_Llp},{string,_Ls,Name},{',',_Lc},{integer,_Li,Ln},{')',_Lrp},
{dot,_Ld}], Tf, From, St) ->
- enter_file_reply(From, Name, Ln, neg_line(abs_loc(Tf))),
- Ms = dict:store({atom,'FILE'}, {none,[{string,1,Name}]}, St#epp.macs),
+ Anno = erl_anno:new(Ln),
+ enter_file_reply(From, Name, Anno, loc(Tf), generated),
+ Ms = dict:store({atom,'FILE'}, {none,[{string,line1(),Name}]}, St#epp.macs),
Locf = loc(Tf),
NewLoc = new_location(Ln, St#epp.location, Locf),
- Delta = abs(get_line(element(2, Tf)))-Ln + St#epp.delta,
+ Delta = get_line(element(2, Tf))-Ln + St#epp.delta,
wait_req_scan(St#epp{name2=Name,location=NewLoc,delta=Delta,macs=Ms});
scan_file(_Toks, Tf, From, St) ->
epp_reply(From, {error,{loc(Tf),epp,{bad,file}}}),
@@ -1153,7 +1165,7 @@ skip_else(_Else, From, St, Sis) ->
skip_toks(From, St, Sis).
%% macro_pars(Tokens, ArgStack)
-%% macro_expansion(Tokens, Line)
+%% macro_expansion(Tokens, Anno)
%% Extract the macro parameters and the expansion from a macro definition.
macro_pars([{')',_Lp}, {',',Ld}|Ex], Args) ->
@@ -1165,11 +1177,12 @@ macro_pars([{var,_L,Name}, {',',_}|Ts], Args) ->
false = lists:member(Name, Args),
macro_pars(Ts, [Name|Args]).
-macro_expansion([{')',_Lp},{dot,_Ld}], _L0) -> [];
-macro_expansion([{dot,Ld}], _L0) -> throw({error,Ld,missing_parenthesis});
-macro_expansion([T|Ts], _L0) ->
- [T|macro_expansion(Ts, element(2, T))];
-macro_expansion([], L0) -> throw({error,L0,premature_end}).
+macro_expansion([{')',_Lp},{dot,_Ld}], _Anno0) -> [];
+macro_expansion([{dot,_}=Dot], _Anno0) ->
+ throw({error,loc(Dot),missing_parenthesis});
+macro_expansion([T|Ts], _Anno0) ->
+ [T|macro_expansion(Ts, T)];
+macro_expansion([], Anno0) -> throw({error,loc(Anno0),premature_end}).
%% expand_macros(Tokens, Macros)
%% expand_macro(Tokens, MacroToken, RestTokens)
@@ -1239,17 +1252,17 @@ expand_macros([{'?',_Lq},{atom,_Lm,M}=MacT|Toks], Ms) ->
expand_macros(atom, MacT, M, Toks, Ms);
%% Special macros
expand_macros([{'?',_Lq},{var,Lm,'LINE'}=Tok|Toks], Ms) ->
- {line,Line} = erl_scan:token_info(Tok, line),
+ Line = erl_scan:line(Tok),
[{integer,Lm,Line}|expand_macros(Toks, Ms)];
expand_macros([{'?',_Lq},{var,_Lm,M}=MacT|Toks], Ms) ->
expand_macros(atom, MacT, M, Toks, Ms);
%% Illegal macros
expand_macros([{'?',_Lq},Token|_Toks], _Ms) ->
- T = case erl_scan:token_info(Token, text) of
- {text,Text} ->
+ T = case erl_scan:text(Token) of
+ Text when is_list(Text) ->
Text;
undefined ->
- {symbol,Symbol} = erl_scan:token_info(Token, symbol),
+ Symbol = erl_scan:symbol(Token),
io_lib:write(Symbol)
end,
throw({error,loc(Token),{call,[$?|T]}});
@@ -1383,7 +1396,7 @@ expand_arg([], Ts, L, Rest, Bs) ->
%%% stringify(Ts, L) returns a list of one token: a string which when
%%% tokenized would yield the token list Ts.
-%% erl_scan:token_info(T, text) is not backward compatible with this.
+%% erl_scan:text(T) is not backward compatible with this.
%% Note that escaped characters will be replaced by themselves.
token_src({dot, _}) ->
".";
@@ -1456,36 +1469,29 @@ fname_join(Components) ->
filename:join(Components).
%% The line only. (Other tokens may have the column and text as well...)
-loc_attr(Line) when is_integer(Line) ->
- Line;
-loc_attr({Line,_Column}) ->
- Line.
+loc_anno(Line) when is_integer(Line) ->
+ erl_anno:new(Line);
+loc_anno({Line,_Column}) ->
+ erl_anno:new(Line).
loc(Token) ->
- {location,Location} = erl_scan:token_info(Token, location),
- Location.
+ erl_scan:location(Token).
-abs_loc(Token) ->
- loc(setelement(2, Token, abs_line(element(2, Token)))).
-
-neg_line(L) ->
- erl_scan:set_attribute(line, L, fun(Line) -> -abs(Line) end).
-
-abs_line(L) ->
- erl_scan:set_attribute(line, L, fun(Line) -> abs(Line) end).
-
-add_line(L, Offset) ->
- erl_scan:set_attribute(line, L, fun(Line) -> Line+Offset end).
+add_line(Line, Offset) when is_integer(Line) ->
+ Line+Offset;
+add_line({Line, Column}, Offset) ->
+ {Line+Offset, Column}.
start_loc(Line) when is_integer(Line) ->
1;
start_loc({_Line, _Column}) ->
- {1,1}.
+ {1, 1}.
-get_line(Line) when is_integer(Line) ->
- Line;
-get_line({Line,_Column}) ->
- Line.
+line1() ->
+ erl_anno:new(1).
+
+get_line(Anno) ->
+ erl_anno:line(Anno).
%% epp has always output -file attributes when entering and leaving
%% included files (-include, -include_lib). Starting with R11B the
@@ -1525,14 +1531,15 @@ get_line({Line,_Column}) ->
interpret_file_attribute(Forms) ->
interpret_file_attr(Forms, 0, []).
-interpret_file_attr([{attribute,Loc,file,{File,Line}}=Form | Forms],
+interpret_file_attr([{attribute,Anno,file,{File,Line}}=Form | Forms],
Delta, Fs) ->
- {line, L} = erl_scan:attributes_info(Loc, line),
+ L = get_line(Anno),
+ Generated = erl_anno:generated(Anno),
if
- L < 0 ->
+ Generated ->
%% -file attribute
- interpret_file_attr(Forms, (abs(L) + Delta) - Line, Fs);
- true ->
+ interpret_file_attr(Forms, (L + Delta) - Line, Fs);
+ not Generated ->
%% -include or -include_lib
% true = L =:= Line,
case Fs of
@@ -1543,11 +1550,11 @@ interpret_file_attr([{attribute,Loc,file,{File,Line}}=Form | Forms],
end
end;
interpret_file_attr([Form0 | Forms], Delta, Fs) ->
- F = fun(Attrs) ->
- F2 = fun(L) -> abs(L) + Delta end,
- erl_scan:set_attribute(line, Attrs, F2)
+ F = fun(Anno) ->
+ Line = erl_anno:line(Anno),
+ erl_anno:set_line(Line + Delta, Anno)
end,
- Form = erl_lint:modify_line(Form0, F),
+ Form = erl_parse:map_anno(F, Form0),
[Form | interpret_file_attr(Forms, Delta, Fs)];
interpret_file_attr([], _Delta, _Fs) ->
[].
diff --git a/lib/stdlib/src/erl_anno.erl b/lib/stdlib/src/erl_anno.erl
new file mode 100644
index 0000000000..fa83375c34
--- /dev/null
+++ b/lib/stdlib/src/erl_anno.erl
@@ -0,0 +1,458 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(erl_anno).
+
+-export([new/1, is_anno/1]).
+-export([column/1, end_location/1, file/1, generated/1,
+ line/1, location/1, record/1, text/1]).
+-export([set_file/2, set_generated/2, set_line/2, set_location/2,
+ set_record/2, set_text/2]).
+
+%% To be used when necessary to avoid Dialyzer warnings.
+-export([to_term/1, from_term/1]).
+
+-export_type([anno/0, line/0, column/0, location/0, text/0]).
+
+-export_type([anno_term/0]).
+
+-define(LN(L), is_integer(L)).
+-define(COL(C), (is_integer(C) andalso C >= 1)).
+
+%% Location.
+-define(LCOLUMN(C), ?COL(C)).
+-define(LLINE(L), ?LN(L)).
+
+%% Debug: define DEBUG to make sure that annotations are handled as an
+%% opaque type. Note that all abstract code need to be compiled with
+%% opaque type. Note that all abstract code needs to be compiled with
+
+%-define(DEBUG, true).
+
+-type annotation() :: {'file', filename()}
+ | {'generated', generated()}
+ | {'location', location()}
+ | {'record', record()}
+ | {'text', string()}.
+
+-type anno() :: location() | [annotation(), ...].
+-type anno_term() :: term().
+
+-type column() :: pos_integer().
+-type generated() :: boolean().
+-type filename() :: file:filename_all().
+-type line() :: integer().
+-type location() :: line() | {line(), column()}.
+-type record() :: boolean().
+-type text() :: string().
+
+-ifdef(DEBUG).
+%% Anything 'false' accepted by the compiler.
+-define(ALINE(A), is_reference(A)).
+-define(ACOLUMN(A), is_reference(A)).
+-else.
+-define(ALINE(L), ?LN(L)).
+-define(ACOLUMN(C), ?COL(C)).
+-endif.
+
+-spec to_term(Anno) -> anno_term() when
+ Anno :: anno().
+
+-ifdef(DEBUG).
+to_term(Anno) ->
+ simplify(Anno).
+-else.
+to_term(Anno) ->
+ Anno.
+-endif.
+
+-spec from_term(Term) -> Anno when
+ Term :: anno_term(),
+ Anno :: anno().
+
+-ifdef(DEBUG).
+from_term(Term) when is_list(Term) ->
+ Term;
+from_term(Term) ->
+ [{location, Term}].
+-else.
+from_term(Term) ->
+ Term.
+-endif.
+
+-spec new(Location) -> anno() when
+ Location :: location().
+
+new(Line) when ?LLINE(Line) ->
+ new_location(Line);
+new({Line, Column}=Loc) when ?LLINE(Line), ?LCOLUMN(Column) ->
+ new_location(Loc);
+new(Term) ->
+ erlang:error(badarg, [Term]).
+
+-ifdef(DEBUG).
+new_location(Location) ->
+ [{location, Location}].
+-else.
+new_location(Location) ->
+ Location.
+-endif.
+
+-spec is_anno(Term) -> boolean() when
+ Term :: any().
+
+is_anno(Line) when ?ALINE(Line) ->
+ true;
+is_anno({Line, Column}) when ?ALINE(Line), ?ACOLUMN(Column) ->
+ true;
+is_anno(Anno) ->
+ (Anno =/= [] andalso
+ is_anno1(Anno) andalso
+ lists:keymember(location, 1, Anno)).
+
+is_anno1([{Item, Value}|Anno]) ->
+ is_anno2(Item, Value) andalso is_anno1(Anno);
+is_anno1(A) ->
+ A =:= [].
+
+is_anno2(location, Line) when ?LN(Line) ->
+ true;
+is_anno2(location, {Line, Column}) when ?LN(Line), ?COL(Column) ->
+ true;
+is_anno2(generated, true) ->
+ true;
+is_anno2(file, Filename) ->
+ is_filename(Filename);
+is_anno2(record, true) ->
+ true;
+is_anno2(text, Text) ->
+ is_string(Text);
+is_anno2(_, _) ->
+ false.
+
+is_filename(T) ->
+ is_list(T) orelse is_binary(T).
+
+is_string(T) ->
+ is_list(T).
+
+-spec column(Anno) -> column() | 'undefined' when
+ Anno :: anno().
+
+column({Line, Column}) when ?ALINE(Line), ?ACOLUMN(Column) ->
+ Column;
+column(Line) when ?ALINE(Line) ->
+ undefined;
+column(Anno) ->
+ case location(Anno) of
+ {_Line, Column} ->
+ Column;
+ _Line ->
+ undefined
+ end.
+
+-spec end_location(Anno) -> location() | 'undefined' when
+ Anno :: anno().
+
+end_location(Anno) ->
+ case text(Anno) of
+ undefined ->
+ undefined;
+ Text ->
+ case location(Anno) of
+ {Line, Column} ->
+ end_location(Text, Line, Column);
+ Line ->
+ end_location(Text, Line)
+ end
+ end.
+
+-spec file(Anno) -> filename() | 'undefined' when
+ Anno :: anno().
+
+file(Line) when ?ALINE(Line) ->
+ undefined;
+file({Line, Column}) when ?ALINE(Line), ?ACOLUMN(Column) ->
+ undefined;
+file(Anno) ->
+ anno_info(Anno, file).
+
+-spec generated(Anno) -> generated() when
+ Anno :: anno().
+
+generated(Line) when ?ALINE(Line) ->
+ Line =< 0;
+generated({Line, Column}) when ?ALINE(Line), ?ACOLUMN(Column) ->
+ Line =< 0;
+generated(Anno) ->
+ _ = anno_info(Anno, generated, false),
+ {location, Location} = lists:keyfind(location, 1, Anno),
+ case Location of
+ {Line, _Column} ->
+ Line =< 0;
+ Line ->
+ Line =< 0
+ end.
+
+-spec line(Anno) -> line() when
+ Anno :: anno().
+
+line(Anno) ->
+ case location(Anno) of
+ {Line, _Column} ->
+ Line;
+ Line ->
+ Line
+ end.
+
+-spec location(Anno) -> location() when
+ Anno :: anno().
+
+location(Line) when ?ALINE(Line) ->
+ abs(Line);
+location({Line, Column}) when ?ALINE(Line), ?ACOLUMN(Column) ->
+ {abs(Line), Column};
+location(Anno) ->
+ case anno_info(Anno, location) of
+ Line when Line < 0 ->
+ -Line;
+ {Line, Column} when Line < 0 ->
+ {-Line, Column};
+ Location ->
+ Location
+ end.
+
+-spec record(Anno) -> record() when
+ Anno :: anno().
+
+record(Line) when ?ALINE(Line) ->
+ false;
+record({Line, Column}) when ?ALINE(Line), ?ACOLUMN(Column) ->
+ false;
+record(Anno) ->
+ anno_info(Anno, record, false).
+
+-spec text(Anno) -> text() | 'undefined' when
+ Anno :: anno().
+
+text(Line) when ?ALINE(Line) ->
+ undefined;
+text({Line, Column}) when ?ALINE(Line), ?ACOLUMN(Column) ->
+ undefined;
+text(Anno) ->
+ anno_info(Anno, text).
+
+-spec set_file(File, Anno) -> Anno when
+ File :: filename(),
+ Anno :: anno().
+
+set_file(File, Anno) ->
+ set(file, File, Anno).
+
+-spec set_generated(Generated, Anno) -> Anno when
+ Generated :: generated(),
+ Anno :: anno().
+
+set_generated(true, Line) when ?ALINE(Line) ->
+ -abs(Line);
+set_generated(false, Line) when ?ALINE(Line) ->
+ abs(Line);
+set_generated(true, {Line, Column}) when ?ALINE(Line),
+ ?ACOLUMN(Column) ->
+ {-abs(Line),Column};
+set_generated(false, {Line, Column}) when ?ALINE(Line),
+ ?ACOLUMN(Column) ->
+ {abs(Line),Column};
+set_generated(Generated, Anno) ->
+ _ = set(generated, Generated, Anno),
+ {location, Location} = lists:keyfind(location, 1, Anno),
+ NewLocation =
+ case Location of
+ {Line, Column} when Generated ->
+ {-abs(Line), Column};
+ {Line, Column} when not Generated ->
+ {abs(Line), Column};
+ Line when Generated ->
+ -abs(Line);
+ Line when not Generated ->
+ abs(Line)
+ end,
+ lists:keyreplace(location, 1, Anno, {location, NewLocation}).
+
+-spec set_line(Line, Anno) -> Anno when
+ Line :: line(),
+ Anno :: anno().
+
+set_line(Line, Anno) ->
+ case location(Anno) of
+ {_Line, Column} ->
+ set_location({Line, Column}, Anno);
+ _Line ->
+ set_location(Line, Anno)
+ end.
+
+-spec set_location(Location, Anno) -> Anno when
+ Location :: location(),
+ Anno :: anno().
+
+set_location(Line, L) when ?ALINE(L), ?LLINE(Line) ->
+ new_location(fix_line(Line, L));
+set_location(Line, {L, Column}) when ?ALINE(L), ?ACOLUMN(Column),
+ ?LLINE(Line) ->
+ new_location(fix_line(Line, L));
+set_location({L, C}=Loc, Line) when ?ALINE(Line), ?LLINE(L), ?LCOLUMN(C) ->
+ new_location(fix_location(Loc, Line));
+set_location({L, C}=Loc, {Line, Column}) when ?ALINE(Line), ?ACOLUMN(Column),
+ ?LLINE(L), ?LCOLUMN(C) ->
+ new_location(fix_location(Loc, Line));
+set_location(Location, Anno) ->
+ _ = set(location, Location, Anno),
+ {location, OldLocation} = lists:keyfind(location, 1, Anno),
+ NewLocation =
+ case {Location, OldLocation} of
+ {{_Line, _Column}=Loc, {L, _C}} ->
+ fix_location(Loc, L);
+ {Line, {L, _C}} ->
+ fix_line(Line, L);
+ {{_Line, _Column}=Loc, L} ->
+ fix_location(Loc, L);
+ {Line, L} ->
+ fix_line(Line, L)
+ end,
+ lists:keyreplace(location, 1, Anno, {location, NewLocation}).
+
+fix_location({Line, Column}, OldLine) ->
+ {fix_line(Line, OldLine), Column}.
+
+fix_line(Line, OldLine) when OldLine < 0, Line > 0 ->
+ -Line;
+fix_line(Line, _OldLine) ->
+ Line.
+
+-spec set_record(Record, Anno) -> Anno when
+ Record :: record(),
+ Anno :: anno().
+
+set_record(Record, Anno) ->
+ set(record, Record, Anno).
+
+-spec set_text(Text, Anno) -> Anno when
+ Text :: text(),
+ Anno :: anno().
+
+set_text(Text, Anno) ->
+ set(text, Text, Anno).
+
+set(Item, Value, Anno) ->
+ case {is_settable(Item, Value), Anno} of
+ {true, Line} when ?ALINE(Line) ->
+ set_anno(Item, Value, [{location, Line}]);
+ {true, {L, C}=Location} when ?ALINE(L), ?ACOLUMN(C) ->
+ set_anno(Item, Value, [{location, Location}]);
+ {true, A} when is_list(A), A =/= [] ->
+ set_anno(Item, Value, Anno);
+ _ ->
+ erlang:error(badarg, [Item, Value, Anno])
+ end.
+
+set_anno(Item, Value, Anno) ->
+ case default(Item, Value) of
+ true ->
+ reset(Anno, Item);
+ false ->
+ R = case anno_info(Anno, Item) of
+ undefined ->
+ [{Item, Value}|Anno];
+ _ ->
+ lists:keyreplace(Item, 1, Anno, {Item, Value})
+ end,
+ simplify(R)
+ end.
+
+reset(Anno, Item) ->
+ A = lists:keydelete(Item, 1, Anno),
+ reset_simplify(A).
+
+-ifdef(DEBUG).
+reset_simplify(A) ->
+ A.
+-else.
+reset_simplify(A) ->
+ simplify(A).
+-endif.
+
+simplify([{location, Location}]) ->
+ Location;
+simplify(Anno) ->
+ Anno.
+
+anno_info(Anno, Item, Default) ->
+ try lists:keyfind(Item, 1, Anno) of
+ false ->
+ Default;
+ {Item, Value} ->
+ Value
+ catch
+ _:_ ->
+ erlang:error(badarg, [Anno])
+ end.
+
+anno_info(Anno, Item) ->
+ try lists:keyfind(Item, 1, Anno) of
+ {Item, Value} ->
+ Value;
+ false ->
+ undefined
+ catch
+ _:_ ->
+ erlang:error(badarg, [Anno])
+ end.
+
+end_location("", Line, Column) ->
+ {Line, Column};
+end_location([$\n|String], Line, _Column) ->
+ end_location(String, Line+1, 1);
+end_location([_|String], Line, Column) ->
+ end_location(String, Line, Column+1).
+
+end_location("", Line) ->
+ Line;
+end_location([$\n|String], Line) ->
+ end_location(String, Line+1);
+end_location([_|String], Line) ->
+ end_location(String, Line).
+
+is_settable(file, File) ->
+ is_filename(File);
+is_settable(generated, Boolean) when Boolean; not Boolean ->
+ true;
+is_settable(location, Line) when ?LLINE(Line) ->
+ true;
+is_settable(location, {Line, Column}) when ?LLINE(Line), ?LCOLUMN(Column) ->
+ true;
+is_settable(record, Boolean) when Boolean; not Boolean ->
+ true;
+is_settable(text, Text) ->
+ is_string(Text);
+is_settable(_, _) ->
+ false.
+
+default(generated, false) -> true;
+default(record, false) -> true;
+default(_, _) -> false.
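
A minimal, hypothetical usage sketch of the erl_anno API introduced by this new module (not part of the patch): the module and function name anno_demo:demo/0 are invented, while new/1, set_file/2, set_generated/2, line/1, location/1, file/1 and generated/1 are the functions defined in the file above; the expected results in the comments follow from the clauses shown there.

%% Hypothetical demo, not part of erl_anno.erl:
-module(anno_demo).
-export([demo/0]).

demo() ->
    A0 = erl_anno:new({7, 3}),               %location: line 7, column 3
    A1 = erl_anno:set_file("demo.erl", A0),  %annotation becomes a list
    A2 = erl_anno:set_generated(true, A1),   %marks the code as generated
    {erl_anno:line(A2),                      %7
     erl_anno:location(A2),                  %{7,3}
     erl_anno:file(A2),                      %"demo.erl"
     erl_anno:generated(A2)}.                %true
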
diff --git a/lib/stdlib/src/erl_eval.erl b/lib/stdlib/src/erl_eval.erl
index 371573dc23..39f833009f 100644
--- a/lib/stdlib/src/erl_eval.erl
+++ b/lib/stdlib/src/erl_eval.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -246,18 +246,14 @@ expr({record,_,_,Name,_}, _Bs, _Lf, _Ef, _RBs) ->
%% map
expr({map,_,Binding,Es}, Bs0, Lf, Ef, RBs) ->
{value, Map0, Bs1} = expr(Binding, Bs0, Lf, Ef, none),
- case Map0 of
- #{} ->
- {Vs,Bs2} = eval_map_fields(Es, Bs0, Lf, Ef),
- Map1 = lists:foldl(fun ({map_assoc,K,V}, Mi) ->
- maps:put(K, V, Mi);
- ({map_exact,K,V}, Mi) ->
- maps:update(K, V, Mi)
- end, Map0, Vs),
- ret_expr(Map1, merge_bindings(Bs2, Bs1), RBs);
- _ ->
- erlang:raise(error, {badarg,Map0}, stacktrace())
- end;
+ {Vs,Bs2} = eval_map_fields(Es, Bs0, Lf, Ef),
+ _ = maps:put(k, v, Map0), %Validate map.
+ Map1 = lists:foldl(fun ({map_assoc,K,V}, Mi) ->
+ maps:put(K, V, Mi);
+ ({map_exact,K,V}, Mi) ->
+ maps:update(K, V, Mi)
+ end, Map0, Vs),
+ ret_expr(Map1, merge_bindings(Bs2, Bs1), RBs);
expr({map,_,Es}, Bs0, Lf, Ef, RBs) ->
{Vs,Bs} = eval_map_fields(Es, Bs0, Lf, Ef),
ret_expr(lists:foldl(fun
@@ -483,12 +479,13 @@ expr({value,_,Val}, Bs, _Lf, _Ef, RBs) -> % Special case straight values.
find_maxline(LC) ->
put('$erl_eval_max_line', 0),
- F = fun(L) ->
+ F = fun(A) ->
+ L = erl_anno:line(A),
case is_integer(L) and (L > get('$erl_eval_max_line')) of
true -> put('$erl_eval_max_line', L);
false -> ok
end end,
- _ = erl_lint:modify_line(LC, F),
+ _ = erl_parse:map_anno(F, LC),
erase('$erl_eval_max_line').
hide_calls(LC, MaxLine) ->
@@ -498,14 +495,16 @@ hide_calls(LC, MaxLine) ->
%% v/1 and local calls are hidden.
hide({value,L,V}, Id, D) ->
- {{atom,Id,ok}, Id+1, dict:store(Id, {value,L,V}, D)};
+ A = erl_anno:new(Id),
+ {{atom,A,ok}, Id+1, dict:store(Id, {value,L,V}, D)};
hide({call,L,{atom,_,N}=Atom,Args}, Id0, D0) ->
{NArgs, Id, D} = hide(Args, Id0, D0),
C = case erl_internal:bif(N, length(Args)) of
true ->
{call,L,Atom,NArgs};
false ->
- {call,Id,{remote,L,{atom,L,m},{atom,L,f}},NArgs}
+ A = erl_anno:new(Id),
+ {call,A,{remote,L,{atom,L,m},{atom,L,f}},NArgs}
end,
{C, Id+1, dict:store(Id, {call,Atom}, D)};
hide(T0, Id0, D0) when is_tuple(T0) ->
@@ -518,11 +517,23 @@ hide([E0 | Es0], Id0, D0) ->
hide(E, Id, D) ->
{E, Id, D}.
-unhide_calls({atom,Id,ok}, MaxLine, D) when Id > MaxLine ->
- dict:fetch(Id, D);
-unhide_calls({call,Id,{remote,L,_M,_F},Args}, MaxLine, D) when Id > MaxLine ->
- {call,Atom} = dict:fetch(Id, D),
- {call,L,Atom,unhide_calls(Args, MaxLine, D)};
+unhide_calls({atom,A,ok}=E, MaxLine, D) ->
+ L = erl_anno:line(A),
+ if
+ L > MaxLine ->
+ dict:fetch(L, D);
+ true ->
+ E
+ end;
+unhide_calls({call,A,{remote,L,{atom,L,m},{atom,L,f}}=F,Args}, MaxLine, D) ->
+ Line = erl_anno:line(A),
+ if
+ Line > MaxLine ->
+ {call,Atom} = dict:fetch(Line, D),
+ {call,L,Atom,unhide_calls(Args, MaxLine, D)};
+ true ->
+ {call,A,F,unhide_calls(Args, MaxLine, D)}
+ end;
unhide_calls(T, MaxLine, D) when is_tuple(T) ->
list_to_tuple(unhide_calls(tuple_to_list(T), MaxLine, D));
unhide_calls([E | Es], MaxLine, D) ->
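
As a side note (not part of the patch), the traversal that find_maxline/1 above now performs with erl_parse:map_anno/2 and the process dictionary can also be expressed with the newly exported erl_parse:fold_anno/3; the helper name max_line/1 below is hypothetical.

%% Hypothetical alternative to find_maxline/1, folding over all annotations:
max_line(AbsTerm) ->
    erl_parse:fold_anno(fun(Anno, Max) ->
                                max(erl_anno:line(Anno), Max)
                        end, 0, AbsTerm).
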
diff --git a/lib/stdlib/src/erl_expand_records.erl b/lib/stdlib/src/erl_expand_records.erl
index 64a00acd88..0d3debae22 100644
--- a/lib/stdlib/src/erl_expand_records.erl
+++ b/lib/stdlib/src/erl_expand_records.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -38,8 +38,6 @@
checked_ra=[] % successfully accessed records
}).
--define(REC_OFFSET, 100000000). % A hundred millions. Also in v3_core.
-
-spec(module(AbsForms, CompileOptions) -> AbsForms when
AbsForms :: [erl_parse:abstract_form()],
CompileOptions :: [compile:option()]).
@@ -149,7 +147,7 @@ pattern({record_index,Line,Name,Field}, St) ->
pattern({record,Line0,Name,Pfs}, St0) ->
Fs = record_fields(Name, St0),
{TMs,St1} = pattern_list(pattern_fields(Fs, Pfs), St0),
- Line = record_offset(Line0, St1),
+ Line = mark_record(Line0, St1),
{{tuple,Line,[{atom,Line0,Name} | TMs]},St1};
pattern({bin,Line,Es0}, St0) ->
{Es1,St1} = pattern_bin(Es0, St0),
@@ -243,7 +241,7 @@ record_test_in_guard(Line, Term, Name, St) ->
expr({atom,Line,false}, St);
false ->
Fs = record_fields(Name, St),
- NLine = neg_line(Line),
+ NLine = no_compiler_warning(Line),
expr({call,NLine,{remote,NLine,{atom,NLine,erlang},{atom,NLine,is_record}},
[Term,{atom,Line,Name},{integer,Line,length(Fs)+1}]},
St)
@@ -269,7 +267,7 @@ record_test_in_body(Line, Expr, Name, St0) ->
%% evaluate to a tuple properly.
Fs = record_fields(Name, St0),
{Var,St} = new_var(Line, St0),
- NLine = neg_line(Line),
+ NLine = no_compiler_warning(Line),
expr({block,Line,
[{match,Line,Var,Expr},
{call,NLine,{remote,NLine,{atom,NLine,erlang},
@@ -333,7 +331,7 @@ expr({record_index,Line,Name,F}, St) ->
I = index_expr(Line, F, Name, record_fields(Name, St)),
expr(I, St);
expr({record,Line0,Name,Is}, St) ->
- Line = record_offset(Line0, St),
+ Line = mark_record(Line0, St),
expr({tuple,Line,[{atom,Line0,Name} |
record_inits(record_fields(Name, St), Is)]},
St);
@@ -384,21 +382,11 @@ expr({call,Line,{tuple,_,[{atom,_,erlang},{atom,_,is_record}]},
expr({call,Line,{atom,_La,N}=Atom,As0}, St0) ->
{As,St1} = expr_list(As0, St0),
Ar = length(As),
- case erl_internal:bif(N, Ar) of
- true ->
- {{call,Line,Atom,As},St1};
- false ->
- case imported(N, Ar, St1) of
- {yes,_Mod} ->
- {{call,Line,Atom,As},St1};
- no ->
- case {N,Ar} of
- {record_info,2} ->
- record_info_call(Line, As, St1);
- _ ->
- {{call,Line,Atom,As},St1}
- end
- end
+ case {N,Ar} =:= {record_info,2} andalso not imported(N, Ar, St1) of
+ true ->
+ record_info_call(Line, As, St1);
+ false ->
+ {{call,Line,Atom,As},St1}
end;
expr({call,Line,{remote,Lr,M,F},As0}, St0) ->
{[M1,F1 | As1],St1} = expr_list([M,F | As0], St0),
@@ -469,7 +457,7 @@ strict_record_access(E0, St0) ->
conj([], _E) ->
empty;
conj([{{Name,_Rp},L,R,Sz} | AL], E) ->
- NL = neg_line(L),
+ NL = no_compiler_warning(L),
T1 = {op,NL,'orelse',
{call,NL,
{remote,NL,{atom,NL,erlang},{atom,NL,is_record}},
@@ -585,8 +573,8 @@ strict_get_record_field(Line, R, {atom,_,F}=Index, Name, St0) ->
Fs = record_fields(Name, St),
I = index_expr(F, Fs, 2),
P = record_pattern(2, I, Var, length(Fs)+1, Line, [{atom,Line,Name}]),
- NLine = neg_line(Line),
- RLine = record_offset(NLine, St),
+ NLine = no_compiler_warning(Line),
+ RLine = mark_record(NLine, St),
E = {'case',NLine,R,
[{clause,NLine,[{tuple,RLine,P}],[],[Var]},
{clause,NLine,[{var,NLine,'_'}],[],
@@ -600,7 +588,8 @@ strict_get_record_field(Line, R, {atom,_,F}=Index, Name, St0) ->
I = index_expr(Line, Index, Name, Fs),
{ExpR,St1} = expr(R, St0),
%% Just to make comparison simple:
- ExpRp = erl_lint:modify_line(ExpR, fun(_L) -> 0 end),
+ A0 = erl_anno:new(0),
+ ExpRp = erl_parse:map_anno(fun(_A) -> A0 end, ExpR),
RA = {{Name,ExpRp},Line,ExpR,length(Fs)+1},
St2 = St1#exprec{strict_ra = [RA | St1#exprec.strict_ra]},
{{call,Line,
@@ -701,8 +690,8 @@ record_update(R, Name, Fs, Us0, St0) ->
record_match(R, Name, Lr, Fs, Us, St0) ->
{Ps,News,St1} = record_upd_fs(Fs, Us, St0),
- NLr = neg_line(Lr),
- RLine = record_offset(Lr, St1),
+ NLr = no_compiler_warning(Lr),
+ RLine = mark_record(Lr, St1),
{{'case',Lr,R,
[{clause,Lr,[{tuple,RLine,[{atom,Lr,Name} | Ps]}],[],
[{tuple,RLine,[{atom,Lr,Name} | News]}]},
@@ -733,8 +722,8 @@ record_setel(R, Name, Fs, Us0) ->
Us = [T || {_,T} <- Us2],
Lr = element(2, hd(Us)),
Wildcards = duplicate(length(Fs), {var,Lr,'_'}),
- NLr = neg_line(Lr),
- %% Note: calling record_offset() here is not necessary since it is
+ NLr = no_compiler_warning(Lr),
+ %% Note: calling mark_record() here is not necessary since it is
%% targeted at Dialyzer which always calls the compiler with
%% 'strict_record_updates' meaning that record_setel() will never
%% be called.
@@ -832,10 +821,7 @@ add_imports(Mod, [F | Fs], Is) ->
add_imports(_, [], Is) -> Is.
imported(F, A, St) ->
- case orddict:find({F,A}, St#exprec.imports) of
- {ok,Mod} -> {yes,Mod};
- error -> no
- end.
+ orddict:is_key({F,A}, St#exprec.imports).
%%%
%%% Replace is_record/3 in guards with matching if possible.
@@ -969,12 +955,11 @@ opt_remove_2({call,Line,{atom,_,is_record},
end;
opt_remove_2(A, _) -> A.
-neg_line(L) ->
- erl_parse:set_line(L, fun(Line) -> -abs(Line) end).
+no_compiler_warning(Anno) ->
+ erl_anno:set_generated(true, Anno).
-record_offset(L, St) ->
+mark_record(Anno, St) ->
case lists:member(dialyzer, St#exprec.compile) of
- true when L >= 0 -> L+?REC_OFFSET;
- true when L < 0 -> L-?REC_OFFSET;
- false -> L
+ true -> erl_anno:set_record(true, Anno);
+ false -> Anno
end.
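
For illustration only (not from the patch), a consumer of the annotations produced by no_compiler_warning/1 above could look like the sketch below; the function name maybe_warn/3 is invented.

%% Hypothetical warning filter: annotations marked as generated by
%% no_compiler_warning/1 are skipped, everything else is reported.
maybe_warn(Anno, Warning, Warnings) ->
    case erl_anno:generated(Anno) of
        true -> Warnings;
        false -> [{erl_anno:location(Anno), Warning} | Warnings]
    end.
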
diff --git a/lib/stdlib/src/erl_lint.erl b/lib/stdlib/src/erl_lint.erl
index 26d8454731..b13848c501 100644
--- a/lib/stdlib/src/erl_lint.erl
+++ b/lib/stdlib/src/erl_lint.erl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -34,6 +34,8 @@
-import(lists, [member/2,map/2,foldl/3,foldr/3,mapfoldl/3,all/2,reverse/1]).
+-deprecated([{modify_line, 2, next_major_release}]).
+
%% bool_option(OnOpt, OffOpt, Default, Options) -> boolean().
%% value_option(Flag, Default, Options) -> Value.
%% value_option(Flag, Default, OnOpt, OnVal, OffOpt, OffVal, Options) ->
@@ -76,7 +78,7 @@ value_option(Flag, Default, On, OnVal, Off, OffVal, Opts) ->
%%-define(DEBUGF(X,Y), io:format(X, Y)).
-define(DEBUGF(X,Y), void).
--type line() :: erl_scan:line(). % a convenient alias
+-type line() :: erl_anno:line(). % a convenient alias
-type fa() :: {atom(), arity()}. % function+arity
-type ta() :: {atom(), arity()}. % type+arity
@@ -111,19 +113,20 @@ value_option(Flag, Default, On, OnVal, Off, OffVal, Opts) ->
        defined=gb_sets:empty()              %Defined functions
:: gb_sets:set(fa()),
on_load=[] :: [fa()], %On-load function
- on_load_line=0 :: line(), %Line for on_load
+ on_load_line=erl_anno:new(0) %Line for on_load
+ :: erl_anno:anno(),
clashes=[], %Exported functions named as BIFs
not_deprecated=[], %Not considered deprecated
func=[], %Current function
warn_format=0, %Warn format calls
enabled_warnings=[], %All enabled warnings (ordset).
+ nowarn_bif_clash=[], %All no warn bif clashes (ordset).
errors=[], %Current errors
warnings=[], %Current warnings
file = "" :: string(), %From last file attribute
recdef_top=false :: boolean(), %true in record initialisation
%outside any fun or lc
xqlc= false :: boolean(), %true if qlc.hrl included
- new = false :: boolean(), %Has user-defined 'new/N'
called= [] :: [{fa(),line()}], %Called functions
usage = #usage{} :: #usage{},
specs = dict:new() %Type specifications
@@ -140,7 +143,7 @@ value_option(Flag, Default, On, OnVal, Off, OffVal, Opts) ->
-type lint_state() :: #lint{}.
-type error_description() :: term().
--type error_info() :: {erl_scan:line(), module(), error_description()}.
+-type error_info() :: {erl_anno:line(), module(), error_description()}.
%% format_error(Error)
%% Return a string describing the error.
@@ -227,6 +230,8 @@ format_error({deprecated, MFA, ReplacementMFA, Rel}) ->
[format_mfa(MFA), Rel, format_mfa(ReplacementMFA)]);
format_error({deprecated, {M1, F1, A1}, String}) when is_list(String) ->
io_lib:format("~p:~p/~p: ~s", [M1, F1, A1, String]);
+format_error({deprecated_type, {M1, F1, A1}, String}) when is_list(String) ->
+ io_lib:format("~p:~p~s: ~s", [M1, F1, gen_type_paren(A1), String]);
format_error({removed, MFA, ReplacementMFA, Rel}) ->
io_lib:format("call to ~s will fail, since it was removed in ~s; "
"use ~s", [format_mfa(MFA), Rel, format_mfa(ReplacementMFA)]);
@@ -425,13 +430,13 @@ exprs(Exprs, BindingsList) ->
exprs_opt(Exprs, BindingsList, Opts) ->
{St0,Vs} = foldl(fun({{record,_SequenceNumber,_Name},Attr0}, {St1,Vs1}) ->
- Attr = zip_file_and_line(Attr0, "none"),
+ Attr = set_file(Attr0, "none"),
{attribute_state(Attr, St1),Vs1};
({V,_}, {St1,Vs1}) ->
{St1,[{V,{bound,unused,[]}} | Vs1]}
end, {start("nofile",Opts),[]}, BindingsList),
Vt = orddict:from_list(Vs),
- {_Evt,St} = exprs(zip_file_and_line(Exprs, "nofile"), Vt, St0),
+ {_Evt,St} = exprs(set_file(Exprs, "nofile"), Vt, St0),
return_status(St).
used_vars(Exprs, BindingsList) ->
@@ -439,7 +444,7 @@ used_vars(Exprs, BindingsList) ->
({V,_Val}, Vs0) -> [{V,{bound,unused,[]}} | Vs0]
end, [], BindingsList),
Vt = orddict:from_list(Vs),
- {Evt,_St} = exprs(zip_file_and_line(Exprs, "nofile"), Vt, start()),
+ {Evt,_St} = exprs(set_file(Exprs, "nofile"), Vt, start()),
{ok, foldl(fun({V,{_,used,_}}, L) -> [V | L];
(_, L) -> L
end, [], Evt)}.
@@ -564,6 +569,7 @@ start(File, Opts) ->
warn_format = value_option(warn_format, 1, warn_format, 1,
nowarn_format, 0, Opts),
enabled_warnings = Enabled,
+ nowarn_bif_clash = nowarn_function(nowarn_bif_clash, Opts),
file = File
}.
@@ -603,34 +609,39 @@ pack_warnings(Ws) ->
%% add_warning(ErrorDescriptor, State) -> State'
%% add_warning(Line, Error, State) -> State'
-add_error(E, St) -> St#lint{errors=[{St#lint.file,E}|St#lint.errors]}.
+add_error(E, St) -> add_lint_error(E, St#lint.file, St).
+
+add_error(Anno, E, St) ->
+ {File,Location} = loc(Anno, St),
+ add_lint_error({Location,erl_lint,E}, File, St).
-add_error(FileLine, E, St) ->
- {File,Location} = loc(FileLine),
- add_error({Location,erl_lint,E}, St#lint{file = File}).
+add_lint_error(E, File, St) ->
+ St#lint{errors=[{File,E}|St#lint.errors]}.
-add_warning(W, St) -> St#lint{warnings=[{St#lint.file,W}|St#lint.warnings]}.
+add_warning(W, St) -> add_lint_warning(W, St#lint.file, St).
add_warning(FileLine, W, St) ->
- {File,Location} = loc(FileLine),
- add_warning({Location,erl_lint,W}, St#lint{file = File}).
-
-loc(L) ->
- case erl_parse:get_attribute(L, location) of
- {location,{{File,Line},Column}} ->
- {File,{Line,Column}};
- {location,{File,Line}} ->
- {File,Line}
+ {File,Location} = loc(FileLine, St),
+ add_lint_warning({Location,erl_lint,W}, File, St).
+
+add_lint_warning(W, File, St) ->
+ St#lint{warnings=[{File,W}|St#lint.warnings]}.
+
+loc(Anno, St) ->
+ Location = erl_anno:location(Anno),
+ case erl_anno:file(Anno) of
+ undefined -> {St#lint.file,Location};
+ File -> {File,Location}
end.
%% forms([Form], State) -> State'
forms(Forms0, St0) ->
Forms = eval_file_attribute(Forms0, St0),
+ %% Annotations from now on include the 'file' item.
Locals = local_functions(Forms),
AutoImportSuppressed = auto_import_suppressed(St0#lint.compile),
StDeprecated = disallowed_compile_flags(Forms,St0),
- %% Line numbers are from now on pairs {File,Line}.
St1 = includes_qlc_hrl(Forms, StDeprecated#lint{locals = Locals,
no_auto = AutoImportSuppressed}),
St2 = bif_clashes(Forms, St1),
@@ -638,8 +649,6 @@ forms(Forms0, St0) ->
St4 = foldl(fun form/2, pre_scan(Forms, St3), Forms),
post_traversal_check(Forms, St4).
-pre_scan([{function,_L,new,_A,_Cs} | Fs], St) ->
- pre_scan(Fs, St#lint{new=true});
pre_scan([{attribute,L,compile,C} | Fs], St) ->
case is_warn_enabled(export_all, St) andalso
member(export_all, lists:flatten([C])) of
@@ -666,15 +675,24 @@ eval_file_attribute(Forms, St) ->
eval_file_attr([{attribute,_L,file,{File,_Line}}=Form | Forms], _File) ->
[Form | eval_file_attr(Forms, File)];
eval_file_attr([Form0 | Forms], File) ->
- Form = zip_file_and_line(Form0, File),
+ Form = set_form_file(Form0, File),
[Form | eval_file_attr(Forms, File)];
eval_file_attr([], _File) ->
[].
-zip_file_and_line(T, File) ->
- F0 = fun(Line) -> {File,Line} end,
- F = fun(L) -> erl_parse:set_line(L, F0) end,
- modify_line(T, F).
+%% Sets the file only on the form. This is used in the post-traversal.
+%% For the rest of the AST we rely on #lint.file.
+
+set_form_file({attribute,L,K,V}, File) ->
+ {attribute,erl_anno:set_file(File, L),K,V};
+set_form_file({function,L,N,A,C}, File) ->
+ {function,erl_anno:set_file(File, L),N,A,C};
+set_form_file(Form, _File) ->
+ Form.
+
+set_file(T, File) ->
+ F = fun(Anno) -> erl_anno:set_file(File, Anno) end,
+ erl_parse:map_anno(F, T).
%% form(Form, State) -> State'
%% Check a form returning the updated State. Handle generic cases here.
@@ -744,6 +762,8 @@ attribute_state(Form, St) ->
%% State'
%% Allow for record, type and opaque type definitions and spec
%% declarations to be interspersed within function definitions.
+%% Dialyzer attributes are also allowed everywhere, but are not
+%% checked at all.
function_state({attribute,L,record,{Name,Fields}}, St) ->
record_def(L, Name, Fields, St);
@@ -753,6 +773,8 @@ function_state({attribute,L,opaque,{TypeName,TypeDef,Args}}, St) ->
type_def(opaque, L, TypeName, TypeDef, Args, St);
function_state({attribute,L,spec,{Fun,Types}}, St) ->
spec_decl(L, Fun, Types, St);
+function_state({attribute,_L,dialyzer,_Val}, St) ->
+ St;
function_state({attribute,La,Attr,_Val}, St) ->
add_error(La, {attribute,Attr}, St);
function_state({function,L,N,A,Cs}, St) ->
@@ -767,8 +789,7 @@ eof(_Line, St0) ->
%% bif_clashes(Forms, State0) -> State.
-bif_clashes(Forms, St) ->
- Nowarn = nowarn_function(nowarn_bif_clash, St#lint.compile),
+bif_clashes(Forms, #lint{nowarn_bif_clash=Nowarn} = St) ->
Clashes0 = [{Name,Arity} || {function,_L,Name,Arity,_Cs} <- Forms,
erl_internal:bif(Name, Arity)],
Clashes = ordsets:subtract(ordsets:from_list(Clashes0), Nowarn),
@@ -792,9 +813,11 @@ not_deprecated(Forms, St0) ->
disallowed_compile_flags(Forms, St0) ->
%% There are (still) no line numbers in St0#lint.compile.
Errors0 = [ {St0#lint.file,{L,erl_lint,disallowed_nowarn_bif_clash}} ||
- {attribute,[{line,{_,L}}],compile,nowarn_bif_clash} <- Forms ],
+ {attribute,A,compile,nowarn_bif_clash} <- Forms,
+ {_,L} <- [loc(A, St0)] ],
Errors1 = [ {St0#lint.file,{L,erl_lint,disallowed_nowarn_bif_clash}} ||
- {attribute,[{line,{_,L}}],compile,{nowarn_bif_clash, {_,_}}} <- Forms ],
+ {attribute,A,compile,{nowarn_bif_clash, {_,_}}} <- Forms,
+ {_,L} <- [loc(A, St0)] ],
Disabled = (not is_warn_enabled(bif_clash, St0)),
Errors = if
Disabled andalso Errors0 =:= [] ->
@@ -919,7 +942,7 @@ behaviour_conflicting(AllBfs, St) ->
behaviour_add_conflicts(R, St).
behaviour_add_conflicts([{Cb,[{FirstLoc,FirstB}|Cs]}|T], St0) ->
- FirstL = element(2, loc(FirstLoc)),
+ FirstL = element(2, loc(FirstLoc, St0)),
St = behaviour_add_conflict(Cs, Cb, FirstL, FirstB, St0),
behaviour_add_conflicts(T, St);
behaviour_add_conflicts([], St) -> St.
@@ -1137,7 +1160,7 @@ check_unused_records(Forms, St0) ->
end, St0#lint.records, UsedRecords),
Unused = [{Name,FileLine} ||
{Name,{FileLine,_Fields}} <- dict:to_list(URecs),
- element(1, loc(FileLine)) =:= FirstFile],
+ element(1, loc(FileLine, St0)) =:= FirstFile],
foldl(fun ({N,L}, St) ->
add_warning(L, {unused_record, N}, St)
end, St0, Unused);
@@ -1295,7 +1318,7 @@ imported(F, A, St) ->
error -> no
end.
--spec on_load(line(), fa(), lint_state()) -> lint_state().
+-spec on_load(erl_anno:anno(), fa(), lint_state()) -> lint_state().
%% Check an on_load directive and remember it.
on_load(Line, {Name,Arity}=Fa, #lint{on_load=OnLoad0}=St0)
@@ -1330,14 +1353,15 @@ check_on_load(St) -> St.
-spec call_function(line(), atom(), arity(), lint_state()) -> lint_state().
%% Add to both called and calls.
-call_function(Line, F, A, #lint{usage=Usage0,called=Cd,func=Func}=St) ->
+call_function(Line, F, A, #lint{usage=Usage0,called=Cd,func=Func,file=File}=St) ->
#usage{calls = Cs} = Usage0,
NA = {F,A},
Usage = case Cs of
undefined -> Usage0;
_ -> Usage0#usage{calls=dict:append(Func, NA, Cs)}
end,
- St#lint{called=[{NA,Line}|Cd], usage=Usage}.
+ Anno = erl_anno:set_file(File, Line),
+ St#lint{called=[{NA,Anno}|Cd], usage=Usage}.
%% function(Line, Name, Arity, Clauses, State) -> State.
@@ -1950,10 +1974,10 @@ is_guard_test(E) ->
is_guard_test(Expression, Forms) ->
RecordAttributes = [A || A = {attribute, _, record, _D} <- Forms],
St0 = foldl(fun(Attr0, St1) ->
- Attr = zip_file_and_line(Attr0, "none"),
+ Attr = set_file(Attr0, "none"),
attribute_state(Attr, St1)
end, start(), RecordAttributes),
- is_guard_test2(zip_file_and_line(Expression, "nofile"), St0#lint.records).
+ is_guard_test2(set_file(Expression, "nofile"), St0#lint.records).
%% is_guard_test2(Expression, RecordDefs :: dict:dict()) -> boolean().
is_guard_test2({call,Line,{atom,Lr,record},[E,A]}, RDs) ->
@@ -2116,7 +2140,7 @@ expr({'receive',Line,Cs,To,ToEs}, Vt, St0) ->
{Cvt,St3} = icrt_clauses(Cs, Vt, St2),
%% Csvts = [vtnew(Tevt, Vt)|Cvt], %This is just NEW variables!
Csvts = [Tevt|Cvt],
- Rvt = icrt_export(Csvts, Vt, {'receive',Line}),
+ Rvt = icrt_export(Csvts, Vt, {'receive',Line}, St3),
{vtmerge([Tvt,Tevt,Rvt]),St3};
expr({'fun',Line,Body}, Vt, St) ->
%%No one can think funs export!
@@ -2266,11 +2290,10 @@ expr({remote,Line,_M,_F}, _Vt, St) ->
%% {UsedVarTable,State}
expr_list(Es, Vt, St) ->
- {Vt1,St1} = foldl(fun (E, {Esvt,St0}) ->
- {Evt,St1} = expr(E, Vt, St0),
- {vtmerge_pat(Evt, Esvt),St1}
- end, {[],St}, Es),
- {vtmerge(vtnew(Vt1, Vt), vtold(Vt1, Vt)),St1}.
+ foldl(fun (E, {Esvt,St0}) ->
+ {Evt,St1} = expr(E, Vt, St0),
+ {vtmerge_pat(Evt, Esvt),St1}
+ end, {[],St}, Es).
record_expr(Line, Rec, Vt, St0) ->
St1 = warn_invalid_record(Line, Rec, St0),
@@ -2288,8 +2311,8 @@ map_fields([{Tag,_,K,V}|Fs], Vt, St, F) when Tag =:= map_field_assoc;
{Pvt,St2} = F([K,V], Vt, St),
{Vts,St3} = map_fields(Fs, Vt, St2, F),
{vtupdate(Pvt, Vts),St3};
-map_fields([], Vt, St, _) ->
- {Vt,St}.
+map_fields([], _, St, _) ->
+ {[],St}.
%% warn_invalid_record(Line, Record, State0) -> State
%% Adds warning if the record is invalid.
@@ -2616,7 +2639,7 @@ type_def(_Attr, _Line, {record, _RecName}, Fields, [], St0) ->
%% The record field names and such are checked in the record format.
%% We only need to check the types.
Types = [T || {typed_record_field, _, T} <- Fields],
- check_type({type, -1, product, Types}, St0);
+ check_type({type, nowarn(), product, Types}, St0);
type_def(Attr, Line, TypeName, ProtoType, Args, St0) ->
TypeDefs = St0#lint.types,
Arity = length(Args),
@@ -2625,7 +2648,7 @@ type_def(Attr, Line, TypeName, ProtoType, Args, St0) ->
StoreType =
fun(St) ->
NewDefs = dict:store(TypePair, Info, TypeDefs),
- CheckType = {type, -1, product, [ProtoType|Args]},
+ CheckType = {type, nowarn(), product, [ProtoType|Args]},
check_type(CheckType, St#lint{types=NewDefs})
end,
case is_default_type(TypePair) of
@@ -2681,7 +2704,9 @@ check_type({ann_type, _L, [_Var, Type]}, SeenVars, St) ->
check_type({paren_type, _L, [Type]}, SeenVars, St) ->
check_type(Type, SeenVars, St);
check_type({remote_type, L, [{atom, _, Mod}, {atom, _, Name}, Args]},
- SeenVars, #lint{module=CurrentMod} = St) ->
+ SeenVars, St0) ->
+ St = deprecated_type(L, Mod, Name, Args, St0),
+ CurrentMod = St#lint.module,
case Mod =:= CurrentMod of
true -> check_type({user_type, L, Name, Args}, SeenVars, St);
false ->
@@ -2709,7 +2734,7 @@ check_type({type, L, 'fun', [Dom, Range]}, SeenVars, St) ->
{type, _, any} -> St;
_ -> add_error(L, {type_syntax, 'fun'}, St)
end,
- check_type({type, -1, product, [Dom, Range]}, SeenVars, St1);
+ check_type({type, nowarn(), product, [Dom, Range]}, SeenVars, St1);
check_type({type, L, range, [From, To]}, SeenVars, St) ->
St1 =
case {erl_eval:partial_eval(From), erl_eval:partial_eval(To)} of
@@ -2726,7 +2751,7 @@ check_type({type, _L, map, Pairs}, SeenVars, St) ->
check_type(Pair, AccSeenVars, AccSt)
end, {SeenVars, St}, Pairs);
check_type({type, _L, map_field_assoc, [Dom, Range]}, SeenVars, St) ->
- check_type({type, -1, product, [Dom, Range]}, SeenVars, St);
+ check_type({type, nowarn(), product, [Dom, Range]}, SeenVars, St);
check_type({type, _L, tuple, any}, SeenVars, St) -> {SeenVars, St};
check_type({type, _L, any}, SeenVars, St) -> {SeenVars, St};
check_type({type, L, binary, [Base, Unit]}, SeenVars, St) ->
@@ -2769,7 +2794,7 @@ check_type({type, La, TypeName, Args}, SeenVars, St) ->
end;
_ -> St
end,
- check_type({type, -1, product, Args}, SeenVars, St1);
+ check_type({type, nowarn(), product, Args}, SeenVars, St1);
check_type({user_type, L, TypeName, Args}, SeenVars, St) ->
Arity = length(Args),
TypePair = {TypeName, Arity},
@@ -2818,10 +2843,9 @@ check_record_types([{type, _, field_type, [{atom, AL, FName}, Type]}|Left],
check_record_types([], _Name, _DefFields, SeenVars, St, _SeenFields) ->
{SeenVars, St}.
-used_type(TypePair, L, St) ->
- Usage = St#lint.usage,
+used_type(TypePair, L, #lint{usage = Usage, file = File} = St) ->
OldUsed = Usage#usage.used_types,
- UsedTypes = dict:store(TypePair, L, OldUsed),
+ UsedTypes = dict:store(TypePair, erl_anno:set_file(File, L), OldUsed),
St#lint{usage=Usage#usage{used_types=UsedTypes}}.
is_default_type({Name, NumberOfTypeVariables}) ->
@@ -2916,11 +2940,16 @@ check_specs([FunType|Left], Arity, St0) ->
true -> St0;
false -> add_error(L, spec_wrong_arity, St0)
end,
- St2 = check_type({type, -1, product, [FunType1|CTypes]}, St1),
+ St2 = check_type({type, nowarn(), product, [FunType1|CTypes]}, St1),
check_specs(Left, Arity, St2);
check_specs([], _Arity, St) ->
St.
+nowarn() ->
+ A0 = erl_anno:new(0),
+ A1 = erl_anno:set_generated(true, A0),
+ erl_anno:set_file("", A1).
+
check_specs_without_function(#lint{module=Mod,defined=Funcs,specs=Specs}=St) ->
Fun = fun({M, F, A}, Line, AccSt) when M =:= Mod ->
FA = {F, A},
@@ -2971,7 +3000,7 @@ check_unused_types(Forms, #lint{usage=Usage, types=Ts, exp_types=ExpTs}=St) ->
UsedTypes = gb_sets:from_list(L),
FoldFun =
fun(Type, #typeinfo{line = FileLine}, AccSt) ->
- case loc(FileLine) of
+ case loc(FileLine, AccSt) of
{FirstFile, _} ->
case gb_sets:is_member(Type, UsedTypes) of
true -> AccSt;
@@ -3009,7 +3038,7 @@ check_local_opaque_types(St) ->
icrt_clauses(Cs, In, Vt, St0) ->
{Csvt,St1} = icrt_clauses(Cs, Vt, St0),
- UpdVt = icrt_export(Csvt, Vt, In),
+ UpdVt = icrt_export(Csvt, Vt, In, St1),
{UpdVt,St1}.
%% icrt_clauses(Clauses, ImportVarTable, State) ->
@@ -3026,8 +3055,8 @@ icrt_clause({clause,_Line,H,G,B}, Vt0, St0) ->
{Bvt,St3} = exprs(B, vtupdate(Vt2, Vt0), St2),
{vtupdate(Bvt, Vt2),St3}.
-icrt_export(Vts, Vt, {Tag,Attrs}) ->
- {_File,Loc} = loc(Attrs),
+icrt_export(Vts, Vt, {Tag,Attrs}, St) ->
+ {_File,Loc} = loc(Attrs, St),
icrt_export(lists:merge(Vts), Vt, {Tag,Loc}, length(Vts), []).
icrt_export([{V,{{export,_},_,_}}|Vs0], [{V,{{export,_}=S0,_,Ls}}|Vt],
@@ -3384,7 +3413,7 @@ vtupdate(Uvt, Vt0) ->
%% Return all new variables in UpdVarTable as unsafe.
vtunsafe({Tag,FileLine}, Uvt, Vt) ->
- {_File,Line} = loc(FileLine),
+ Line = erl_anno:location(FileLine),
[{V,{{unsafe,{Tag,Line}},U,Ls}} || {V,{_,U,Ls}} <- vtnew(Uvt, Vt)].
%% vtmerge(VarTable, VarTable) -> VarTable.
@@ -3449,58 +3478,15 @@ vt_no_unused(Vt) -> [V || {_,{_,U,_L}}=V <- Vt, U =/= unused].
%% copy_expr(Expr, Line) -> Expr.
%% Make a copy of Expr converting all line numbers to Line.
-copy_expr(Expr, Line) ->
- modify_line(Expr, fun(_L) -> Line end).
+copy_expr(Expr, Anno) ->
+ erl_parse:map_anno(fun(_A) -> Anno end, Expr).
%% modify_line(Form, Fun) -> Form
%% modify_line(Expression, Fun) -> Expression
%% Applies Fun to each line number occurrence.
modify_line(T, F0) ->
- modify_line1(T, F0).
-
-%% Forms.
-modify_line1({function,F,A}, _Mf) -> {function,F,A};
-modify_line1({function,M,F,A}, Mf) ->
- {function,modify_line1(M, Mf),modify_line1(F, Mf),modify_line1(A, Mf)};
-modify_line1({attribute,L,record,{Name,Fields}}, Mf) ->
- {attribute,Mf(L),record,{Name,modify_line1(Fields, Mf)}};
-modify_line1({attribute,L,spec,{Fun,Types}}, Mf) ->
- {attribute,Mf(L),spec,{Fun,modify_line1(Types, Mf)}};
-modify_line1({attribute,L,callback,{Fun,Types}}, Mf) ->
- {attribute,Mf(L),callback,{Fun,modify_line1(Types, Mf)}};
-modify_line1({attribute,L,type,{TypeName,TypeDef,Args}}, Mf) ->
- {attribute,Mf(L),type,{TypeName,modify_line1(TypeDef, Mf),
- modify_line1(Args, Mf)}};
-modify_line1({attribute,L,opaque,{TypeName,TypeDef,Args}}, Mf) ->
- {attribute,Mf(L),opaque,{TypeName,modify_line1(TypeDef, Mf),
- modify_line1(Args, Mf)}};
-modify_line1({attribute,L,Attr,Val}, Mf) -> {attribute,Mf(L),Attr,Val};
-modify_line1({warning,W}, _Mf) -> {warning,W};
-modify_line1({error,W}, _Mf) -> {error,W};
-%% Expressions.
-modify_line1({clauses,Cs}, Mf) -> {clauses,modify_line1(Cs, Mf)};
-modify_line1({typed_record_field,Field,Type}, Mf) ->
- {typed_record_field,modify_line1(Field, Mf),modify_line1(Type, Mf)};
-modify_line1({Tag,L}, Mf) -> {Tag,Mf(L)};
-modify_line1({Tag,L,E1}, Mf) ->
- {Tag,Mf(L),modify_line1(E1, Mf)};
-modify_line1({Tag,L,E1,E2}, Mf) ->
- {Tag,Mf(L),modify_line1(E1, Mf),modify_line1(E2, Mf)};
-modify_line1({bin_element,L,E1,E2,TSL}, Mf) ->
- {bin_element,Mf(L),modify_line1(E1, Mf),modify_line1(E2, Mf), TSL};
-modify_line1({Tag,L,E1,E2,E3}, Mf) ->
- {Tag,Mf(L),modify_line1(E1, Mf),modify_line1(E2, Mf),modify_line1(E3, Mf)};
-modify_line1({Tag,L,E1,E2,E3,E4}, Mf) ->
- {Tag,Mf(L),
- modify_line1(E1, Mf),
- modify_line1(E2, Mf),
- modify_line1(E3, Mf),
- modify_line1(E4, Mf)};
-modify_line1([H|T], Mf) ->
- [modify_line1(H, Mf)|modify_line1(T, Mf)];
-modify_line1([], _Mf) -> [];
-modify_line1(E, _Mf) when not is_tuple(E), not is_list(E) -> E.
+ erl_parse:map_anno(F0, T).
%% Check a record_info call. We have already checked that it is not
%% shadowed by an import.
@@ -3570,6 +3556,20 @@ deprecated_function(Line, M, F, As, St) ->
St
end.
+deprecated_type(L, M, N, As, St) ->
+ NAs = length(As),
+ case otp_internal:obsolete_type(M, N, NAs) of
+ {deprecated, String} when is_list(String) ->
+ case is_warn_enabled(deprecated_type, St) of
+ true ->
+ add_warning(L, {deprecated_type, {M,N,NAs}, String}, St);
+ false ->
+ St
+ end;
+ no ->
+ St
+ end.
+
obsolete_guard({call,Line,{atom,Lr,F},As}, St0) ->
Arity = length(As),
case erl_internal:old_type_test(F, Arity) of
@@ -3797,8 +3797,7 @@ is_autoimport_suppressed(NoAutoSet,{Func,Arity}) ->
gb_sets:is_element({Func,Arity},NoAutoSet).
%% Predicate to find out if a function specific bif-clash suppression (old deprecated) is present
bif_clash_specifically_disabled(St,{F,A}) ->
- Nowarn = nowarn_function(nowarn_bif_clash, St#lint.compile),
- lists:member({F,A},Nowarn).
+ lists:member({F,A},St#lint.nowarn_bif_clash).
%% Predicate to find out if an autoimported guard_bif is not overridden in some way
%% Guard Bif without module name is disallowed if
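
For reference (not part of the patch), the lookup performed by the new loc/2 above boils down to the standalone sketch below; resolve_loc/2 is a hypothetical name and the fallback file is passed in rather than taken from #lint.file.

%% Hypothetical standalone version of loc/2: use the file stored in the
%% annotation when present, otherwise fall back to the given default.
resolve_loc(Anno, DefaultFile) ->
    Location = erl_anno:location(Anno),
    case erl_anno:file(Anno) of
        undefined -> {DefaultFile, Location};
        File -> {File, Location}
    end.
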
diff --git a/lib/stdlib/src/erl_parse.yrl b/lib/stdlib/src/erl_parse.yrl
index 3502a50eaa..e328e065e3 100644
--- a/lib/stdlib/src/erl_parse.yrl
+++ b/lib/stdlib/src/erl_parse.yrl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -92,7 +92,7 @@ spec_fun -> atom ':' atom '/' integer '::' : {'$1', '$3', '$5'}.
typed_attr_val -> expr ',' typed_record_fields : {typed_record, '$1', '$3'}.
typed_attr_val -> expr '::' top_type : {type_def, '$1', '$3'}.
-typed_record_fields -> '{' typed_exprs '}' : {tuple, ?line('$1'), '$2'}.
+typed_record_fields -> '{' typed_exprs '}' : {tuple, ?anno('$1'), '$2'}.
typed_exprs -> typed_expr : ['$1'].
typed_exprs -> typed_expr ',' typed_exprs : ['$1'|'$3'].
@@ -105,26 +105,26 @@ type_sigs -> type_sig : ['$1'].
type_sigs -> type_sig ';' type_sigs : ['$1'|'$3'].
type_sig -> fun_type : '$1'.
-type_sig -> fun_type 'when' type_guards : {type, ?line('$1'), bounded_fun,
+type_sig -> fun_type 'when' type_guards : {type, ?anno('$1'), bounded_fun,
['$1','$3']}.
type_guards -> type_guard : ['$1'].
type_guards -> type_guard ',' type_guards : ['$1'|'$3'].
-type_guard -> atom '(' top_types ')' : {type, ?line('$1'), constraint,
+type_guard -> atom '(' top_types ')' : {type, ?anno('$1'), constraint,
['$1', '$3']}.
type_guard -> var '::' top_type : build_def('$1', '$3').
top_types -> top_type : ['$1'].
top_types -> top_type ',' top_types : ['$1'|'$3'].
-top_type -> var '::' top_type_100 : {ann_type, ?line('$1'), ['$1','$3']}.
+top_type -> var '::' top_type_100 : {ann_type, ?anno('$1'), ['$1','$3']}.
top_type -> top_type_100 : '$1'.
top_type_100 -> type_200 : '$1'.
top_type_100 -> type_200 '|' top_type_100 : lift_unions('$1','$3').
-type_200 -> type_300 '..' type_300 : {type, ?line('$1'), range,
+type_200 -> type_300 '..' type_300 : {type, ?anno('$1'), range,
[skip_paren('$1'),
skip_paren('$3')]}.
type_200 -> type_300 : '$1'.
@@ -140,61 +140,61 @@ type_400 -> type_500 : '$1'.
type_500 -> prefix_op type : ?mkop1('$1', skip_paren('$2')).
type_500 -> type : '$1'.
-type -> '(' top_type ')' : {paren_type, ?line('$2'), ['$2']}.
+type -> '(' top_type ')' : {paren_type, ?anno('$2'), ['$2']}.
type -> var : '$1'.
type -> atom : '$1'.
type -> atom '(' ')' : build_gen_type('$1').
type -> atom '(' top_types ')' : build_type('$1', '$3').
-type -> atom ':' atom '(' ')' : {remote_type, ?line('$1'),
+type -> atom ':' atom '(' ')' : {remote_type, ?anno('$1'),
['$1', '$3', []]}.
-type -> atom ':' atom '(' top_types ')' : {remote_type, ?line('$1'),
+type -> atom ':' atom '(' top_types ')' : {remote_type, ?anno('$1'),
['$1', '$3', '$5']}.
-type -> '[' ']' : {type, ?line('$1'), nil, []}.
-type -> '[' top_type ']' : {type, ?line('$1'), list, ['$2']}.
-type -> '[' top_type ',' '...' ']' : {type, ?line('$1'),
+type -> '[' ']' : {type, ?anno('$1'), nil, []}.
+type -> '[' top_type ']' : {type, ?anno('$1'), list, ['$2']}.
+type -> '[' top_type ',' '...' ']' : {type, ?anno('$1'),
nonempty_list, ['$2']}.
-type -> '#' '{' '}' : {type, ?line('$1'), map, []}.
-type -> '#' '{' map_pair_types '}' : {type, ?line('$1'), map, '$3'}.
-type -> '{' '}' : {type, ?line('$1'), tuple, []}.
-type -> '{' top_types '}' : {type, ?line('$1'), tuple, '$2'}.
-type -> '#' atom '{' '}' : {type, ?line('$1'), record, ['$2']}.
-type -> '#' atom '{' field_types '}' : {type, ?line('$1'),
+type -> '#' '{' '}' : {type, ?anno('$1'), map, []}.
+type -> '#' '{' map_pair_types '}' : {type, ?anno('$1'), map, '$3'}.
+type -> '{' '}' : {type, ?anno('$1'), tuple, []}.
+type -> '{' top_types '}' : {type, ?anno('$1'), tuple, '$2'}.
+type -> '#' atom '{' '}' : {type, ?anno('$1'), record, ['$2']}.
+type -> '#' atom '{' field_types '}' : {type, ?anno('$1'),
record, ['$2'|'$4']}.
type -> binary_type : '$1'.
type -> integer : '$1'.
-type -> 'fun' '(' ')' : {type, ?line('$1'), 'fun', []}.
+type -> 'fun' '(' ')' : {type, ?anno('$1'), 'fun', []}.
type -> 'fun' '(' fun_type_100 ')' : '$3'.
fun_type_100 -> '(' '...' ')' '->' top_type
- : {type, ?line('$1'), 'fun',
- [{type, ?line('$1'), any}, '$5']}.
+ : {type, ?anno('$1'), 'fun',
+ [{type, ?anno('$1'), any}, '$5']}.
fun_type_100 -> fun_type : '$1'.
-fun_type -> '(' ')' '->' top_type : {type, ?line('$1'), 'fun',
- [{type, ?line('$1'), product, []}, '$4']}.
+fun_type -> '(' ')' '->' top_type : {type, ?anno('$1'), 'fun',
+ [{type, ?anno('$1'), product, []}, '$4']}.
fun_type -> '(' top_types ')' '->' top_type
- : {type, ?line('$1'), 'fun',
- [{type, ?line('$1'), product, '$2'},'$5']}.
+ : {type, ?anno('$1'), 'fun',
+ [{type, ?anno('$1'), product, '$2'},'$5']}.
map_pair_types -> map_pair_type : ['$1'].
map_pair_types -> map_pair_type ',' map_pair_types : ['$1'|'$3'].
-map_pair_type -> top_type '=>' top_type : {type, ?line('$2'), map_field_assoc,['$1','$3']}.
+map_pair_type -> top_type '=>' top_type : {type, ?anno('$2'), map_field_assoc,['$1','$3']}.
field_types -> field_type : ['$1'].
field_types -> field_type ',' field_types : ['$1'|'$3'].
-field_type -> atom '::' top_type : {type, ?line('$1'), field_type,
+field_type -> atom '::' top_type : {type, ?anno('$1'), field_type,
['$1', '$3']}.
-binary_type -> '<<' '>>' : {type, ?line('$1'),binary,
- [abstract(0, ?line('$1')),
- abstract(0, ?line('$1'))]}.
-binary_type -> '<<' bin_base_type '>>' : {type, ?line('$1'),binary,
- ['$2', abstract(0, ?line('$1'))]}.
-binary_type -> '<<' bin_unit_type '>>' : {type, ?line('$1'),binary,
- [abstract(0, ?line('$1')), '$2']}.
+binary_type -> '<<' '>>' : {type, ?anno('$1'),binary,
+ [abstract2(0, ?anno('$1')),
+ abstract2(0, ?anno('$1'))]}.
+binary_type -> '<<' bin_base_type '>>' : {type, ?anno('$1'),binary,
+ ['$2', abstract2(0, ?anno('$1'))]}.
+binary_type -> '<<' bin_unit_type '>>' : {type, ?anno('$1'),binary,
+ [abstract2(0, ?anno('$1')), '$2']}.
binary_type -> '<<' bin_base_type ',' bin_unit_type '>>'
- : {type, ?line('$1'), binary, ['$2', '$4']}.
+ : {type, ?anno('$1'), binary, ['$2', '$4']}.
bin_base_type -> var ':' type : build_bin_type(['$1'], '$3').
@@ -210,7 +210,7 @@ function_clauses -> function_clause : ['$1'].
function_clauses -> function_clause ';' function_clauses : ['$1'|'$3'].
function_clause -> atom clause_args clause_guard clause_body :
- {clause,?line('$1'),element(3, '$1'),'$2','$3','$4'}.
+ {clause,?anno('$1'),element(3, '$1'),'$2','$3','$4'}.
clause_args -> argument_list : element(1, '$1').
@@ -221,10 +221,10 @@ clause_guard -> '$empty' : [].
clause_body -> '->' exprs: '$2'.
-expr -> 'catch' expr : {'catch',?line('$1'),'$2'}.
+expr -> 'catch' expr : {'catch',?anno('$1'),'$2'}.
expr -> expr_100 : '$1'.
-expr_100 -> expr_150 '=' expr_100 : {match,?line('$2'),'$1','$3'}.
+expr_100 -> expr_150 '=' expr_100 : {match,?anno('$2'),'$1','$3'}.
expr_100 -> expr_150 '!' expr_100 : ?mkop2('$1', '$2', '$3').
expr_100 -> expr_150 : '$1'.
@@ -260,7 +260,7 @@ expr_700 -> record_expr : '$1'.
expr_700 -> expr_800 : '$1'.
expr_800 -> expr_max ':' expr_max :
- {remote,?line('$2'),'$1','$3'}.
+ {remote,?anno('$2'),'$1','$3'}.
expr_800 -> expr_max : '$1'.
expr_max -> var : '$1'.
@@ -272,7 +272,7 @@ expr_max -> binary_comprehension : '$1'.
expr_max -> tuple : '$1'.
%%expr_max -> struct : '$1'.
expr_max -> '(' expr ')' : '$2'.
-expr_max -> 'begin' exprs 'end' : {block,?line('$1'),'$2'}.
+expr_max -> 'begin' exprs 'end' : {block,?anno('$1'),'$2'}.
expr_max -> if_expr : '$1'.
expr_max -> case_expr : '$1'.
expr_max -> receive_expr : '$1'.
@@ -280,22 +280,22 @@ expr_max -> fun_expr : '$1'.
expr_max -> try_expr : '$1'.
-list -> '[' ']' : {nil,?line('$1')}.
-list -> '[' expr tail : {cons,?line('$1'),'$2','$3'}.
+list -> '[' ']' : {nil,?anno('$1')}.
+list -> '[' expr tail : {cons,?anno('$1'),'$2','$3'}.
-tail -> ']' : {nil,?line('$1')}.
+tail -> ']' : {nil,?anno('$1')}.
tail -> '|' expr ']' : '$2'.
-tail -> ',' expr tail : {cons,?line('$2'),'$2','$3'}.
+tail -> ',' expr tail : {cons,?anno('$2'),'$2','$3'}.
-binary -> '<<' '>>' : {bin,?line('$1'),[]}.
-binary -> '<<' bin_elements '>>' : {bin,?line('$1'),'$2'}.
+binary -> '<<' '>>' : {bin,?anno('$1'),[]}.
+binary -> '<<' bin_elements '>>' : {bin,?anno('$1'),'$2'}.
bin_elements -> bin_element : ['$1'].
bin_elements -> bin_element ',' bin_elements : ['$1'|'$3'].
bin_element -> bit_expr opt_bit_size_expr opt_bit_type_list :
- {bin_element,?line('$1'),'$1','$2','$3'}.
+ {bin_element,?anno('$1'),'$1','$2','$3'}.
bit_expr -> prefix_op expr_max : ?mkop1('$1', '$2').
bit_expr -> expr_max : '$1'.
@@ -316,29 +316,29 @@ bit_size_expr -> expr_max : '$1'.
list_comprehension -> '[' expr '||' lc_exprs ']' :
- {lc,?line('$1'),'$2','$4'}.
+ {lc,?anno('$1'),'$2','$4'}.
binary_comprehension -> '<<' binary '||' lc_exprs '>>' :
- {bc,?line('$1'),'$2','$4'}.
+ {bc,?anno('$1'),'$2','$4'}.
lc_exprs -> lc_expr : ['$1'].
lc_exprs -> lc_expr ',' lc_exprs : ['$1'|'$3'].
lc_expr -> expr : '$1'.
-lc_expr -> expr '<-' expr : {generate,?line('$2'),'$1','$3'}.
-lc_expr -> binary '<=' expr : {b_generate,?line('$2'),'$1','$3'}.
+lc_expr -> expr '<-' expr : {generate,?anno('$2'),'$1','$3'}.
+lc_expr -> binary '<=' expr : {b_generate,?anno('$2'),'$1','$3'}.
-tuple -> '{' '}' : {tuple,?line('$1'),[]}.
-tuple -> '{' exprs '}' : {tuple,?line('$1'),'$2'}.
+tuple -> '{' '}' : {tuple,?anno('$1'),[]}.
+tuple -> '{' exprs '}' : {tuple,?anno('$1'),'$2'}.
%%struct -> atom tuple :
-%% {struct,?line('$1'),element(3, '$1'),element(3, '$2')}.
+%% {struct,?anno('$1'),element(3, '$1'),element(3, '$2')}.
map_expr -> '#' map_tuple :
- {map, ?line('$1'),'$2'}.
+ {map, ?anno('$1'),'$2'}.
map_expr -> expr_max '#' map_tuple :
- {map, ?line('$2'),'$1','$3'}.
+ {map, ?anno('$2'),'$1','$3'}.
map_expr -> map_expr '#' map_tuple :
- {map, ?line('$2'),'$1','$3'}.
+ {map, ?anno('$2'),'$1','$3'}.
map_tuple -> '{' '}' : [].
map_tuple -> '{' map_fields '}' : '$2'.
@@ -350,10 +350,10 @@ map_field -> map_field_assoc : '$1'.
map_field -> map_field_exact : '$1'.
map_field_assoc -> map_key '=>' expr :
- {map_field_assoc,?line('$1'),'$1','$3'}.
+ {map_field_assoc,?anno('$1'),'$1','$3'}.
map_field_exact -> map_key ':=' expr :
- {map_field_exact,?line('$1'),'$1','$3'}.
+ {map_field_exact,?anno('$1'),'$1','$3'}.
map_key -> expr : '$1'.
@@ -363,17 +363,17 @@ map_key -> expr : '$1'.
%% always atoms for the moment, this might change in the future.
record_expr -> '#' atom '.' atom :
- {record_index,?line('$1'),element(3, '$2'),'$4'}.
+ {record_index,?anno('$1'),element(3, '$2'),'$4'}.
record_expr -> '#' atom record_tuple :
- {record,?line('$1'),element(3, '$2'),'$3'}.
+ {record,?anno('$1'),element(3, '$2'),'$3'}.
record_expr -> expr_max '#' atom '.' atom :
- {record_field,?line('$2'),'$1',element(3, '$3'),'$5'}.
+ {record_field,?anno('$2'),'$1',element(3, '$3'),'$5'}.
record_expr -> expr_max '#' atom record_tuple :
- {record,?line('$2'),'$1',element(3, '$3'),'$4'}.
+ {record,?anno('$2'),'$1',element(3, '$3'),'$4'}.
record_expr -> record_expr '#' atom '.' atom :
- {record_field,?line('$2'),'$1',element(3, '$3'),'$5'}.
+ {record_field,?anno('$2'),'$1',element(3, '$3'),'$5'}.
record_expr -> record_expr '#' atom record_tuple :
- {record,?line('$2'),'$1',element(3, '$3'),'$4'}.
+ {record,?anno('$2'),'$1',element(3, '$3'),'$4'}.
record_tuple -> '{' '}' : [].
record_tuple -> '{' record_fields '}' : '$2'.
@@ -381,47 +381,47 @@ record_tuple -> '{' record_fields '}' : '$2'.
record_fields -> record_field : ['$1'].
record_fields -> record_field ',' record_fields : ['$1' | '$3'].
-record_field -> var '=' expr : {record_field,?line('$1'),'$1','$3'}.
-record_field -> atom '=' expr : {record_field,?line('$1'),'$1','$3'}.
+record_field -> var '=' expr : {record_field,?anno('$1'),'$1','$3'}.
+record_field -> atom '=' expr : {record_field,?anno('$1'),'$1','$3'}.
%% N.B. This is called from expr_700.
function_call -> expr_800 argument_list :
- {call,?line('$1'),'$1',element(1, '$2')}.
+ {call,?anno('$1'),'$1',element(1, '$2')}.
-if_expr -> 'if' if_clauses 'end' : {'if',?line('$1'),'$2'}.
+if_expr -> 'if' if_clauses 'end' : {'if',?anno('$1'),'$2'}.
if_clauses -> if_clause : ['$1'].
if_clauses -> if_clause ';' if_clauses : ['$1' | '$3'].
if_clause -> guard clause_body :
- {clause,?line(hd(hd('$1'))),[],'$1','$2'}.
+ {clause,?anno(hd(hd('$1'))),[],'$1','$2'}.
case_expr -> 'case' expr 'of' cr_clauses 'end' :
- {'case',?line('$1'),'$2','$4'}.
+ {'case',?anno('$1'),'$2','$4'}.
cr_clauses -> cr_clause : ['$1'].
cr_clauses -> cr_clause ';' cr_clauses : ['$1' | '$3'].
cr_clause -> expr clause_guard clause_body :
- {clause,?line('$1'),['$1'],'$2','$3'}.
+ {clause,?anno('$1'),['$1'],'$2','$3'}.
receive_expr -> 'receive' cr_clauses 'end' :
- {'receive',?line('$1'),'$2'}.
+ {'receive',?anno('$1'),'$2'}.
receive_expr -> 'receive' 'after' expr clause_body 'end' :
- {'receive',?line('$1'),[],'$3','$4'}.
+ {'receive',?anno('$1'),[],'$3','$4'}.
receive_expr -> 'receive' cr_clauses 'after' expr clause_body 'end' :
- {'receive',?line('$1'),'$2','$4','$5'}.
+ {'receive',?anno('$1'),'$2','$4','$5'}.
fun_expr -> 'fun' atom '/' integer :
- {'fun',?line('$1'),{function,element(3, '$2'),element(3, '$4')}}.
+ {'fun',?anno('$1'),{function,element(3, '$2'),element(3, '$4')}}.
fun_expr -> 'fun' atom_or_var ':' atom_or_var '/' integer_or_var :
- {'fun',?line('$1'),{function,'$2','$4','$6'}}.
+ {'fun',?anno('$1'),{function,'$2','$4','$6'}}.
fun_expr -> 'fun' fun_clauses 'end' :
- build_fun(?line('$1'), '$2').
+ build_fun(?anno('$1'), '$2').
atom_or_var -> atom : '$1'.
atom_or_var -> var : '$1'.
@@ -433,16 +433,16 @@ fun_clauses -> fun_clause : ['$1'].
fun_clauses -> fun_clause ';' fun_clauses : ['$1' | '$3'].
fun_clause -> argument_list clause_guard clause_body :
- {Args,Pos} = '$1',
- {clause,Pos,'fun',Args,'$2','$3'}.
+ {Args,Anno} = '$1',
+ {clause,Anno,'fun',Args,'$2','$3'}.
fun_clause -> var argument_list clause_guard clause_body :
{clause,element(2, '$1'),element(3, '$1'),element(1, '$2'),'$3','$4'}.
try_expr -> 'try' exprs 'of' cr_clauses try_catch :
- build_try(?line('$1'),'$2','$4','$5').
+ build_try(?anno('$1'),'$2','$4','$5').
try_expr -> 'try' exprs try_catch :
- build_try(?line('$1'),'$2',[],'$3').
+ build_try(?anno('$1'),'$2',[],'$3').
try_catch -> 'catch' try_clauses 'end' :
{'$2',[]}.
@@ -455,18 +455,18 @@ try_clauses -> try_clause : ['$1'].
try_clauses -> try_clause ';' try_clauses : ['$1' | '$3'].
try_clause -> expr clause_guard clause_body :
- L = ?line('$1'),
- {clause,L,[{tuple,L,[{atom,L,throw},'$1',{var,L,'_'}]}],'$2','$3'}.
+ A = ?anno('$1'),
+ {clause,A,[{tuple,A,[{atom,A,throw},'$1',{var,A,'_'}]}],'$2','$3'}.
try_clause -> atom ':' expr clause_guard clause_body :
- L = ?line('$1'),
- {clause,L,[{tuple,L,['$1','$3',{var,L,'_'}]}],'$4','$5'}.
+ A = ?anno('$1'),
+ {clause,A,[{tuple,A,['$1','$3',{var,A,'_'}]}],'$4','$5'}.
try_clause -> var ':' expr clause_guard clause_body :
- L = ?line('$1'),
- {clause,L,[{tuple,L,['$1','$3',{var,L,'_'}]}],'$4','$5'}.
+ A = ?anno('$1'),
+ {clause,A,[{tuple,A,['$1','$3',{var,A,'_'}]}],'$4','$5'}.
-argument_list -> '(' ')' : {[],?line('$1')}.
-argument_list -> '(' exprs ')' : {'$2',?line('$1')}.
+argument_list -> '(' ')' : {[],?anno('$1')}.
+argument_list -> '(' exprs ')' : {'$2',?anno('$1')}.
exprs -> expr : ['$1'].
@@ -483,7 +483,7 @@ atomic -> strings : '$1'.
strings -> string : '$1'.
strings -> string strings :
- {string,?line('$1'),element(3, '$1') ++ element(3, '$2')}.
+ {string,?anno('$1'),element(3, '$1') ++ element(3, '$2')}.
prefix_op -> '+' : '$1'.
prefix_op -> '-' : '$1'.
@@ -524,8 +524,14 @@ Erlang code.
-export([normalise/1,abstract/1,tokens/1,tokens/2]).
-export([abstract/2]).
-export([inop_prec/1,preop_prec/1,func_prec/0,max_prec/0]).
+-export([map_anno/2, fold_anno/3, mapfold_anno/3,
+ new_anno/1, anno_to_term/1, anno_from_term/1]).
-export([set_line/2,get_attribute/2,get_attributes/1]).
+-deprecated([{set_line, 2, next_major_release},
+ {get_attribute, 2, next_major_release},
+ {get_attributes, 1, next_major_release}]).
+
%% The following directive is needed for (significantly) faster compilation
%% of the generated .erl file by the HiPE compiler. Please do not remove.
-compile([{hipe,[{regalloc,linear_scan}]}]).
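
The newly exported erl_parse:map_anno/2 rewrites every annotation in an abstract form; the sketch below (not part of the patch, stamp/2 is a hypothetical name) shows the pattern that copy_expr/2 in erl_lint now relies on.

%% Hypothetical example: replace every annotation in AbsExpr with a
%% single fixed one, as copy_expr/2 does in erl_lint.erl.
stamp(AbsExpr, Location) ->
    Anno = erl_anno:new(Location),
    erl_parse:map_anno(fun(_A) -> Anno end, AbsExpr).
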
@@ -533,30 +539,31 @@ Erlang code.
-export_type([abstract_clause/0, abstract_expr/0, abstract_form/0,
error_info/0]).
+%% XXX. To be refined.
-type abstract_clause() :: term().
-type abstract_expr() :: term().
-type abstract_form() :: term().
-type error_description() :: term().
--type error_info() :: {erl_scan:line(), module(), error_description()}.
+-type error_info() :: {erl_anno:line(), module(), error_description()}.
-type token() :: erl_scan:token().
-%% mkop(Op, Arg) -> {op,Line,Op,Arg}.
-%% mkop(Left, Op, Right) -> {op,Line,Op,Left,Right}.
+%% mkop(Op, Arg) -> {op,Anno,Op,Arg}.
+%% mkop(Left, Op, Right) -> {op,Anno,Op,Left,Right}.
--define(mkop2(L, OpPos, R),
+-define(mkop2(L, OpAnno, R),
begin
- {Op,Pos} = OpPos,
- {op,Pos,Op,L,R}
+ {Op,Anno} = OpAnno,
+ {op,Anno,Op,L,R}
end).
--define(mkop1(OpPos, A),
+-define(mkop1(OpAnno, A),
begin
- {Op,Pos} = OpPos,
- {op,Pos,Op,A}
+ {Op,Anno} = OpAnno,
+ {op,Anno,Op,A}
end).
-%% keep track of line info in tokens
--define(line(Tup), element(2, Tup)).
+%% keep track of annotation info in tokens
+-define(anno(Tup), element(2, Tup)).
%% Entry points compatible to old erl_parse.
%% These really suck and are only here until Calle gets multiple
@@ -566,10 +573,10 @@ Erlang code.
Tokens :: [token()],
AbsForm :: abstract_form(),
ErrorInfo :: error_info().
-parse_form([{'-',L1},{atom,L2,spec}|Tokens]) ->
- parse([{'-',L1},{'spec',L2}|Tokens]);
-parse_form([{'-',L1},{atom,L2,callback}|Tokens]) ->
- parse([{'-',L1},{'callback',L2}|Tokens]);
+parse_form([{'-',A1},{atom,A2,spec}|Tokens]) ->
+ parse([{'-',A1},{'spec',A2}|Tokens]);
+parse_form([{'-',A1},{atom,A2,callback}|Tokens]) ->
+ parse([{'-',A1},{'callback',A2}|Tokens]);
parse_form(Tokens) ->
parse(Tokens).
@@ -578,7 +585,8 @@ parse_form(Tokens) ->
ExprList :: [abstract_expr()],
ErrorInfo :: error_info().
parse_exprs(Tokens) ->
- case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
+ A = erl_anno:new(0),
+ case parse([{atom,A,f},{'(',A},{')',A},{'->',A}|Tokens]) of
{ok,{function,_Lf,f,0,[{clause,_Lc,[],[],Exprs}]}} ->
{ok,Exprs};
{error,_} = Err -> Err
@@ -589,42 +597,43 @@ parse_exprs(Tokens) ->
Term :: term(),
ErrorInfo :: error_info().
parse_term(Tokens) ->
- case parse([{atom,0,f},{'(',0},{')',0},{'->',0}|Tokens]) of
- {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[Expr]}]}} ->
+ A = erl_anno:new(0),
+ case parse([{atom,A,f},{'(',A},{')',A},{'->',A}|Tokens]) of
+ {ok,{function,_Af,f,0,[{clause,_Ac,[],[],[Expr]}]}} ->
try normalise(Expr) of
Term -> {ok,Term}
catch
- _:_R -> {error,{?line(Expr),?MODULE,"bad term"}}
+ _:_R -> {error,{location(?anno(Expr)),?MODULE,"bad term"}}
end;
- {ok,{function,_Lf,f,0,[{clause,_Lc,[],[],[_E1,E2|_Es]}]}} ->
- {error,{?line(E2),?MODULE,"bad term"}};
+ {ok,{function,_Af,f,A,[{clause,_Ac,[],[],[_E1,E2|_Es]}]}} ->
+ {error,{location(?anno(E2)),?MODULE,"bad term"}};
{error,_} = Err -> Err
end.
-type attributes() :: 'export' | 'file' | 'import' | 'module'
| 'opaque' | 'record' | 'type'.
-build_typed_attribute({atom,La,record},
- {typed_record, {atom,_Ln,RecordName}, RecTuple}) ->
- {attribute,La,record,{RecordName,record_tuple(RecTuple)}};
-build_typed_attribute({atom,La,Attr},
+build_typed_attribute({atom,Aa,record},
+ {typed_record, {atom,_An,RecordName}, RecTuple}) ->
+ {attribute,Aa,record,{RecordName,record_tuple(RecTuple)}};
+build_typed_attribute({atom,Aa,Attr},
{type_def, {call,_,{atom,_,TypeName},Args}, Type})
when Attr =:= 'type' ; Attr =:= 'opaque' ->
case lists:all(fun({var, _, _}) -> true;
(_) -> false
end, Args) of
- true -> {attribute,La,Attr,{TypeName,Type,Args}};
- false -> error_bad_decl(La, Attr)
+ true -> {attribute,Aa,Attr,{TypeName,Type,Args}};
+ false -> error_bad_decl(Aa, Attr)
end;
-build_typed_attribute({atom,La,Attr},_) ->
+build_typed_attribute({atom,Aa,Attr},_) ->
case Attr of
- record -> error_bad_decl(La, record);
- type -> error_bad_decl(La, type);
- opaque -> error_bad_decl(La, opaque);
- _ -> ret_err(La, "bad attribute")
+ record -> error_bad_decl(Aa, record);
+ type -> error_bad_decl(Aa, type);
+ opaque -> error_bad_decl(Aa, opaque);
+ _ -> ret_err(Aa, "bad attribute")
end.
-build_type_spec({Kind,La}, {SpecFun, TypeSpecs})
+build_type_spec({Kind,Aa}, {SpecFun, TypeSpecs})
when (Kind =:= spec) or (Kind =:= callback) ->
NewSpecFun =
case SpecFun of
@@ -639,7 +648,7 @@ build_type_spec({Kind,La}, {SpecFun, TypeSpecs})
%% Old style spec. Allow this for now.
{Mod,Fun,Arity}
end,
- {attribute,La,Kind,{NewSpecFun, TypeSpecs}}.
+ {attribute,Aa,Kind,{NewSpecFun, TypeSpecs}}.
find_arity_from_specs([Spec|_]) ->
%% Use the first spec to find the arity. If all are not the same,
@@ -651,40 +660,40 @@ find_arity_from_specs([Spec|_]) ->
{type, _, 'fun', [{type, _, product, Args},_]} = Fun,
length(Args).
-build_def({var, L, '_'}, _Types) ->
- ret_err(L, "bad type variable");
+build_def({var, A, '_'}, _Types) ->
+ ret_err(A, "bad type variable");
build_def(LHS, Types) ->
- IsSubType = {atom, ?line(LHS), is_subtype},
- {type, ?line(LHS), constraint, [IsSubType, [LHS, Types]]}.
+ IsSubType = {atom, ?anno(LHS), is_subtype},
+ {type, ?anno(LHS), constraint, [IsSubType, [LHS, Types]]}.
-lift_unions(T1, {type, _La, union, List}) ->
- {type, ?line(T1), union, [T1|List]};
+lift_unions(T1, {type, _Aa, union, List}) ->
+ {type, ?anno(T1), union, [T1|List]};
lift_unions(T1, T2) ->
- {type, ?line(T1), union, [T1, T2]}.
+ {type, ?anno(T1), union, [T1, T2]}.
-skip_paren({paren_type,_L,[Type]}) ->
+skip_paren({paren_type,_A,[Type]}) ->
skip_paren(Type);
skip_paren(Type) ->
Type.
-build_gen_type({atom, La, tuple}) ->
- {type, La, tuple, any};
-build_gen_type({atom, La, map}) ->
- {type, La, map, any};
-build_gen_type({atom, La, Name}) ->
+build_gen_type({atom, Aa, tuple}) ->
+ {type, Aa, tuple, any};
+build_gen_type({atom, Aa, map}) ->
+ {type, Aa, map, any};
+build_gen_type({atom, Aa, Name}) ->
Tag = type_tag(Name, 0),
- {Tag, La, Name, []}.
+ {Tag, Aa, Name, []}.
build_bin_type([{var, _, '_'}|Left], Int) ->
build_bin_type(Left, Int);
build_bin_type([], Int) ->
skip_paren(Int);
-build_bin_type([{var, La, _}|_], _) ->
- ret_err(La, "Bad binary type").
+build_bin_type([{var, Aa, _}|_], _) ->
+ ret_err(Aa, "Bad binary type").
-build_type({atom, L, Name}, Types) ->
+build_type({atom, A, Name}, Types) ->
Tag = type_tag(Name, length(Types)),
- {Tag, L, Name, Types}.
+ {Tag, A, Name, Types}.
type_tag(TypeName, NumberOfTypeVariables) ->
case erl_internal:is_type(TypeName, NumberOfTypeVariables) of
@@ -692,71 +701,75 @@ type_tag(TypeName, NumberOfTypeVariables) ->
false -> user_type
end.
+abstract2(Term, Anno) ->
+ Line = erl_anno:line(Anno),
+ abstract(Term, Line).
+
%% build_attribute(AttrName, AttrValue) ->
-%% {attribute,Line,module,Module}
-%% {attribute,Line,export,Exports}
-%% {attribute,Line,import,Imports}
-%% {attribute,Line,record,{Name,Inits}}
-%% {attribute,Line,file,{Name,Line}}
-%% {attribute,Line,Name,Val}
-
-build_attribute({atom,La,module}, Val) ->
+%% {attribute,Anno,module,Module}
+%% {attribute,Anno,export,Exports}
+%% {attribute,Anno,import,Imports}
+%% {attribute,Anno,record,{Name,Inits}}
+%% {attribute,Anno,file,{Name,Line}}
+%% {attribute,Anno,Name,Val}
+
+build_attribute({atom,Aa,module}, Val) ->
case Val of
- [{atom,_Lm,Module}] ->
- {attribute,La,module,Module};
- [{atom,_Lm,Module},ExpList] ->
- {attribute,La,module,{Module,var_list(ExpList)}};
+ [{atom,_Am,Module}] ->
+ {attribute,Aa,module,Module};
+ [{atom,_Am,Module},ExpList] ->
+ {attribute,Aa,module,{Module,var_list(ExpList)}};
_Other ->
- error_bad_decl(La, module)
+ error_bad_decl(Aa, module)
end;
-build_attribute({atom,La,export}, Val) ->
+build_attribute({atom,Aa,export}, Val) ->
case Val of
[ExpList] ->
- {attribute,La,export,farity_list(ExpList)};
- _Other -> error_bad_decl(La, export)
+ {attribute,Aa,export,farity_list(ExpList)};
+ _Other -> error_bad_decl(Aa, export)
end;
-build_attribute({atom,La,import}, Val) ->
+build_attribute({atom,Aa,import}, Val) ->
case Val of
- [{atom,_Lm,Mod},ImpList] ->
- {attribute,La,import,{Mod,farity_list(ImpList)}};
- _Other -> error_bad_decl(La, import)
+ [{atom,_Am,Mod},ImpList] ->
+ {attribute,Aa,import,{Mod,farity_list(ImpList)}};
+ _Other -> error_bad_decl(Aa, import)
end;
-build_attribute({atom,La,record}, Val) ->
+build_attribute({atom,Aa,record}, Val) ->
case Val of
- [{atom,_Ln,Record},RecTuple] ->
- {attribute,La,record,{Record,record_tuple(RecTuple)}};
- _Other -> error_bad_decl(La, record)
+ [{atom,_An,Record},RecTuple] ->
+ {attribute,Aa,record,{Record,record_tuple(RecTuple)}};
+ _Other -> error_bad_decl(Aa, record)
end;
-build_attribute({atom,La,file}, Val) ->
+build_attribute({atom,Aa,file}, Val) ->
case Val of
- [{string,_Ln,Name},{integer,_Ll,Line}] ->
- {attribute,La,file,{Name,Line}};
- _Other -> error_bad_decl(La, file)
+ [{string,_An,Name},{integer,_Al,Line}] ->
+ {attribute,Aa,file,{Name,Line}};
+ _Other -> error_bad_decl(Aa, file)
end;
-build_attribute({atom,La,Attr}, Val) ->
+build_attribute({atom,Aa,Attr}, Val) ->
case Val of
[Expr0] ->
Expr = attribute_farity(Expr0),
- {attribute,La,Attr,term(Expr)};
- _Other -> ret_err(La, "bad attribute")
+ {attribute,Aa,Attr,term(Expr)};
+ _Other -> ret_err(Aa, "bad attribute")
end.
-var_list({cons,_Lc,{var,_,V},Tail}) ->
+var_list({cons,_Ac,{var,_,V},Tail}) ->
[V|var_list(Tail)];
-var_list({nil,_Ln}) -> [];
+var_list({nil,_An}) -> [];
var_list(Other) ->
- ret_err(?line(Other), "bad variable list").
+ ret_err(?anno(Other), "bad variable list").
-attribute_farity({cons,L,H,T}) ->
- {cons,L,attribute_farity(H),attribute_farity(T)};
-attribute_farity({tuple,L,Args0}) ->
+attribute_farity({cons,A,H,T}) ->
+ {cons,A,attribute_farity(H),attribute_farity(T)};
+attribute_farity({tuple,A,Args0}) ->
Args = attribute_farity_list(Args0),
- {tuple,L,Args};
-attribute_farity({map,L,Args0}) ->
+ {tuple,A,Args};
+attribute_farity({map,A,Args0}) ->
Args = attribute_farity_map(Args0),
- {map,L,Args};
-attribute_farity({op,L,'/',{atom,_,_}=Name,{integer,_,_}=Arity}) ->
- {tuple,L,[Name,Arity]};
+ {map,A,Args};
+attribute_farity({op,A,'/',{atom,_,_}=Name,{integer,_,_}=Arity}) ->
+ {tuple,A,[Name,Arity]};
attribute_farity(Other) -> Other.
attribute_farity_list(Args) ->
@@ -764,45 +777,45 @@ attribute_farity_list(Args) ->
%% It is not meaningful to have farity keys.
attribute_farity_map(Args) ->
- [{Op,L,K,attribute_farity(V)} || {Op,L,K,V} <- Args].
+ [{Op,A,K,attribute_farity(V)} || {Op,A,K,V} <- Args].
--spec error_bad_decl(integer(), attributes()) -> no_return().
+-spec error_bad_decl(erl_anno:anno(), attributes()) -> no_return().
-error_bad_decl(L, S) ->
- ret_err(L, io_lib:format("bad ~w declaration", [S])).
+error_bad_decl(Anno, S) ->
+ ret_err(Anno, io_lib:format("bad ~w declaration", [S])).
-farity_list({cons,_Lc,{op,_Lo,'/',{atom,_La,A},{integer,_Li,I}},Tail}) ->
+farity_list({cons,_Ac,{op,_Ao,'/',{atom,_Aa,A},{integer,_Ai,I}},Tail}) ->
[{A,I}|farity_list(Tail)];
-farity_list({nil,_Ln}) -> [];
+farity_list({nil,_An}) -> [];
farity_list(Other) ->
- ret_err(?line(Other), "bad function arity").
+ ret_err(?anno(Other), "bad function arity").
-record_tuple({tuple,_Lt,Fields}) ->
+record_tuple({tuple,_At,Fields}) ->
record_fields(Fields);
record_tuple(Other) ->
- ret_err(?line(Other), "bad record declaration").
+ ret_err(?anno(Other), "bad record declaration").
-record_fields([{atom,La,A}|Fields]) ->
- [{record_field,La,{atom,La,A}}|record_fields(Fields)];
-record_fields([{match,_Lm,{atom,La,A},Expr}|Fields]) ->
- [{record_field,La,{atom,La,A},Expr}|record_fields(Fields)];
+record_fields([{atom,Aa,A}|Fields]) ->
+ [{record_field,Aa,{atom,Aa,A}}|record_fields(Fields)];
+record_fields([{match,_Am,{atom,Aa,A},Expr}|Fields]) ->
+ [{record_field,Aa,{atom,Aa,A},Expr}|record_fields(Fields)];
record_fields([{typed,Expr,TypeInfo}|Fields]) ->
[Field] = record_fields([Expr]),
TypeInfo1 =
case Expr of
{match, _, _, _} -> TypeInfo; %% If we have an initializer.
- {atom, La, _} ->
+ {atom, Aa, _} ->
case has_undefined(TypeInfo) of
false ->
TypeInfo2 = maybe_add_paren(TypeInfo),
- lift_unions(abstract(undefined, La), TypeInfo2);
+ lift_unions(abstract2(undefined, Aa), TypeInfo2);
true ->
TypeInfo
end
end,
[{typed_record_field,Field,TypeInfo1}|record_fields(Fields)];
record_fields([Other|_Fields]) ->
- ret_err(?line(Other), "bad record field");
+ ret_err(?anno(Other), "bad record field");
record_fields([]) -> [].
has_undefined({atom,_,undefined}) ->
@@ -816,52 +829,53 @@ has_undefined({type,_,union,Ts}) ->
has_undefined(_) ->
false.
-maybe_add_paren({ann_type,L,T}) ->
- {paren_type,L,[{ann_type,L,T}]};
+maybe_add_paren({ann_type,A,T}) ->
+ {paren_type,A,[{ann_type,A,T}]};
maybe_add_paren(T) ->
T.
term(Expr) ->
try normalise(Expr)
- catch _:_R -> ret_err(?line(Expr), "bad attribute")
+ catch _:_R -> ret_err(?anno(Expr), "bad attribute")
end.
-%% build_function([Clause]) -> {function,Line,Name,Arity,[Clause]}
+%% build_function([Clause]) -> {function,Anno,Name,Arity,[Clause]}
build_function(Cs) ->
Name = element(3, hd(Cs)),
Arity = length(element(4, hd(Cs))),
- {function,?line(hd(Cs)),Name,Arity,check_clauses(Cs, Name, Arity)}.
+ {function,?anno(hd(Cs)),Name,Arity,check_clauses(Cs, Name, Arity)}.
-%% build_fun(Line, [Clause]) -> {'fun',Line,{clauses,[Clause]}}.
+%% build_fun(Anno, [Clause]) -> {'fun',Anno,{clauses,[Clause]}}.
-build_fun(Line, Cs) ->
+build_fun(Anno, Cs) ->
Name = element(3, hd(Cs)),
Arity = length(element(4, hd(Cs))),
CheckedCs = check_clauses(Cs, Name, Arity),
case Name of
'fun' ->
- {'fun',Line,{clauses,CheckedCs}};
+ {'fun',Anno,{clauses,CheckedCs}};
Name ->
- {named_fun,Line,Name,CheckedCs}
+ {named_fun,Anno,Name,CheckedCs}
end.
check_clauses(Cs, Name, Arity) ->
[case C of
- {clause,L,N,As,G,B} when N =:= Name, length(As) =:= Arity ->
- {clause,L,As,G,B};
- {clause,L,_N,_As,_G,_B} ->
- ret_err(L, "head mismatch")
+ {clause,A,N,As,G,B} when N =:= Name, length(As) =:= Arity ->
+ {clause,A,As,G,B};
+ {clause,A,_N,_As,_G,_B} ->
+ ret_err(A, "head mismatch")
end || C <- Cs].
-build_try(L,Es,Scs,{Ccs,As}) ->
- {'try',L,Es,Scs,Ccs,As}.
+build_try(A,Es,Scs,{Ccs,As}) ->
+ {'try',A,Es,Scs,Ccs,As}.
-spec ret_err(_, _) -> no_return().
-ret_err(L, S) ->
- {location,Location} = get_attribute(L, location),
- return_error(Location, S).
+ret_err(Anno, S) ->
+ return_error(location(Anno), S).
+location(Anno) ->
+ erl_anno:location(Anno).
%% Convert between the abstract form of a term and a term.
@@ -909,7 +923,8 @@ normalise_list([]) ->
Data :: term(),
AbsTerm :: abstract_expr().
abstract(T) ->
- abstract(T, 0, enc_func(epp:default_encoding())).
+ Anno = erl_anno:new(0),
+ abstract(T, Anno, enc_func(epp:default_encoding())).
-type encoding_func() :: fun((non_neg_integer()) -> boolean()).
@@ -919,16 +934,18 @@ abstract(T) ->
Options :: Line | [Option],
Option :: {line, Line} | {encoding, Encoding},
Encoding :: 'latin1' | 'unicode' | 'utf8' | 'none' | encoding_func(),
- Line :: erl_scan:line(),
+ Line :: erl_anno:line(),
AbsTerm :: abstract_expr().
abstract(T, Line) when is_integer(Line) ->
- abstract(T, Line, enc_func(epp:default_encoding()));
+ Anno = erl_anno:new(Line),
+ abstract(T, Anno, enc_func(epp:default_encoding()));
abstract(T, Options) when is_list(Options) ->
Line = proplists:get_value(line, Options, 0),
Encoding = proplists:get_value(encoding, Options,epp:default_encoding()),
EncFunc = enc_func(Encoding),
- abstract(T, Line, EncFunc).
+ Anno = erl_anno:new(Line),
+ abstract(T, Anno, EncFunc).
-define(UNICODE(C),
(C < 16#D800 orelse
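Aside (illustrative, not part of the patch): abstract/1 and abstract/2 now wrap the given line in erl_anno:new/1 before delegating to abstract/3, so callers keep passing plain line numbers or an option list. A minimal sketch of the caller-facing behaviour, assuming the compact annotation representation that prints as a bare line number:

    %% The annotation is created internally with erl_anno:new/1.
    {atom,42,foo} = erl_parse:abstract(foo, 42),
    {string,7,"hi"} = erl_parse:abstract("hi", [{line,7}, {encoding,utf8}]),
    {tuple,0,[{atom,0,ok},{float,0,3.14}]} = erl_parse:abstract({ok, 3.14}).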
@@ -942,53 +959,53 @@ enc_func(none) -> none;
enc_func(Fun) when is_function(Fun, 1) -> Fun;
enc_func(Term) -> erlang:error({badarg, Term}).
-abstract(T, L, _E) when is_integer(T) -> {integer,L,T};
-abstract(T, L, _E) when is_float(T) -> {float,L,T};
-abstract(T, L, _E) when is_atom(T) -> {atom,L,T};
-abstract([], L, _E) -> {nil,L};
-abstract(B, L, _E) when is_bitstring(B) ->
- {bin, L, [abstract_byte(Byte, L) || Byte <- bitstring_to_list(B)]};
-abstract([H|T], L, none=E) ->
- {cons,L,abstract(H, L, E),abstract(T, L, E)};
-abstract(List, L, E) when is_list(List) ->
- abstract_list(List, [], L, E);
-abstract(Tuple, L, E) when is_tuple(Tuple) ->
- {tuple,L,abstract_tuple_list(tuple_to_list(Tuple), L, E)};
-abstract(Map, L, E) when is_map(Map) ->
- {map,L,abstract_map_fields(maps:to_list(Map),L,E)}.
-
-abstract_list([H|T], String, L, E) ->
+abstract(T, A, _E) when is_integer(T) -> {integer,A,T};
+abstract(T, A, _E) when is_float(T) -> {float,A,T};
+abstract(T, A, _E) when is_atom(T) -> {atom,A,T};
+abstract([], A, _E) -> {nil,A};
+abstract(B, A, _E) when is_bitstring(B) ->
+ {bin, A, [abstract_byte(Byte, A) || Byte <- bitstring_to_list(B)]};
+abstract([H|T], A, none=E) ->
+ {cons,A,abstract(H, A, E),abstract(T, A, E)};
+abstract(List, A, E) when is_list(List) ->
+ abstract_list(List, [], A, E);
+abstract(Tuple, A, E) when is_tuple(Tuple) ->
+ {tuple,A,abstract_tuple_list(tuple_to_list(Tuple), A, E)};
+abstract(Map, A, E) when is_map(Map) ->
+ {map,A,abstract_map_fields(maps:to_list(Map),A,E)}.
+
+abstract_list([H|T], String, A, E) ->
case is_integer(H) andalso H >= 0 andalso E(H) of
true ->
- abstract_list(T, [H|String], L, E);
+ abstract_list(T, [H|String], A, E);
false ->
- AbstrList = {cons,L,abstract(H, L, E),abstract(T, L, E)},
- not_string(String, AbstrList, L, E)
+ AbstrList = {cons,A,abstract(H, A, E),abstract(T, A, E)},
+ not_string(String, AbstrList, A, E)
end;
-abstract_list([], String, L, _E) ->
- {string, L, lists:reverse(String)};
-abstract_list(T, String, L, E) ->
- not_string(String, abstract(T, L, E), L, E).
-
-not_string([C|T], Result, L, E) ->
- not_string(T, {cons, L, {integer, L, C}, Result}, L, E);
-not_string([], Result, _L, _E) ->
+abstract_list([], String, A, _E) ->
+ {string, A, lists:reverse(String)};
+abstract_list(T, String, A, E) ->
+ not_string(String, abstract(T, A, E), A, E).
+
+not_string([C|T], Result, A, E) ->
+ not_string(T, {cons, A, {integer, A, C}, Result}, A, E);
+not_string([], Result, _A, _E) ->
Result.
-abstract_tuple_list([H|T], L, E) ->
- [abstract(H, L, E)|abstract_tuple_list(T, L, E)];
-abstract_tuple_list([], _L, _E) ->
+abstract_tuple_list([H|T], A, E) ->
+ [abstract(H, A, E)|abstract_tuple_list(T, A, E)];
+abstract_tuple_list([], _A, _E) ->
[].
-abstract_map_fields(Fs,L,E) ->
- [{map_field_assoc,L,abstract(K,L,E),abstract(V,L,E)}||{K,V}<-Fs].
+abstract_map_fields(Fs,A,E) ->
+ [{map_field_assoc,A,abstract(K,A,E),abstract(V,A,E)}||{K,V}<-Fs].
-abstract_byte(Byte, L) when is_integer(Byte) ->
- {bin_element, L, {integer, L, Byte}, default, default};
-abstract_byte(Bits, L) ->
+abstract_byte(Byte, A) when is_integer(Byte) ->
+ {bin_element, A, {integer, A, Byte}, default, default};
+abstract_byte(Bits, A) ->
Sz = bit_size(Bits),
<<Val:Sz>> = Bits,
- {bin_element, L, {integer, L, Val}, {integer, L, Sz}, default}.
+ {bin_element, A, {integer, A, Val}, {integer, A, Sz}, default}.
%% Generate a list of tokens representing the abstract term.
@@ -1002,32 +1019,32 @@ tokens(Abs) ->
AbsTerm :: abstract_expr(),
MoreTokens :: [token()],
Tokens :: [token()].
-tokens({char,L,C}, More) -> [{char,L,C}|More];
-tokens({integer,L,N}, More) -> [{integer,L,N}|More];
-tokens({float,L,F}, More) -> [{float,L,F}|More];
-tokens({atom,L,A}, More) -> [{atom,L,A}|More];
-tokens({var,L,V}, More) -> [{var,L,V}|More];
-tokens({string,L,S}, More) -> [{string,L,S}|More];
-tokens({nil,L}, More) -> [{'[',L},{']',L}|More];
-tokens({cons,L,Head,Tail}, More) ->
- [{'[',L}|tokens(Head, tokens_tail(Tail, More))];
-tokens({tuple,L,[]}, More) ->
- [{'{',L},{'}',L}|More];
-tokens({tuple,L,[E|Es]}, More) ->
- [{'{',L}|tokens(E, tokens_tuple(Es, ?line(E), More))].
-
-tokens_tail({cons,L,Head,Tail}, More) ->
- [{',',L}|tokens(Head, tokens_tail(Tail, More))];
-tokens_tail({nil,L}, More) ->
- [{']',L}|More];
+tokens({char,A,C}, More) -> [{char,A,C}|More];
+tokens({integer,A,N}, More) -> [{integer,A,N}|More];
+tokens({float,A,F}, More) -> [{float,A,F}|More];
+tokens({atom,Aa,A}, More) -> [{atom,Aa,A}|More];
+tokens({var,A,V}, More) -> [{var,A,V}|More];
+tokens({string,A,S}, More) -> [{string,A,S}|More];
+tokens({nil,A}, More) -> [{'[',A},{']',A}|More];
+tokens({cons,A,Head,Tail}, More) ->
+ [{'[',A}|tokens(Head, tokens_tail(Tail, More))];
+tokens({tuple,A,[]}, More) ->
+ [{'{',A},{'}',A}|More];
+tokens({tuple,A,[E|Es]}, More) ->
+ [{'{',A}|tokens(E, tokens_tuple(Es, ?anno(E), More))].
+
+tokens_tail({cons,A,Head,Tail}, More) ->
+ [{',',A}|tokens(Head, tokens_tail(Tail, More))];
+tokens_tail({nil,A}, More) ->
+ [{']',A}|More];
tokens_tail(Other, More) ->
- L = ?line(Other),
- [{'|',L}|tokens(Other, [{']',L}|More])].
+ A = ?anno(Other),
+ [{'|',A}|tokens(Other, [{']',A}|More])].
-tokens_tuple([E|Es], Line, More) ->
- [{',',Line}|tokens(E, tokens_tuple(Es, ?line(E), More))];
-tokens_tuple([], Line, More) ->
- [{'}',Line}|More].
+tokens_tuple([E|Es], Anno, More) ->
+ [{',',Anno}|tokens(E, tokens_tuple(Es, ?anno(E), More))];
+tokens_tuple([], Anno, More) ->
+ [{'}',Anno}|More].
%% Give the relative precedences of operators.
@@ -1092,13 +1109,168 @@ max_prec() -> 900.
%%% longer apply. To get all present attributes as a property list
%%% get_attributes() should be used.
+-compile({nowarn_deprecated_function,{erl_scan,set_attribute,3}}).
set_line(L, F) ->
erl_scan:set_attribute(line, L, F).
+-compile({nowarn_deprecated_function,{erl_scan,attributes_info,2}}).
get_attribute(L, Name) ->
erl_scan:attributes_info(L, Name).
+-compile({nowarn_deprecated_function,{erl_scan,attributes_info,1}}).
get_attributes(L) ->
erl_scan:attributes_info(L).
+-spec map_anno(Fun, Abstr) -> NewAbstr when
+ Fun :: fun((Anno) -> Anno),
+ Anno :: erl_anno:anno(),
+ Abstr :: abstract_form() | abstract_expr(),
+ NewAbstr :: abstract_form() | abstract_expr().
+
+map_anno(F0, Abstr) ->
+ F = fun(A, Acc) -> {F0(A), Acc} end,
+ {NewAbstr, []} = modify_anno1(Abstr, [], F),
+ NewAbstr.
+
+-spec fold_anno(Fun, Acc0, Abstr) -> NewAbstr when
+ Fun :: fun((Anno, AccIn) -> AccOut),
+ Anno :: erl_anno:anno(),
+ Acc0 :: term(),
+ AccIn :: term(),
+ AccOut :: term(),
+ Abstr :: abstract_form() | abstract_expr(),
+ NewAbstr :: abstract_form() | abstract_expr().
+
+fold_anno(F0, Acc0, Abstr) ->
+ F = fun(A, Acc) -> {A, F0(A, Acc)} end,
+ {_, NewAcc} = modify_anno1(Abstr, Acc0, F),
+ NewAcc.
+
+-spec mapfold_anno(Fun, Acc0, Abstr) -> {NewAbstr, Acc1} when
+ Fun :: fun((Anno, AccIn) -> {Anno, AccOut}),
+ Anno :: erl_anno:anno(),
+ Acc0 :: term(),
+ Acc1 :: term(),
+ AccIn :: term(),
+ AccOut :: term(),
+ Abstr :: abstract_form() | abstract_expr(),
+ NewAbstr :: abstract_form() | abstract_expr().
+
+mapfold_anno(F, Acc0, Abstr) ->
+ modify_anno1(Abstr, Acc0, F).
+
+-spec new_anno(Term) -> Abstr when
+ Term :: term(),
+ Abstr :: abstract_form() | abstract_expr().
+
+new_anno(Term) ->
+ map_anno(fun erl_anno:new/1, Term).
+
+-spec anno_to_term(Abstr) -> term() when
+ Abstr :: abstract_form() | abstract_expr().
+
+anno_to_term(Abstract) ->
+ map_anno(fun erl_anno:to_term/1, Abstract).
+
+-spec anno_from_term(Term) -> abstract_form() | abstract_expr() when
+ Term :: term().
+
+anno_from_term(Term) ->
+ map_anno(fun erl_anno:from_term/1, Term).
+
+%% Forms.
+%% Recognize what sys_pre_expand does:
+modify_anno1({'fun',A,F,{_,_,_}=Id}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {F1,Ac2} = modify_anno1(F, Ac1, Mf),
+ {{'fun',A1,F1,Id},Ac2};
+modify_anno1({named_fun,A,N,F,{_,_,_}=Id}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {F1,Ac2} = modify_anno1(F, Ac1, Mf),
+ {{named_fun,A1,N,F1,Id},Ac2};
+modify_anno1({attribute,A,N,[V]}, Ac, Mf) ->
+ {{attribute,A1,N1,V1},Ac1} = modify_anno1({attribute,A,N,V}, Ac, Mf),
+ {{attribute,A1,N1,[V1]},Ac1};
+%% End of sys_pre_expand special forms.
+modify_anno1({function,F,A}, Ac, _Mf) ->
+ {{function,F,A},Ac};
+modify_anno1({function,M,F,A}, Ac, Mf) ->
+ {M1,Ac1} = modify_anno1(M, Ac, Mf),
+ {F1,Ac2} = modify_anno1(F, Ac1, Mf),
+ {A1,Ac3} = modify_anno1(A, Ac2, Mf),
+ {{function,M1,F1,A1},Ac3};
+modify_anno1({attribute,A,record,{Name,Fields}}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {Fields1,Ac2} = modify_anno1(Fields, Ac1, Mf),
+ {{attribute,A1,record,{Name,Fields1}},Ac2};
+modify_anno1({attribute,A,spec,{Fun,Types}}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {Types1,Ac2} = modify_anno1(Types, Ac1, Mf),
+ {{attribute,A1,spec,{Fun,Types1}},Ac2};
+modify_anno1({attribute,A,callback,{Fun,Types}}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {Types1,Ac2} = modify_anno1(Types, Ac1, Mf),
+ {{attribute,A1,callback,{Fun,Types1}},Ac2};
+modify_anno1({attribute,A,type,{TypeName,TypeDef,Args}}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {TypeDef1,Ac2} = modify_anno1(TypeDef, Ac1, Mf),
+ {Args1,Ac3} = modify_anno1(Args, Ac2, Mf),
+ {{attribute,A1,type,{TypeName,TypeDef1,Args1}},Ac3};
+modify_anno1({attribute,A,opaque,{TypeName,TypeDef,Args}}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {TypeDef1,Ac2} = modify_anno1(TypeDef, Ac1, Mf),
+ {Args1,Ac3} = modify_anno1(Args, Ac2, Mf),
+ {{attribute,A1,opaque,{TypeName,TypeDef1,Args1}},Ac3};
+modify_anno1({attribute,A,Attr,Val}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {{attribute,A1,Attr,Val},Ac1};
+modify_anno1({warning,W}, Ac, _Mf) ->
+ {{warning,W},Ac};
+modify_anno1({error,W}, Ac, _Mf) ->
+ {{error,W},Ac};
+%% Expressions.
+modify_anno1({clauses,Cs}, Ac, Mf) ->
+ {Cs1,Ac1} = modify_anno1(Cs, Ac, Mf),
+ {{clauses,Cs1},Ac1};
+modify_anno1({typed_record_field,Field,Type}, Ac, Mf) ->
+ {Field1,Ac1} = modify_anno1(Field, Ac, Mf),
+ {Type1,Ac2} = modify_anno1(Type, Ac1, Mf),
+ {{typed_record_field,Field1,Type1},Ac2};
+modify_anno1({Tag,A}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {{Tag,A1},Ac1};
+modify_anno1({Tag,A,E1}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {E11,Ac2} = modify_anno1(E1, Ac1, Mf),
+ {{Tag,A1,E11},Ac2};
+modify_anno1({Tag,A,E1,E2}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {E11,Ac2} = modify_anno1(E1, Ac1, Mf),
+ {E21,Ac3} = modify_anno1(E2, Ac2, Mf),
+ {{Tag,A1,E11,E21},Ac3};
+modify_anno1({bin_element,A,E1,E2,TSL}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {E11,Ac2} = modify_anno1(E1, Ac1, Mf),
+ {E21,Ac3} = modify_anno1(E2, Ac2, Mf),
+ {{bin_element,A1,E11,E21, TSL},Ac3};
+modify_anno1({Tag,A,E1,E2,E3}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {E11,Ac2} = modify_anno1(E1, Ac1, Mf),
+ {E21,Ac3} = modify_anno1(E2, Ac2, Mf),
+ {E31,Ac4} = modify_anno1(E3, Ac3, Mf),
+ {{Tag,A1,E11,E21,E31},Ac4};
+modify_anno1({Tag,A,E1,E2,E3,E4}, Ac, Mf) ->
+ {A1,Ac1} = Mf(A, Ac),
+ {E11,Ac2} = modify_anno1(E1, Ac1, Mf),
+ {E21,Ac3} = modify_anno1(E2, Ac2, Mf),
+ {E31,Ac4} = modify_anno1(E3, Ac3, Mf),
+ {E41,Ac5} = modify_anno1(E4, Ac4, Mf),
+ {{Tag,A1,E11,E21,E31,E41},Ac5};
+modify_anno1([H|T], Ac, Mf) ->
+ {H1,Ac1} = modify_anno1(H, Ac, Mf),
+ {T1,Ac2} = modify_anno1(T, Ac1, Mf),
+ {[H1|T1],Ac2};
+modify_anno1([], Ac, _Mf) -> {[],Ac};
+modify_anno1(E, Ac, _Mf) when not is_tuple(E), not is_list(E) -> {E,Ac}.
+
%% vim: ft=erlang
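Aside (illustrative, not part of the patch): the new annotation traversal API (map_anno/2, fold_anno/3, mapfold_anno/3, new_anno/1, anno_to_term/1, anno_from_term/1) is all built on modify_anno1/3 above. A small usage sketch; the example form and the Bump fun are made up:

    {ok, Ts, _} = erl_scan:string("foo() -> ok. "),
    {ok, Form} = erl_parse:parse_form(Ts),
    %% Bump every annotation's line by 100.
    Bump = fun(Anno) -> erl_anno:set_line(erl_anno:line(Anno) + 100, Anno) end,
    Form1 = erl_parse:map_anno(Bump, Form),
    %% Collect all line numbers occurring in the rewritten form.
    Lines = erl_parse:fold_anno(fun(Anno, Acc) -> [erl_anno:line(Anno)|Acc] end, [], Form1).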
diff --git a/lib/stdlib/src/erl_pp.erl b/lib/stdlib/src/erl_pp.erl
index 469ce544c7..623a29f923 100644
--- a/lib/stdlib/src/erl_pp.erl
+++ b/lib/stdlib/src/erl_pp.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -46,6 +46,23 @@
-record(options, {hook, encoding, opts}).
+%-define(DEBUG, true).
+
+-ifdef(DEBUG).
+-define(TEST(T),
+ %% Assumes that erl_anno has been compiled with DEBUG=true.
+ %% erl_pp does not use the annotations, but test them anyway.
+ %% Note: hooks are not handled.
+ _ = try
+ erl_parse:map_anno(fun(A) when is_list(A) -> A end, T)
+ catch
+ _:_ ->
+ erlang:error(badarg, [T])
+ end).
+-else.
+-define(TEST(T), ok).
+-endif.
+
%%%
%%% Exported functions
%%%
@@ -61,6 +78,7 @@ form(Thing) ->
Options :: options()).
form(Thing, Options) ->
+ ?TEST(Thing),
State = state(Options),
frmt(lform(Thing, options(Options), State), State).
@@ -75,6 +93,7 @@ attribute(Thing) ->
Options :: options()).
attribute(Thing, Options) ->
+ ?TEST(Thing),
State = state(Options),
frmt(lattribute(Thing, options(Options), State), State).
@@ -89,6 +108,7 @@ function(F) ->
Options :: options()).
function(F, Options) ->
+ ?TEST(F),
frmt(lfunction(F, options(Options)), state(Options)).
-spec(guard(Guard) -> io_lib:chars() when
@@ -102,6 +122,7 @@ guard(Gs) ->
Options :: options()).
guard(Gs, Options) ->
+ ?TEST(Gs),
frmt(lguard(Gs, options(Options)), state(Options)).
-spec(exprs(Expressions) -> io_lib:chars() when
@@ -123,12 +144,14 @@ exprs(Es, Options) ->
Options :: options()).
exprs(Es, I, Options) ->
+ ?TEST(Es),
frmt({seq,[],[],[$,],lexprs(Es, options(Options))}, I, state(Options)).
-spec(expr(Expression) -> io_lib:chars() when
Expression :: erl_parse:abstract_expr()).
expr(E) ->
+ ?TEST(E),
frmt(lexpr(E, 0, options(none)), state(none)).
-spec(expr(Expression, Options) -> io_lib:chars() when
@@ -136,6 +159,7 @@ expr(E) ->
Options :: options()).
expr(E, Options) ->
+ ?TEST(E),
frmt(lexpr(E, 0, options(Options)), state(Options)).
-spec(expr(Expression, Indent, Options) -> io_lib:chars() when
@@ -144,6 +168,7 @@ expr(E, Options) ->
Options :: options()).
expr(E, I, Options) ->
+ ?TEST(E),
frmt(lexpr(E, 0, options(Options)), I, state(Options)).
-spec(expr(Expression, Indent, Precedence, Options) -> io_lib:chars() when
@@ -153,6 +178,7 @@ expr(E, I, Options) ->
Options :: options()).
expr(E, I, P, Options) ->
+ ?TEST(E),
frmt(lexpr(E, P, options(Options)), I, state(Options)).
%%%
@@ -213,24 +239,25 @@ lattribute({attribute,_Line,Name,Arg}, Opts, State) ->
[lattribute(Name, Arg, Opts, State),leaf(".\n")].
lattribute(module, {M,Vs}, _Opts, _State) ->
- attr("module",[{var,0,pname(M)},
- foldr(fun(V, C) -> {cons,0,{var,0,V},C}
- end, {nil,0}, Vs)]);
+ A = a0(),
+ attr("module",[{var,A,pname(M)},
+ foldr(fun(V, C) -> {cons,A,{var,A,V},C}
+ end, {nil,A}, Vs)]);
lattribute(module, M, _Opts, _State) ->
- attr("module", [{var,0,pname(M)}]);
+ attr("module", [{var,a0(),pname(M)}]);
lattribute(export, Falist, _Opts, _State) ->
- call({var,0,"-export"}, [falist(Falist)], 0, options(none));
+ call({var,a0(),"-export"}, [falist(Falist)], 0, options(none));
lattribute(import, Name, _Opts, _State) when is_list(Name) ->
- attr("import", [{var,0,pname(Name)}]);
+ attr("import", [{var,a0(),pname(Name)}]);
lattribute(import, {From,Falist}, _Opts, _State) ->
- attr("import",[{var,0,pname(From)},falist(Falist)]);
+ attr("import",[{var,a0(),pname(From)},falist(Falist)]);
lattribute(optional_callbacks, Falist, Opts, _State) ->
ArgL = try falist(Falist)
catch _:_ -> abstract(Falist, Opts)
end,
- call({var,0,"-optional_callbacks"}, [ArgL], 0, options(none));
+ call({var,a0(),"-optional_callbacks"}, [ArgL], 0, options(none));
lattribute(file, {Name,Line}, _Opts, State) ->
- attr("file", [{var,0,(State#pp.string_fun)(Name)},{integer,0,Line}]);
+ attr("file", [{var,a0(),(State#pp.string_fun)(Name)},{integer,a0(),Line}]);
lattribute(record, {Name,Is}, Opts, _State) ->
Nl = leaf(format("-record(~w,", [Name])),
[{first,Nl,record_fields(Is, Opts)},$)];
@@ -242,7 +269,7 @@ abstract(Arg, #options{encoding = Encoding}) ->
typeattr(Tag, {TypeName,Type,Args}, _Opts) ->
{first,leaf("-"++atom_to_list(Tag)++" "),
- typed(call({atom,0,TypeName}, Args, 0, options(none)), Type)}.
+ typed(call({atom,a0(),TypeName}, Args, 0, options(none)), Type)}.
ltype({ann_type,_Line,[V,T]}) ->
typed(lexpr(V, options(none)), T);
@@ -384,7 +411,7 @@ ltypes(Ts, F) ->
[F(T) || T <- Ts].
attr(Name, Args) ->
- call({var,0,format("-~s", [Name])}, Args, 0, options(none)).
+ call({var,a0(),format("-~s", [Name])}, Args, 0, options(none)).
pname(['' | As]) ->
[$. | pname(As)];
@@ -396,9 +423,10 @@ pname(A) when is_atom(A) ->
write(A).
falist([]) ->
- {nil,0};
+ {nil,a0()};
falist([{Name,Arity}|Falist]) ->
- {cons,0,{var,0,format("~w/~w", [Name,Arity])},falist(Falist)}.
+ A = a0(),
+ {cons,A,{var,A,format("~w/~w", [Name,Arity])},falist(Falist)}.
lfunction({function,_Line,Name,_Arity,Cs}, Opts) ->
Cll = nl_clauses(fun (C, H) -> func_clause(Name, C, H) end, $;, Opts, Cs),
@@ -1111,6 +1139,9 @@ write_char(C, PP) ->
%% Utilities
%%
+a0() ->
+ erl_anno:new(0).
+
chars_size([C | Es]) when is_integer(C) ->
1 + chars_size(Es);
chars_size([E | Es]) ->
diff --git a/lib/stdlib/src/erl_scan.erl b/lib/stdlib/src/erl_scan.erl
index 4960a86760..5e7cc5f6d6 100644
--- a/lib/stdlib/src/erl_scan.erl
+++ b/lib/stdlib/src/erl_scan.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -55,6 +55,15 @@
token_info/1,token_info/2,
attributes_info/1,attributes_info/2,set_attribute/3]).
+-export([column/1,end_location/1,line/1,location/1,text/1,
+ category/1,symbol/1]).
+
+-deprecated([{attributes_info, 1, next_major_release},
+ {attributes_info, 2, next_major_release},
+ {set_attribute, 3, next_major_release},
+ {token_info, 1, next_major_release},
+ {token_info, 2, next_major_release}]).
+
%%% Private
-export([continuation_location/1]).
@@ -78,9 +87,9 @@
-define(SETATTRFUN(F), is_function(F, 1)).
-type category() :: atom().
--type column() :: pos_integer().
--type line() :: integer().
--type location() :: line() | {line(),column()}.
+-type column() :: pos_integer(). % Deprecated
+-type line() :: integer(). % Deprecated
+-type location() :: line() | {line(),column()}. % Deprecated
-type resword_fun() :: fun((atom()) -> boolean()).
-type option() :: 'return' | 'return_white_spaces' | 'return_comments'
| 'text' | {'reserved_word_fun', resword_fun()}.
@@ -197,6 +206,56 @@ continuation_location({erl_scan_continuation,_,no_col,_,Line,_,_,_}) ->
continuation_location({erl_scan_continuation,_,Col,_,Line,_,_,_}) ->
{Line,Col}.
+-spec column(Token) -> erl_anno:column() | 'undefined' when
+ Token :: token().
+
+column(Token) ->
+ erl_anno:column(element(2, Token)).
+
+-spec end_location(Token) -> erl_anno:location() | 'undefined' when
+ Token :: token().
+
+end_location(Token) ->
+ erl_anno:end_location(element(2, Token)).
+
+-spec line(Token) -> erl_anno:line() when
+ Token :: token().
+
+line(Token) ->
+ erl_anno:line(element(2, Token)).
+
+-spec location(Token) -> erl_anno:location() when
+ Token :: token().
+
+location(Token) ->
+ erl_anno:location(element(2, Token)).
+
+-spec text(Token) -> erl_anno:text() | 'undefined' when
+ Token :: token().
+
+text(Token) ->
+ erl_anno:text(element(2, Token)).
+
+-spec category(Token) -> category() when
+ Token :: token().
+
+category({Category,_Anno}) ->
+ Category;
+category({Category,_Anno,_Symbol}) ->
+ Category;
+category(T) ->
+ erlang:error(badarg, [T]).
+
+-spec symbol(Token) -> symbol() when
+ Token :: token().
+
+symbol({Category,_Anno}) ->
+ Category;
+symbol({_Category,_Anno,Symbol}) ->
+ Symbol;
+symbol(T) ->
+ erlang:error(badarg, [T]).
+
-type attribute_item() :: 'column' | 'length' | 'line'
| 'location' | 'text'.
-type info_location() :: location() | term().
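Aside (illustrative, not part of the patch): the new accessors replace the now-deprecated token_info/attributes_info calls for the common cases. A sketch, assuming a token scanned with a {Line,Column} start location and the text option:

    {ok, [Tok|_], _} = erl_scan:string("hello world", {1,1}, [text]),
    atom    = erl_scan:category(Tok),
    hello   = erl_scan:symbol(Tok),
    {1,1}   = erl_scan:location(Tok),
    "hello" = erl_scan:text(Tok),
    {1,6}   = erl_scan:end_location(Tok).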
@@ -276,7 +335,17 @@ attributes_info({Line,Column}, column=Item) when ?ALINE(Line),
attributes_info(Line, column) when ?ALINE(Line) ->
undefined;
attributes_info(Attrs, column=Item) ->
- attr_info(Attrs, Item);
+ case attr_info(Attrs, Item) of
+ undefined ->
+ case erl_anno:column(Attrs) of
+ undefined ->
+ undefined;
+ Column ->
+ {Item,Column}
+ end;
+ T ->
+ T
+ end;
attributes_info(Attrs, length=Item) ->
case attributes_info(Attrs, text) of
undefined ->
@@ -290,14 +359,26 @@ attributes_info({Line,Column}, line=Item) when ?ALINE(Line),
?COLUMN(Column) ->
{Item,Line};
attributes_info(Attrs, line=Item) ->
- attr_info(Attrs, Item);
+ case attr_info(Attrs, Item) of
+ undefined ->
+ case attr_info(Attrs, location) of
+ {location,{Line,_Column}} ->
+ {Item,Line};
+ {location,Line} ->
+ {Item,Line};
+ undefined ->
+ undefined
+ end;
+ T ->
+ T
+ end;
attributes_info({Line,Column}=Location, location=Item) when ?ALINE(Line),
?COLUMN(Column) ->
{Item,Location};
attributes_info(Line, location=Item) when ?ALINE(Line) ->
{Item,Line};
attributes_info(Attrs, location=Item) ->
- {line,Line} = attributes_info(Attrs, line), % assume line is present
+ {line,Line} = attributes_info(Attrs, line),
case attributes_info(Attrs, column) of
undefined ->
%% If set_attribute() has assigned a term such as {17,42}
@@ -419,12 +500,28 @@ set_attr(line, {Line,Column}, Fun) when ?ALINE(Line), ?COLUMN(Column) ->
[{line,Ln},{column,Column}]
end;
set_attr(line=Tag, Attrs, Fun) when is_list(Attrs) ->
- {line,Line} = lists:keyfind(Tag, 1, Attrs),
- case lists:keyreplace(Tag, 1, Attrs, {line,Fun(Line)}) of
- [{line,Ln}] when ?ALINE(Ln) ->
- Ln;
- As ->
- As
+ case lists:keyfind(Tag, 1, Attrs) of
+ {line,Line} ->
+ case lists:keyreplace(Tag, 1, Attrs, {line,Fun(Line)}) of
+ [{line,Ln}] when ?ALINE(Ln) ->
+ Ln;
+ As ->
+ As
+ end;
+ false ->
+ {location, Location} = lists:keyfind(location, 1, Attrs),
+ Ln = case Location of
+ {Line,Column} when ?ALINE(Line), ?COLUMN(Column) ->
+ {Fun(Line),Column};
+ _ ->
+ Fun(Location)
+ end,
+ case lists:keyreplace(location, 1, Attrs, {location,Ln}) of
+ [{location,Ln}] when ?ALINE(Ln) ->
+ Ln;
+ As ->
+ As
+ end
end;
set_attr(T1, T2, T3) ->
erlang:error(badarg, [T1,T2,T3]).
@@ -708,17 +805,17 @@ scan_name(Cs, Ncs) ->
-define(STR(St, S), if St#erl_scan.text -> S; true -> [] end).
scan_dot([$%|_]=Cs, St, Line, Col, Toks, Ncs) ->
- Attrs = attributes(Line, Col, St, Ncs),
- {ok,[{dot,Attrs}|Toks],Cs,Line,incr_column(Col, 1)};
+ Anno = anno(Line, Col, St, Ncs),
+ {ok,[{dot,Anno}|Toks],Cs,Line,incr_column(Col, 1)};
scan_dot([$\n=C|Cs], St, Line, Col, Toks, Ncs) ->
- Attrs = attributes(Line, Col, St, ?STR(St, Ncs++[C])),
- {ok,[{dot,Attrs}|Toks],Cs,Line+1,new_column(Col, 1)};
+ Anno = anno(Line, Col, St, ?STR(St, Ncs++[C])),
+ {ok,[{dot,Anno}|Toks],Cs,Line+1,new_column(Col, 1)};
scan_dot([C|Cs], St, Line, Col, Toks, Ncs) when ?WHITE_SPACE(C) ->
- Attrs = attributes(Line, Col, St, ?STR(St, Ncs++[C])),
- {ok,[{dot,Attrs}|Toks],Cs,Line,incr_column(Col, 2)};
+ Anno = anno(Line, Col, St, ?STR(St, Ncs++[C])),
+ {ok,[{dot,Anno}|Toks],Cs,Line,incr_column(Col, 2)};
scan_dot(eof=Cs, St, Line, Col, Toks, Ncs) ->
- Attrs = attributes(Line, Col, St, Ncs),
- {ok,[{dot,Attrs}|Toks],Cs,Line,incr_column(Col, 1)};
+ Anno = anno(Line, Col, St, Ncs),
+ {ok,[{dot,Anno}|Toks],Cs,Line,incr_column(Col, 1)};
scan_dot(Cs, St, Line, Col, Toks, Ncs) ->
tok2(Cs, St, Line, Col, Toks, Ncs, '.', 1).
@@ -773,12 +870,12 @@ scan_nl_tabs(Cs, St, Line, Col, Toks, N) ->
%% stop anyway, nothing is gained by not collecting all white spaces.
scan_nl_white_space([$\n|Cs], #erl_scan{text = false}=St, Line, no_col=Col,
Toks0, Ncs) ->
- Toks = [{white_space,Line,lists:reverse(Ncs)}|Toks0],
+ Toks = [{white_space,anno(Line),lists:reverse(Ncs)}|Toks0],
scan_newline(Cs, St, Line+1, Col, Toks);
scan_nl_white_space([$\n|Cs], St, Line, Col, Toks, Ncs0) ->
Ncs = lists:reverse(Ncs0),
- Attrs = attributes(Line, Col, St, Ncs),
- Token = {white_space,Attrs,Ncs},
+ Anno = anno(Line, Col, St, Ncs),
+ Token = {white_space,Anno,Ncs},
scan_newline(Cs, St, Line+1, new_column(Col, length(Ncs)), [Token|Toks]);
scan_nl_white_space([C|Cs], St, Line, Col, Toks, Ncs) when ?WHITE_SPACE(C) ->
scan_nl_white_space(Cs, St, Line, Col, Toks, [C|Ncs]);
@@ -786,19 +883,20 @@ scan_nl_white_space([]=Cs, _St, Line, Col, Toks, Ncs) ->
{more,{Cs,Col,Toks,Line,Ncs,fun scan_nl_white_space/6}};
scan_nl_white_space(Cs, #erl_scan{text = false}=St, Line, no_col=Col,
Toks, Ncs) ->
- scan1(Cs, St, Line+1, Col, [{white_space,Line,lists:reverse(Ncs)}|Toks]);
+ Anno = anno(Line),
+ scan1(Cs, St, Line+1, Col, [{white_space,Anno,lists:reverse(Ncs)}|Toks]);
scan_nl_white_space(Cs, St, Line, Col, Toks, Ncs0) ->
Ncs = lists:reverse(Ncs0),
- Attrs = attributes(Line, Col, St, Ncs),
- Token = {white_space,Attrs,Ncs},
+ Anno = anno(Line, Col, St, Ncs),
+ Token = {white_space,Anno,Ncs},
scan1(Cs, St, Line+1, new_column(Col, length(Ncs)), [Token|Toks]).
newline_end(Cs, #erl_scan{text = false}=St, Line, no_col=Col,
Toks, _N, Ncs) ->
- scan1(Cs, St, Line+1, Col, [{white_space,Line,Ncs}|Toks]);
+ scan1(Cs, St, Line+1, Col, [{white_space,anno(Line),Ncs}|Toks]);
newline_end(Cs, St, Line, Col, Toks, N, Ncs) ->
- Attrs = attributes(Line, Col, St, Ncs),
- scan1(Cs, St, Line+1, new_column(Col, N), [{white_space,Attrs,Ncs}|Toks]).
+ Anno = anno(Line, Col, St, Ncs),
+ scan1(Cs, St, Line+1, new_column(Col, N), [{white_space,Anno,Ncs}|Toks]).
scan_spcs([$\s|Cs], St, Line, Col, Toks, N) when N < 16 ->
scan_spcs(Cs, St, Line, Col, Toks, N+1);
@@ -847,20 +945,20 @@ scan_char([$\\|Cs]=Cs0, St, Line, Col, Toks) ->
{eof,Ncol} ->
scan_error(char, Line, Col, Line, Ncol, eof);
{nl,Val,Str,Ncs,Ncol} ->
- Attrs = attributes(Line, Col, St, ?STR(St, "$\\"++Str)), %"
- Ntoks = [{char,Attrs,Val}|Toks],
+ Anno = anno(Line, Col, St, ?STR(St, "$\\"++Str)), %"
+ Ntoks = [{char,Anno,Val}|Toks],
scan1(Ncs, St, Line+1, Ncol, Ntoks);
{Val,Str,Ncs,Ncol} ->
- Attrs = attributes(Line, Col, St, ?STR(St, "$\\"++Str)), %"
- Ntoks = [{char,Attrs,Val}|Toks],
+ Anno = anno(Line, Col, St, ?STR(St, "$\\"++Str)), %"
+ Ntoks = [{char,Anno,Val}|Toks],
scan1(Ncs, St, Line, Ncol, Ntoks)
end;
scan_char([$\n=C|Cs], St, Line, Col, Toks) ->
- Attrs = attributes(Line, Col, St, ?STR(St, [$$,C])),
- scan1(Cs, St, Line+1, new_column(Col, 1), [{char,Attrs,C}|Toks]);
+ Anno = anno(Line, Col, St, ?STR(St, [$$,C])),
+ scan1(Cs, St, Line+1, new_column(Col, 1), [{char,Anno,C}|Toks]);
scan_char([C|Cs], St, Line, Col, Toks) when ?UNICODE(C) ->
- Attrs = attributes(Line, Col, St, ?STR(St, [$$,C])),
- scan1(Cs, St, Line, incr_column(Col, 2), [{char,Attrs,C}|Toks]);
+ Anno = anno(Line, Col, St, ?STR(St, [$$,C])),
+ scan1(Cs, St, Line, incr_column(Col, 2), [{char,Anno,C}|Toks]);
scan_char([C|_Cs], _St, Line, Col, _Toks) when ?CHAR(C) ->
scan_error({illegal,character}, Line, Col, Line, incr_column(Col, 1), eof);
scan_char([], _St, Line, Col, Toks) ->
@@ -879,8 +977,8 @@ scan_string(Cs, St, Line, Col, Toks, {Wcs,Str,Line0,Col0}) ->
Estr = string:substr(Nwcs, 1, 16), % Expanded escape chars.
scan_error({string,$\",Estr}, Line0, Col0, Nline, Ncol, Ncs); %"
{Ncs,Nline,Ncol,Nstr,Nwcs} ->
- Attrs = attributes(Line0, Col0, St, Nstr),
- scan1(Ncs, St, Nline, Ncol, [{string,Attrs,Nwcs}|Toks])
+ Anno = anno(Line0, Col0, St, Nstr),
+ scan1(Ncs, St, Nline, Ncol, [{string,Anno,Nwcs}|Toks])
end.
scan_qatom(Cs, St, Line, Col, Toks, {Wcs,Str,Line0,Col0}) ->
@@ -896,8 +994,8 @@ scan_qatom(Cs, St, Line, Col, Toks, {Wcs,Str,Line0,Col0}) ->
{Ncs,Nline,Ncol,Nstr,Nwcs} ->
case catch list_to_atom(Nwcs) of
A when is_atom(A) ->
- Attrs = attributes(Line0, Col0, St, Nstr),
- scan1(Ncs, St, Nline, Ncol, [{atom,Attrs,A}|Toks]);
+ Anno = anno(Line0, Col0, St, Nstr),
+ scan1(Ncs, St, Nline, Ncol, [{atom,Anno,A}|Toks]);
_ ->
scan_error({illegal,atom}, Line0, Col0, Nline, Ncol, Ncs)
end
@@ -1173,28 +1271,28 @@ scan_comment(Cs, St, Line, Col, Toks, Ncs0) ->
tok3(Cs, St, Line, Col, Toks, comment, Ncs, Ncs).
tok2(Cs, #erl_scan{text = false}=St, Line, no_col=Col, Toks, _Wcs, P) ->
- scan1(Cs, St, Line, Col, [{P,Line}|Toks]);
+ scan1(Cs, St, Line, Col, [{P,anno(Line)}|Toks]);
tok2(Cs, St, Line, Col, Toks, Wcs, P) ->
- Attrs = attributes(Line, Col, St, Wcs),
- scan1(Cs, St, Line, incr_column(Col, length(Wcs)), [{P,Attrs}|Toks]).
+ Anno = anno(Line, Col, St, Wcs),
+ scan1(Cs, St, Line, incr_column(Col, length(Wcs)), [{P,Anno}|Toks]).
tok2(Cs, #erl_scan{text = false}=St, Line, no_col=Col, Toks, _Wcs, P, _N) ->
- scan1(Cs, St, Line, Col, [{P,Line}|Toks]);
+ scan1(Cs, St, Line, Col, [{P,anno(Line)}|Toks]);
tok2(Cs, St, Line, Col, Toks, Wcs, P, N) ->
- Attrs = attributes(Line, Col, St, Wcs),
- scan1(Cs, St, Line, incr_column(Col, N), [{P,Attrs}|Toks]).
+ Anno = anno(Line, Col, St, Wcs),
+ scan1(Cs, St, Line, incr_column(Col, N), [{P,Anno}|Toks]).
tok3(Cs, #erl_scan{text = false}=St, Line, no_col=Col, Toks, Item, _S, Sym) ->
- scan1(Cs, St, Line, Col, [{Item,Line,Sym}|Toks]);
+ scan1(Cs, St, Line, Col, [{Item,anno(Line),Sym}|Toks]);
tok3(Cs, St, Line, Col, Toks, Item, String, Sym) ->
- Token = {Item,attributes(Line, Col, St, String),Sym},
+ Token = {Item,anno(Line, Col, St, String),Sym},
scan1(Cs, St, Line, incr_column(Col, length(String)), [Token|Toks]).
tok3(Cs, #erl_scan{text = false}=St, Line, no_col=Col, Toks, Item,
_String, Sym, _Length) ->
- scan1(Cs, St, Line, Col, [{Item,Line,Sym}|Toks]);
+ scan1(Cs, St, Line, Col, [{Item,anno(Line),Sym}|Toks]);
tok3(Cs, St, Line, Col, Toks, Item, String, Sym, Length) ->
- Token = {Item,attributes(Line, Col, St, String),Sym},
+ Token = {Item,anno(Line, Col, St, String),Sym},
scan1(Cs, St, Line, incr_column(Col, Length), [Token|Toks]).
scan_error(Error, Line, Col, EndLine, EndCol, Rest) ->
@@ -1205,23 +1303,28 @@ scan_error(Error, Line, Col, EndLine, EndCol, Rest) ->
scan_error(Error, ErrorLoc, EndLoc, Rest) ->
{{error,{ErrorLoc,?MODULE,Error},EndLoc},Rest}.
--compile({inline,[attributes/4]}).
+-compile({inline,[anno/4]}).
-attributes(Line, no_col, #erl_scan{text = false}, _String) ->
- Line;
-attributes(Line, no_col, #erl_scan{text = true}, String) ->
- [{line,Line},{text,String}];
-attributes(Line, Col, #erl_scan{text = false}, _String) ->
- {Line,Col};
-attributes(Line, Col, #erl_scan{text = true}, String) ->
- [{line,Line},{column,Col},{text,String}].
+anno(Line, no_col, #erl_scan{text = false}, _String) ->
+ anno(Line);
+anno(Line, no_col, #erl_scan{text = true}, String) ->
+ Anno = anno(Line),
+ erl_anno:set_text(String, Anno);
+anno(Line, Col, #erl_scan{text = false}, _String) ->
+ anno({Line, Col});
+anno(Line, Col, #erl_scan{text = true}, String) ->
+ Anno = anno({Line, Col}),
+ erl_anno:set_text(String, Anno).
location(Line, no_col) ->
Line;
location(Line, Col) when is_integer(Col) ->
{Line,Col}.
--compile({inline,[incr_column/2,new_column/2]}).
+-compile({inline,[anno/1,incr_column/2,new_column/2]}).
+
+anno(Location) ->
+ erl_anno:new(Location).
incr_column(no_col=Col, _N) ->
Col;
diff --git a/lib/stdlib/src/erl_tar.erl b/lib/stdlib/src/erl_tar.erl
index caa3276d09..72bd54fa29 100644
--- a/lib/stdlib/src/erl_tar.erl
+++ b/lib/stdlib/src/erl_tar.erl
@@ -300,7 +300,7 @@ format_error(Term) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
add1(TarFile, Bin, NameInArchive, Opts) when is_binary(Bin) ->
- Now = calendar:now_to_local_time(now()),
+ Now = calendar:now_to_local_time(erlang:timestamp()),
Info = #file_info{size = byte_size(Bin),
type = regular,
access = read_write,
diff --git a/lib/stdlib/src/escript.erl b/lib/stdlib/src/escript.erl
index 6bd0eb8a22..f0827009a5 100644
--- a/lib/stdlib/src/escript.erl
+++ b/lib/stdlib/src/escript.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2007-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2007-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -620,12 +620,13 @@ parse_source(S, File, Fd, StartLine, HeaderSz, CheckOnly) ->
{ok, {attribute,_, module, M} = Form} ->
epp_parse_file(Epp, S2#state{module = M}, [Form, FileForm]);
{ok, _} ->
- ModForm = {attribute,1,module, Module},
+ ModForm = {attribute,a1(),module, Module},
epp_parse_file2(Epp, S2, [ModForm, FileForm], OptModRes);
{error, _} ->
epp_parse_file2(Epp, S2, [FileForm], OptModRes);
- {eof, _LastLine} = Eof ->
- S#state{forms_or_bin = [FileForm, Eof]}
+ {eof, LastLine} ->
+ Anno = anno(LastLine),
+ S#state{forms_or_bin = [FileForm, {eof, Anno}]}
end,
ok = epp:close(Epp),
ok = file:close(Fd),
@@ -644,7 +645,7 @@ check_source(S, CheckOnly) ->
%% Optionally add export of main/1
Forms2 =
case ExpMain of
- false -> [{attribute,0,export, [{main,1}]} | Forms];
+ false -> [{attribute, a0(), export, [{main,1}]} | Forms];
true -> Forms
end,
Forms3 = [FileForm2, ModForm2 | Forms2],
@@ -663,7 +664,8 @@ check_source(S, CheckOnly) ->
end.
pre_def_macros(File) ->
- {MegaSecs, Secs, MicroSecs} = erlang:now(),
+ {MegaSecs, Secs, MicroSecs} = erlang:timestamp(),
+ Unique = erlang:unique_integer([positive]),
Replace = fun(Char) ->
case Char of
$\. -> $\_;
@@ -675,8 +677,9 @@ pre_def_macros(File) ->
CleanBase ++ "__" ++
"escript__" ++
integer_to_list(MegaSecs) ++ "__" ++
- integer_to_list(Secs) ++ "__" ++
- integer_to_list(MicroSecs),
+ integer_to_list(Secs) ++ "__" ++
+ integer_to_list(MicroSecs) ++ "__" ++
+ integer_to_list(Unique),
Module = list_to_atom(ModuleStr),
PreDefMacros = [{'MODULE', Module, redefine},
{'MODULE_STRING', ModuleStr, redefine}],
@@ -720,8 +723,9 @@ epp_parse_file2(Epp, S, Forms, Parsed) ->
io:format("~ts:~w: ~ts\n",
[S#state.file,Ln,Mod:format_error(Args)]),
epp_parse_file(Epp, S#state{n_errors = S#state.n_errors + 1}, [Form | Forms]);
- {eof, _LastLine} = Eof ->
- S#state{forms_or_bin = lists:reverse([Eof | Forms])}
+ {eof, LastLine} ->
+ Anno = anno(LastLine),
+ S#state{forms_or_bin = lists:reverse([{eof, Anno} | Forms])}
end.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -776,7 +780,8 @@ interpret(Forms, HasRecs, File, Args) ->
end,
Dict = parse_to_dict(Forms2),
ArgsA = erl_parse:abstract(Args, 0),
- Call = {call,0,{atom,0,main},[ArgsA]},
+ Anno = a0(),
+ Call = {call,Anno,{atom,Anno,main},[ArgsA]},
try
_ = erl_eval:expr(Call,
erl_eval:new_bindings(),
@@ -888,6 +893,15 @@ enc() ->
Enc -> [Enc]
end.
+a0() ->
+ anno(0).
+
+a1() ->
+ anno(1).
+
+anno(L) ->
+ erl_anno:new(L).
+
fatal(Str) ->
throw(Str).
diff --git a/lib/stdlib/src/ets.erl b/lib/stdlib/src/ets.erl
index 42b11a97e2..0e2d59d0c3 100644
--- a/lib/stdlib/src/ets.erl
+++ b/lib/stdlib/src/ets.erl
@@ -71,7 +71,8 @@
rename/2, safe_fixtable/2, select/1, select/2, select/3,
select_count/2, select_delete/2, select_reverse/1,
select_reverse/2, select_reverse/3, setopts/2, slot/2,
- update_counter/3, update_element/3]).
+ take/2,
+ update_counter/3, update_counter/4, update_element/3]).
-spec all() -> [Tab] when
Tab :: tab().
@@ -133,7 +134,9 @@ give_away(_, _, _) ->
| {owner, pid()}
| {protection, access()}
| {size, non_neg_integer()}
- | {type, type()}.
+ | {type, type()}
+ | {write_concurrency, boolean()}
+ | {read_concurrency, boolean()}.
info(_) ->
erlang:nif_error(undef).
@@ -142,7 +145,8 @@ info(_) ->
Tab :: tab(),
Item :: compressed | fixed | heir | keypos | memory
| name | named_table | node | owner | protection
- | safe_fixed | size | stats | type,
+ | safe_fixed | size | stats | type
+ | write_concurrency | read_concurrency,
Value :: term().
info(_, _) ->
@@ -400,6 +404,14 @@ setopts(_, _) ->
slot(_, _) ->
erlang:nif_error(undef).
+-spec take(Tab, Key) -> [Object] when
+ Tab :: tab(),
+ Key :: term(),
+ Object :: tuple().
+
+take(_, _) ->
+ erlang:nif_error(undef).
+
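Aside (illustrative, not part of the patch): take/2 is only a stub here (the real implementation is a BIF); semantically it is a lookup of the key plus deletion of the matched objects in one operation. A sketch with made-up table and key names:

    T = ets:new(test, [set]),
    true = ets:insert(T, {foo, 1}),
    [{foo, 1}] = ets:take(T, foo),   %% returns the objects and removes them
    [] = ets:lookup(T, foo),
    [] = ets:take(T, missing_key).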
-spec update_counter(Tab, Key, UpdateOp) -> Result when
Tab :: tab(),
Key :: term(),
@@ -427,6 +439,38 @@ slot(_, _) ->
update_counter(_, _, _) ->
erlang:nif_error(undef).
+-spec update_counter(Tab, Key, UpdateOp, Default) -> Result when
+ Tab :: tab(),
+ Key :: term(),
+ UpdateOp :: {Pos, Incr}
+ | {Pos, Incr, Threshold, SetValue},
+ Pos :: integer(),
+ Incr :: integer(),
+ Threshold :: integer(),
+ SetValue :: integer(),
+ Result :: integer(),
+ Default :: tuple();
+ (Tab, Key, [UpdateOp], Default) -> [Result] when
+ Tab :: tab(),
+ Key :: term(),
+ UpdateOp :: {Pos, Incr}
+ | {Pos, Incr, Threshold, SetValue},
+ Pos :: integer(),
+ Incr :: integer(),
+ Threshold :: integer(),
+ SetValue :: integer(),
+ Result :: integer(),
+ Default :: tuple();
+ (Tab, Key, Incr, Default) -> Result when
+ Tab :: tab(),
+ Key :: term(),
+ Incr :: integer(),
+ Result :: integer(),
+ Default :: tuple().
+
+update_counter(_, _, _, _) ->
+ erlang:nif_error(undef).
+
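Aside (illustrative, not part of the patch): update_counter/4 behaves like update_counter/3 except that Default is inserted first when Key is missing, so a counter no longer needs a separate insert_new/2. A sketch with made-up names:

    C = ets:new(counters, [set]),
    %% Key absent: {hits, 0} is inserted, then position 2 is incremented.
    1 = ets:update_counter(C, hits, {2, 1}, {hits, 0}),
    2 = ets:update_counter(C, hits, {2, 1}, {hits, 0}).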
-spec update_element(Tab, Key, ElementSpec :: {Pos, Value}) -> boolean() when
Tab :: tab(),
Key :: term(),
@@ -695,7 +739,8 @@ do_filter(Tab, Key, F, A, Ack) ->
-record(filetab_options,
{
object_count = false :: boolean(),
- md5sum = false :: boolean()
+ md5sum = false :: boolean(),
+ sync = false :: boolean()
}).
-spec tab2file(Tab, Filename) -> 'ok' | {'error', Reason} when
@@ -710,7 +755,7 @@ tab2file(Tab, File) ->
Tab :: tab(),
Filename :: file:name(),
Options :: [Option],
- Option :: {'extended_info', [ExtInfo]},
+ Option :: {'extended_info', [ExtInfo]} | {'sync', boolean()},
ExtInfo :: 'md5sum' | 'object_count',
Reason :: term().
@@ -791,6 +836,15 @@ tab2file(Tab, File, Options) ->
List ->
LogFun(NewState1,[['$end_of_table',List]])
end,
+ case FtOptions#filetab_options.sync of
+ true ->
+ case disk_log:sync(Name) of
+ ok -> ok;
+ {error, Reason2} -> throw(Reason2)
+ end;
+ false ->
+ ok
+ end,
disk_log:close(Name)
catch
throw:TReason ->
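Aside (illustrative, not part of the patch): with the new {sync, true} option the dump is flushed via disk_log:sync/1 before the log is closed, reducing the risk of a partially written file if the system crashes shortly after the call returns. A sketch with made-up names:

    T = ets:new(t, [set]),
    true = ets:insert(T, {key, value}),
    ok = ets:tab2file(T, "t.dump", [{extended_info, [md5sum]}, {sync, true}]).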
@@ -843,23 +897,24 @@ md5terms(State, [H|T]) ->
{FinState, [B|TL]}.
parse_ft_options(Options) when is_list(Options) ->
- {Opt,Rest} = case (catch lists:keytake(extended_info,1,Options)) of
- false ->
- {[],Options};
- {value,{extended_info,L},R} when is_list(L) ->
- {L,R}
- end,
- case Rest of
- [] ->
- parse_ft_info_options(#filetab_options{}, Opt);
- Other ->
- throw({unknown_option, Other})
- end;
-parse_ft_options(Malformed) ->
+ {ok, parse_ft_options(Options, #filetab_options{}, false)}.
+
+parse_ft_options([], FtOpt, _) ->
+ FtOpt;
+parse_ft_options([{sync,true} | Rest], FtOpt, EI) ->
+ parse_ft_options(Rest, FtOpt#filetab_options{sync = true}, EI);
+parse_ft_options([{sync,false} | Rest], FtOpt, EI) ->
+ parse_ft_options(Rest, FtOpt, EI);
+parse_ft_options([{extended_info,L} | Rest], FtOpt0, false) ->
+ FtOpt1 = parse_ft_info_options(FtOpt0, L),
+ parse_ft_options(Rest, FtOpt1, true);
+parse_ft_options([Other | _], _, _) ->
+ throw({unknown_option, Other});
+parse_ft_options(Malformed, _, _) ->
throw({malformed_option, Malformed}).
parse_ft_info_options(FtOpt,[]) ->
- {ok,FtOpt};
+ FtOpt;
parse_ft_info_options(FtOpt,[object_count | T]) ->
parse_ft_info_options(FtOpt#filetab_options{object_count = true}, T);
parse_ft_info_options(FtOpt,[md5sum | T]) ->
@@ -1613,13 +1668,18 @@ choice(Height, Width, P, Mode, Tab, Key, Turn, Opos) ->
end.
get_line(P, Default) ->
- case io:get_line(P) of
+ case line_string(io:get_line(P)) of
"\n" ->
Default;
L ->
L
end.
+%% If the standard input is set to binary mode,
+%% convert it to a list so we can properly match.
+line_string(Binary) when is_binary(Binary) -> unicode:characters_to_list(Binary);
+line_string(Other) -> Other.
+
nonl(S) -> string:strip(S, right, $\n).
print_number(Tab, Key, Num) ->
diff --git a/lib/stdlib/src/file_sorter.erl b/lib/stdlib/src/file_sorter.erl
index 687d72b4bd..61b489513a 100644
--- a/lib/stdlib/src/file_sorter.erl
+++ b/lib/stdlib/src/file_sorter.erl
@@ -1425,8 +1425,8 @@ tmp_prefix1(Dir, TmpDirOpt) ->
U = "_",
Node = node(),
Pid = os:getpid(),
- {MSecs,Secs,MySecs} = now(),
- F = lists:concat(["fs_",Node,U,Pid,U,MSecs,U,Secs,U,MySecs,"."]),
+ Unique = erlang:unique_integer([positive]),
+ F = lists:concat(["fs_",Node,U,Pid,U,Unique,"."]),
TmpDir = case TmpDirOpt of
default ->
Dir;
diff --git a/lib/stdlib/src/filename.erl b/lib/stdlib/src/filename.erl
index 632af17e2a..68bd4f71cc 100644
--- a/lib/stdlib/src/filename.erl
+++ b/lib/stdlib/src/filename.erl
@@ -648,7 +648,7 @@ split(Name0) ->
unix_splitb(Name) ->
L = binary:split(Name,[<<"/">>],[global]),
LL = case L of
- [<<>>|Rest] ->
+ [<<>>|Rest] when Rest =/= [] ->
[<<"/">>|Rest];
_ ->
L
diff --git a/lib/stdlib/src/gb_sets.erl b/lib/stdlib/src/gb_sets.erl
index 0a26d0182d..d3fbd542f7 100644
--- a/lib/stdlib/src/gb_sets.erl
+++ b/lib/stdlib/src/gb_sets.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -137,6 +137,10 @@
%% approach is that it does not require the complete list of all
%% elements to be built in memory at one time.
%%
+%% - iterator_from(X, S): returns an iterator that can be used for
+%% traversing the elements of set S greater than or equal to X;
+%% see `next'.
+%%
%% - next(T): returns {X, T1} where X is the smallest element referred
%% to by the iterator T, and T1 is the new iterator to be used for
%% traversing the remaining elements, or the atom `none' if no
@@ -157,8 +161,8 @@
insert/2, add/2, delete/2, delete_any/2, balance/1, union/2,
union/1, intersection/2, intersection/1, is_disjoint/2, difference/2,
is_subset/2, to_list/1, from_list/1, from_ordset/1, smallest/1,
- largest/1, take_smallest/1, take_largest/1, iterator/1, next/1,
- filter/2, fold/3, is_set/1]).
+ largest/1, take_smallest/1, take_largest/1, iterator/1,
+ iterator_from/2, next/1, filter/2, fold/3, is_set/1]).
%% `sets' compatibility aliases:
@@ -207,21 +211,19 @@
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%% gb_sets:set() in OTP 17 only.
-
-spec empty() -> Set when
- Set :: gb_sets:set().
+ Set :: set().
empty() ->
{0, nil}.
-spec new() -> Set when
- Set :: gb_sets:set().
+ Set :: set().
new() -> empty().
-spec is_empty(Set) -> boolean() when
- Set :: gb_sets:set().
+ Set :: set().
is_empty({0, nil}) ->
true;
@@ -229,7 +231,7 @@ is_empty(_) ->
false.
-spec size(Set) -> non_neg_integer() when
- Set :: gb_sets:set().
+ Set :: set().
size({Size, _}) ->
Size.
@@ -502,6 +504,22 @@ iterator({_, L, _} = T, As) ->
iterator(nil, As) ->
As.
+-spec iterator_from(Element, Set) -> Iter when
+ Set :: set(Element),
+ Iter :: iter(Element).
+
+iterator_from(S, {_, T}) ->
+ iterator_from(S, T, []).
+
+iterator_from(S, {K, _, T}, As) when K < S ->
+ iterator_from(S, T, As);
+iterator_from(_, {_, nil, _} = T, As) ->
+ [T | As];
+iterator_from(S, {_, L, _} = T, As) ->
+ iterator_from(S, L, [T | As]);
+iterator_from(_, nil, As) ->
+ As.
+
-spec next(Iter1) -> {Element, Iter2} | 'none' when
Iter1 :: iter(Element),
Iter2 :: iter(Element).
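Aside (illustrative, not part of the patch): iterator_from/2 descends like iterator/1 but drops subtrees whose root element is below the start value, so next/1 begins at the first element greater than or equal to X. A sketch:

    S = gb_sets:from_list([1,3,5,7,9]),
    I0 = gb_sets:iterator_from(4, S),
    {5, I1} = gb_sets:next(I0),
    {7, _I2} = gb_sets:next(I1).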
diff --git a/lib/stdlib/src/gb_trees.erl b/lib/stdlib/src/gb_trees.erl
index 7069b61873..259e8f718b 100644
--- a/lib/stdlib/src/gb_trees.erl
+++ b/lib/stdlib/src/gb_trees.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -102,6 +102,10 @@
%% approach is that it does not require the complete list of all
%% elements to be built in memory at one time.
%%
+%% - iterator_from(K, T): returns an iterator that can be used for
+%% traversing the entries of tree T with key greater than or
+%% equal to K; see `next'.
+%%
%% - next(S): returns {X, V, S1} where X is the smallest key referred to
%% by the iterator S, and S1 is the new iterator to be used for
%% traversing the remaining entries, or the atom `none' if no entries
@@ -117,7 +121,7 @@
update/3, enter/3, delete/2, delete_any/2, balance/1,
is_defined/2, keys/1, values/1, to_list/1, from_orddict/1,
smallest/1, largest/1, take_smallest/1, take_largest/1,
- iterator/1, next/1, map/2]).
+ iterator/1, iterator_from/2, next/1, map/2]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -529,6 +533,29 @@ iterator({_, _, L, _} = T, As) ->
iterator(nil, As) ->
As.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+-spec iterator_from(Key, Tree) -> Iter when
+ Tree :: tree(Key, Value),
+ Iter :: iter(Key, Value).
+
+iterator_from(S, {_, T}) ->
+ iterator_1_from(S, T).
+
+iterator_1_from(S, T) ->
+ iterator_from(S, T, []).
+
+iterator_from(S, {K, _, _, T}, As) when K < S ->
+ iterator_from(S, T, As);
+iterator_from(_, {_, _, nil, _} = T, As) ->
+ [T | As];
+iterator_from(S, {_, _, L, _} = T, As) ->
+ iterator_from(S, L, [T | As]);
+iterator_from(_, nil, As) ->
+ As.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
-spec next(Iter1) -> 'none' | {Key, Value, Iter2} when
Iter1 :: iter(Key, Value),
Iter2 :: iter(Key, Value).
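Aside (illustrative, not part of the patch): the gb_trees variant follows the same pattern, with next/1 returning {Key, Value, Iter}. A sketch:

    T = gb_trees:from_orddict([{1,a}, {3,b}, {5,c}]),
    I0 = gb_trees:iterator_from(2, T),
    {3, b, I1} = gb_trees:next(I0),
    {5, c, _I2} = gb_trees:next(I1).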
diff --git a/lib/stdlib/src/io.erl b/lib/stdlib/src/io.erl
index b9ace2f442..0b59546dc4 100644
--- a/lib/stdlib/src/io.erl
+++ b/lib/stdlib/src/io.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -45,7 +45,7 @@
%% ErrorDescription is whatever the I/O-server sends.
-type server_no_data() :: {'error', ErrorDescription :: term()} | 'eof'.
--type location() :: erl_scan:location().
+-type location() :: erl_anno:location().
%%-------------------------------------------------------------------------
diff --git a/lib/stdlib/src/io_lib.erl b/lib/stdlib/src/io_lib.erl
index adc9a0cf5f..3378d668a5 100644
--- a/lib/stdlib/src/io_lib.erl
+++ b/lib/stdlib/src/io_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -60,6 +60,7 @@
-module(io_lib).
-export([fwrite/2,fread/2,fread/3,format/2]).
+-export([scan_format/2,unscan_format/1,build_text/1]).
-export([print/1,print/4,indentation/2]).
-export([write/1,write/2,write/3,nl/0,format_prompt/1,format_prompt/2]).
@@ -83,7 +84,7 @@
deep_unicode_char_list/1]).
-export_type([chars/0, latin1_string/0, continuation/0,
- fread_error/0, fread_item/0]).
+ fread_error/0, fread_item/0, format_spec/0]).
%%----------------------------------------------------------------------
@@ -108,6 +109,18 @@
-type fread_item() :: string() | atom() | integer() | float().
+-type format_spec() ::
+ #{
+ control_char => char(),
+ args => [any()],
+ width => 'none' | integer(),
+ adjust => 'left' | 'right',
+ precision => 'none' | integer(),
+ pad_char => char(),
+ encoding => 'unicode' | 'latin1',
+ strings => boolean()
+ }.
+
%%----------------------------------------------------------------------
%% Interface calls to sub-modules.
@@ -156,6 +169,31 @@ format(Format, Args) ->
Other
end.
+-spec scan_format(Format, Data) -> FormatList when
+ Format :: io:format(),
+ Data :: [term()],
+ FormatList :: [char() | format_spec()].
+
+scan_format(Format, Args) ->
+ try io_lib_format:scan(Format, Args)
+ catch
+ _:_ -> erlang:error(badarg, [Format, Args])
+ end.
+
+-spec unscan_format(FormatList) -> {Format, Data} when
+ FormatList :: [char() | format_spec()],
+ Format :: io:format(),
+ Data :: [term()].
+
+unscan_format(FormatList) ->
+ io_lib_format:unscan(FormatList).
+
+-spec build_text(FormatList) -> chars() when
+ FormatList :: [char() | format_spec()].
+
+build_text(FormatList) ->
+ io_lib_format:build(FormatList).
+
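Aside (illustrative, not part of the patch): scan_format/2 turns a format string and its arguments into a list of plain characters and format_spec() maps, which can be inspected or rewritten before rendering with build_text/1; unscan_format/1 goes back to a plain format string and argument list. A sketch, with the resulting shapes shown approximately:

    Specs = io_lib:scan_format("~s: ~p~n", ["value", {a, 1}]),
    "value: {a,1}\n" = lists:flatten(io_lib:build_text(Specs)),
    {"~s: ~p~n", ["value", {a, 1}]} = io_lib:unscan_format(Specs).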
-spec print(Term) -> chars() when
Term :: term().
@@ -249,6 +287,8 @@ write([H|T], D) ->
end;
write(F, _D) when is_function(F) ->
erlang:fun_to_list(F);
+write(Term, D) when is_map(Term) ->
+ write_map(Term, D);
write(T, D) when is_tuple(T) ->
if
D =:= 1 -> "{...}";
@@ -257,9 +297,7 @@ write(T, D) when is_tuple(T) ->
[write(element(1, T), D-1)|
write_tail(tl(tuple_to_list(T)), D-1, $,)],
$}]
- end;
-%write(Term, D) when is_map(Term) -> write_map(Term, D);
-write(Term, D) -> write_map(Term, D).
+ end.
%% write_tail(List, Depth, CharacterBeforeDots)
%% Test the terminating case first as this looks better with depth.
diff --git a/lib/stdlib/src/io_lib_format.erl b/lib/stdlib/src/io_lib_format.erl
index 89ae6fb187..015afb317a 100644
--- a/lib/stdlib/src/io_lib_format.erl
+++ b/lib/stdlib/src/io_lib_format.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -20,10 +20,9 @@
%% Formatting functions of io library.
--export([fwrite/2,fwrite_g/1,indentation/2]).
+-export([fwrite/2,fwrite_g/1,indentation/2,scan/2,unscan/1,build/1]).
-%% fwrite(Format, ArgList) -> string().
-%% Format the arguments in ArgList after string Format. Just generate
+%% Format the arguments in Args after string Format. Just generate
%% an error if there is an error in the arguments.
%%
%% To do the printing command correctly we need to calculate the
@@ -37,15 +36,84 @@
%% and it also splits the handling of the control characters into two
%% parts.
-fwrite(Format, Args) when is_atom(Format) ->
- fwrite(atom_to_list(Format), Args);
-fwrite(Format, Args) when is_binary(Format) ->
- fwrite(binary_to_list(Format), Args);
+-spec fwrite(Format, Data) -> FormatList when
+ Format :: io:format(),
+ Data :: [term()],
+ FormatList :: [char() | io_lib:format_spec()].
+
fwrite(Format, Args) ->
- Cs = collect(Format, Args),
+ build(scan(Format, Args)).
+
+%% Build the output text for a pre-parsed format list.
+
+-spec build(FormatList) -> io_lib:chars() when
+ FormatList :: [char() | io_lib:format_spec()].
+
+build(Cs) ->
Pc = pcount(Cs),
build(Cs, Pc, 0).
+%% Parse all control sequences in the format string.
+
+-spec scan(Format, Data) -> FormatList when
+ Format :: io:format(),
+ Data :: [term()],
+ FormatList :: [char() | io_lib:format_spec()].
+
+scan(Format, Args) when is_atom(Format) ->
+ scan(atom_to_list(Format), Args);
+scan(Format, Args) when is_binary(Format) ->
+ scan(binary_to_list(Format), Args);
+scan(Format, Args) ->
+ collect(Format, Args).
+
+%% Revert a pre-parsed format list to a plain character list and a
+%% list of arguments.
+
+-spec unscan(FormatList) -> {Format, Data} when
+ FormatList :: [char() | io_lib:format_spec()],
+ Format :: io:format(),
+ Data :: [term()].
+
+unscan(Cs) ->
+ {print(Cs), args(Cs)}.
+
+args([#{args := As} | Cs]) ->
+ As ++ args(Cs);
+args([_C | Cs]) ->
+ args(Cs);
+args([]) ->
+ [].
+
+print([#{control_char := C, width := F, adjust := Ad, precision := P,
+ pad_char := Pad, encoding := Encoding, strings := Strings} | Cs]) ->
+ print(C, F, Ad, P, Pad, Encoding, Strings) ++ print(Cs);
+print([C | Cs]) ->
+ [C | print(Cs)];
+print([]) ->
+ [].
+
+print(C, F, Ad, P, Pad, Encoding, Strings) ->
+ [$~] ++ print_field_width(F, Ad) ++ print_precision(P) ++
+ print_pad_char(Pad) ++ print_encoding(Encoding) ++
+ print_strings(Strings) ++ [C].
+
+print_field_width(none, _Ad) -> "";
+print_field_width(F, left) -> integer_to_list(-F);
+print_field_width(F, right) -> integer_to_list(F).
+
+print_precision(none) -> "";
+print_precision(P) -> [$. | integer_to_list(P)].
+
+print_pad_char($\s) -> ""; % default, no need to make explicit
+print_pad_char(Pad) -> [$., Pad].
+
+print_encoding(unicode) -> "t";
+print_encoding(latin1) -> "".
+
+print_strings(false) -> "l";
+print_strings(true) -> "".
+
collect([$~|Fmt0], Args0) ->
{C,Fmt1,Args1} = collect_cseq(Fmt0, Args0),
[C|collect(Fmt1, Args1)];
@@ -60,7 +128,10 @@ collect_cseq(Fmt0, Args0) ->
{Encoding,Fmt4,Args4} = encoding(Fmt3, Args3),
{Strings,Fmt5,Args5} = strings(Fmt4, Args4),
{C,As,Fmt6,Args6} = collect_cc(Fmt5, Args5),
- {{C,As,F,Ad,P,Pad,Encoding,Strings},Fmt6,Args6}.
+ FormatSpec = #{control_char => C, args => As, width => F, adjust => Ad,
+ precision => P, pad_char => Pad, encoding => Encoding,
+ strings => Strings},
+ {FormatSpec,Fmt6,Args6}.
encoding([$t|Fmt],Args) ->
true = hd(Fmt) =/= $l,
@@ -136,17 +207,19 @@ collect_cc([$i|Fmt], [A|Args]) -> {$i,[A],Fmt,Args}.
pcount(Cs) -> pcount(Cs, 0).
-pcount([{$p,_As,_F,_Ad,_P,_Pad,_Enc,_Str}|Cs], Acc) -> pcount(Cs, Acc+1);
-pcount([{$P,_As,_F,_Ad,_P,_Pad,_Enc,_Str}|Cs], Acc) -> pcount(Cs, Acc+1);
+pcount([#{control_char := $p}|Cs], Acc) -> pcount(Cs, Acc+1);
+pcount([#{control_char := $P}|Cs], Acc) -> pcount(Cs, Acc+1);
pcount([_|Cs], Acc) -> pcount(Cs, Acc);
pcount([], Acc) -> Acc.
-%% build([Control], Pc, Indentation) -> string().
+%% build([Control], Pc, Indentation) -> io_lib:chars().
%% Interpret the control structures. Count the number of print
%% remaining and only calculate indentation when necessary. Must also
%% be smart when calculating indentation for characters in format.
-build([{C,As,F,Ad,P,Pad,Enc,Str}|Cs], Pc0, I) ->
+build([#{control_char := C, args := As, width := F, adjust := Ad,
+ precision := P, pad_char := Pad, encoding := Enc,
+ strings := Str} | Cs], Pc0, I) ->
S = control(C, As, F, Ad, P, Pad, Enc, Str, I),
Pc1 = decr_pc(C, Pc0),
if
@@ -162,10 +235,14 @@ decr_pc($p, Pc) -> Pc - 1;
decr_pc($P, Pc) -> Pc - 1;
decr_pc(_, Pc) -> Pc.
-%% indentation(String, Indentation) -> Indentation.
+
%% Calculate the indentation of the end of a string given its start
%% indentation. We assume tabs at 8 cols.
+-spec indentation(String, StartIndent) -> integer() when
+ String :: io_lib:chars(),
+ StartIndent :: integer().
+
indentation([$\n|Cs], _I) -> indentation(Cs, 0);
indentation([$\t|Cs], I) -> indentation(Cs, ((I + 8) div 8) * 8);
indentation([C|Cs], I) when is_integer(C) ->
@@ -366,7 +443,6 @@ float_data([D|Cs], Ds) when D >= $0, D =< $9 ->
float_data([_|Cs], Ds) ->
float_data(Cs, Ds).
-%% fwrite_g(Float)
%% Writes the shortest, correctly rounded string that converts
%% to Float when read back with list_to_float/1.
%%
@@ -374,6 +450,8 @@ float_data([_|Cs], Ds) ->
%% in Proceedings of the SIGPLAN '96 Conference on Programming
%% Language Design and Implementation.
+-spec fwrite_g(float()) -> string().
+
fwrite_g(0.0) ->
"0.0";
fwrite_g(Float) when is_float(Float) ->
@@ -642,7 +720,7 @@ prefixed_integer(Int, F, Adj, Base, Pad, Prefix, Lowercase)
term([Prefix|S], F, Adj, none, Pad)
end.
-%% char(Char, Field, Adjust, Precision, PadChar) -> string().
+%% char(Char, Field, Adjust, Precision, PadChar) -> chars().
char(C, none, _Adj, none, _Pad) -> [C];
char(C, F, _Adj, none, _Pad) -> chars(C, F);
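Inside io_lib_format the old eight-element control tuples become maps, so fwrite/2 is now just build(scan(Format, Args)), and unscan/1 reconstructs the format string from the stored fields (width, precision, pad character, the "t" and "l" modifiers). Roughly, the round trip for a simple call looks as follows; the map contents are inferred from the clauses in this diff, not captured shell output:

    %% Sketch of the scan/unscan round trip, based on the clauses above.
    Parsed = io_lib_format:scan("~-10.2tp", [foo]),
    %% Parsed = [#{control_char => $p, args => [foo], width => 10,
    %%             adjust => left, precision => 2, pad_char => $\s,
    %%             encoding => unicode, strings => true}]
    {"~-10.2tp", [foo]} = io_lib_format:unscan(Parsed).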
diff --git a/lib/stdlib/src/maps.erl b/lib/stdlib/src/maps.erl
index ba4d6a5c87..533ff08726 100644
--- a/lib/stdlib/src/maps.erl
+++ b/lib/stdlib/src/maps.erl
@@ -19,31 +19,16 @@
-module(maps).
--export([
- fold/3,
- map/2,
- size/1,
- without/2,
- with/2,
- get/3
- ]).
+-export([get/3,filter/2,fold/3, map/2,
+ size/1,
+ without/2, with/2]).
%%% BIFs
--export([
- get/2,
- find/2,
- from_list/1,
- is_key/2,
- keys/1,
- merge/2,
- new/0,
- put/3,
- remove/2,
- to_list/1,
- update/3,
- values/1
- ]).
+-export([get/2, find/2, from_list/1,
+ is_key/2, keys/1, merge/2,
+ new/0, put/3, remove/2,
+ to_list/1, update/3, values/1]).
-spec get(Key,Map) -> Value when
Key :: term(),
@@ -150,13 +135,28 @@ values(_) -> erlang:nif_error(undef).
Value :: term(),
Default :: term().
-get(Key, Map, Default) ->
+get(Key,Map,Default) when is_map(Map) ->
case maps:find(Key, Map) of
{ok, Value} ->
Value;
error ->
Default
- end.
+ end;
+get(Key,Map,Default) ->
+ erlang:error({badmap,Map},[Key,Map,Default]).
+
+
+-spec filter(Pred,Map1) -> Map2 when
+ Pred :: fun((Key, Value) -> boolean()),
+ Key :: term(),
+ Value :: term(),
+ Map1 :: map(),
+ Map2 :: map().
+
+filter(Pred,Map) when is_function(Pred,2), is_map(Map) ->
+ maps:from_list([{K,V}||{K,V}<-maps:to_list(Map),Pred(K,V)]);
+filter(Pred,Map) ->
+ erlang:error(error_type(Map),[Pred,Map]).
-spec fold(Fun,Init,Map) -> Acc when
@@ -169,8 +169,10 @@ get(Key, Map, Default) ->
K :: term(),
V :: term().
-fold(Fun, Init, Map) when is_function(Fun,3), is_map(Map) ->
- lists:foldl(fun({K,V},A) -> Fun(K,V,A) end,Init,maps:to_list(Map)).
+fold(Fun,Init,Map) when is_function(Fun,3), is_map(Map) ->
+ lists:foldl(fun({K,V},A) -> Fun(K,V,A) end,Init,maps:to_list(Map));
+fold(Fun,Init,Map) ->
+ erlang:error(error_type(Map),[Fun,Init,Map]).
-spec map(Fun,Map1) -> Map2 when
Fun :: fun((K, V1) -> V2),
@@ -180,18 +182,19 @@ fold(Fun, Init, Map) when is_function(Fun,3), is_map(Map) ->
V1 :: term(),
V2 :: term().
-map(Fun, Map) when is_function(Fun, 2), is_map(Map) ->
- maps:from_list(lists:map(fun
- ({K,V}) ->
- {K,Fun(K,V)}
- end,maps:to_list(Map))).
+map(Fun,Map) when is_function(Fun, 2), is_map(Map) ->
+ maps:from_list([{K,Fun(K,V)}||{K,V}<-maps:to_list(Map)]);
+map(Fun,Map) ->
+ erlang:error(error_type(Map),[Fun,Map]).
-spec size(Map) -> non_neg_integer() when
Map :: map().
size(Map) when is_map(Map) ->
- erlang:map_size(Map).
+ erlang:map_size(Map);
+size(Val) ->
+ erlang:error({badmap,Val},[Val]).
-spec without(Ks,Map1) -> Map2 when
@@ -200,8 +203,10 @@ size(Map) when is_map(Map) ->
Map2 :: map(),
K :: term().
-without(Ks, M) when is_list(Ks), is_map(M) ->
- maps:from_list([{K,V}||{K,V} <- maps:to_list(M), not lists:member(K, Ks)]).
+without(Ks,M) when is_list(Ks), is_map(M) ->
+ maps:from_list([{K,V}||{K,V} <- maps:to_list(M), not lists:member(K, Ks)]);
+without(Ks,M) ->
+ erlang:error(error_type(M),[Ks,M]).
-spec with(Ks, Map1) -> Map2 when
@@ -210,5 +215,11 @@ without(Ks, M) when is_list(Ks), is_map(M) ->
Map2 :: map(),
K :: term().
-with(Ks, M) when is_list(Ks), is_map(M) ->
- maps:from_list([{K,V}||{K,V} <- maps:to_list(M), lists:member(K, Ks)]).
+with(Ks,M) when is_list(Ks), is_map(M) ->
+ maps:from_list([{K,V}||{K,V} <- maps:to_list(M), lists:member(K, Ks)]);
+with(Ks,M) ->
+ erlang:error(error_type(M),[Ks,M]).
+
+
+error_type(M) when is_map(M) -> badarg;
+error_type(V) -> {badmap, V}.
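The maps changes add filter/2 and make the Erlang-implemented functions (get/3, filter/2, fold/3, map/2, size/1, without/2, with/2) fail with {badmap, Value} when the map argument is not a map, or with badarg when the map is fine but another argument is not (via error_type/1). A small usage sketch based on the clauses above:

    %% Keep only the entries with even values.
    M = #{a => 1, b => 2, c => 3, d => 4},
    #{b := 2, d := 4} = maps:filter(fun(_K, V) -> V rem 2 =:= 0 end, M),

    %% A non-map argument now raises {badmap, Value} ...
    {'EXIT', {{badmap, not_a_map}, _}} =
        (catch maps:filter(fun(_K, V) -> V > 0 end, not_a_map)),
    %% ... and a bad fun together with a valid map raises badarg.
    {'EXIT', {badarg, _}} = (catch maps:filter(not_a_fun, M)).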
diff --git a/lib/stdlib/src/math.erl b/lib/stdlib/src/math.erl
index 98a70b1644..43f736e54c 100644
--- a/lib/stdlib/src/math.erl
+++ b/lib/stdlib/src/math.erl
@@ -24,7 +24,7 @@
-export([sin/1, cos/1, tan/1, asin/1, acos/1, atan/1, atan2/2, sinh/1,
cosh/1, tanh/1, asinh/1, acosh/1, atanh/1, exp/1, log/1,
- log10/1, pow/2, sqrt/1, erf/1, erfc/1]).
+ log2/1, log10/1, pow/2, sqrt/1, erf/1, erfc/1]).
-spec acos(X) -> float() when
X :: number().
@@ -92,6 +92,11 @@ exp(_) ->
log(_) ->
erlang:nif_error(undef).
+-spec log2(X) -> float() when
+ X :: number().
+log2(_) ->
+ erlang:nif_error(undef).
+
-spec log10(X) -> float() when
X :: number().
log10(_) ->
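math:log2/1 is added as a BIF stub alongside log/1 and log10/1; the Erlang clause only raises undef when the native implementation is missing. Usage mirrors the other logarithms, e.g.:

    %% math:log2(8) is 3.0; previously callers had to change base by hand.
    true = abs(math:log2(8) - 3.0) < 1.0e-12,
    true = abs(math:log2(1 bsl 20) - 20.0) < 1.0e-12.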
diff --git a/lib/stdlib/src/ms_transform.erl b/lib/stdlib/src/ms_transform.erl
index 7b6f4e5b50..6e3723bb98 100644
--- a/lib/stdlib/src/ms_transform.erl
+++ b/lib/stdlib/src/ms_transform.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2002-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2002-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -822,9 +822,10 @@ th(T,B,OB) when is_tuple(T) ->
th(Nonstruct,B,_OB) ->
{Nonstruct,B}.
-warn_var_clash(Line,Name,OuterBound) ->
+warn_var_clash(Anno,Name,OuterBound) ->
case gb_sets:is_member(Name,OuterBound) of
true ->
+ Line = erl_anno:line(Anno),
add_warning(Line,{?WARN_SHADOW_VAR,Name});
_ ->
ok
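In ms_transform the variable-clash check now receives the full annotation and extracts the line number only when a warning is actually emitted, using erl_anno:line/1. The same erl_anno accessors recur throughout this patch; a minimal sketch of the primitives involved:

    %% erl_anno wraps a location (line, or {line, column}) in an opaque
    %% annotation; warnings only need the line back.
    Anno = erl_anno:new({17, 4}),
    17 = erl_anno:line(Anno),
    {17, 4} = erl_anno:location(Anno).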
diff --git a/lib/stdlib/src/orddict.erl b/lib/stdlib/src/orddict.erl
index c98d78b34d..cbdf25d757 100644
--- a/lib/stdlib/src/orddict.erl
+++ b/lib/stdlib/src/orddict.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -25,11 +25,13 @@
-export([store/3,append/3,append_list/3,update/3,update/4,update_counter/3]).
-export([fold/3,map/2,filter/2,merge/3]).
--export_type([orddict/0]).
+-export_type([orddict/0, orddict/2]).
%%---------------------------------------------------------------------------
--type orddict() :: [{Key :: term(), Value :: term()}].
+-type orddict() :: orddict(_, _).
+
+-type orddict(Key, Value) :: [{Key, Value}].
%%---------------------------------------------------------------------------
@@ -38,8 +40,7 @@
new() -> [].
-spec is_key(Key, Orddict) -> boolean() when
- Key :: term(),
- Orddict :: orddict().
+ Orddict :: orddict(Key, Value :: term()).
is_key(Key, [{K,_}|_]) when Key < K -> false;
is_key(Key, [{K,_}|Dict]) when Key > K -> is_key(Key, Dict);
@@ -47,14 +48,14 @@ is_key(_Key, [{_K,_Val}|_]) -> true; %Key == K
is_key(_, []) -> false.
-spec to_list(Orddict) -> List when
- Orddict :: orddict(),
- List :: [{Key :: term(), Value :: term()}].
+ Orddict :: orddict(Key, Value),
+ List :: [{Key, Value}].
to_list(Dict) -> Dict.
-spec from_list(List) -> Orddict when
- List :: [{Key :: term(), Value :: term()}],
- Orddict :: orddict().
+ List :: [{Key, Value}],
+ Orddict :: orddict(Key, Value).
from_list([]) -> [];
from_list([{_,_}]=Pair) -> Pair;
@@ -73,17 +74,13 @@ is_empty([]) -> true;
is_empty([_|_]) -> false.
-spec fetch(Key, Orddict) -> Value when
- Key :: term(),
- Value :: term(),
- Orddict :: orddict().
+ Orddict :: orddict(Key, Value).
fetch(Key, [{K,_}|D]) when Key > K -> fetch(Key, D);
fetch(Key, [{K,Value}|_]) when Key == K -> Value.
-spec find(Key, Orddict) -> {'ok', Value} | 'error' when
- Key :: term(),
- Orddict :: orddict(),
- Value :: term().
+ Orddict :: orddict(Key, Value).
find(Key, [{K,_}|_]) when Key < K -> error;
find(Key, [{K,_}|D]) when Key > K -> find(Key, D);
@@ -91,17 +88,16 @@ find(_Key, [{_K,Value}|_]) -> {ok,Value}; %Key == K
find(_, []) -> error.
-spec fetch_keys(Orddict) -> Keys when
- Orddict :: orddict(),
- Keys :: [term()].
+ Orddict :: orddict(Key, Value :: term()),
+ Keys :: [Key].
fetch_keys([{Key,_}|Dict]) ->
[Key|fetch_keys(Dict)];
fetch_keys([]) -> [].
-spec erase(Key, Orddict1) -> Orddict2 when
- Key :: term(),
- Orddict1 :: orddict(),
- Orddict2 :: orddict().
+ Orddict1 :: orddict(Key, Value),
+ Orddict2 :: orddict(Key, Value).
erase(Key, [{K,_}=E|Dict]) when Key < K -> [E|Dict];
erase(Key, [{K,_}=E|Dict]) when Key > K ->
@@ -110,13 +106,11 @@ erase(_Key, [{_K,_Val}|Dict]) -> Dict; %Key == K
erase(_, []) -> [].
-spec store(Key, Value, Orddict1) -> Orddict2 when
- Key :: term(),
- Value :: term(),
- Orddict1 :: orddict(),
- Orddict2 :: orddict().
+ Orddict1 :: orddict(Key, Value),
+ Orddict2 :: orddict(Key, Value).
-store(Key, New, [{K,_}=E|Dict]) when Key < K ->
- [{Key,New},E|Dict];
+store(Key, New, [{K,_}|_]=Dict) when Key < K ->
+ [{Key,New}|Dict];
store(Key, New, [{K,_}=E|Dict]) when Key > K ->
[E|store(Key, New, Dict)];
store(Key, New, [{_K,_Old}|Dict]) -> %Key == K
@@ -124,13 +118,11 @@ store(Key, New, [{_K,_Old}|Dict]) -> %Key == K
store(Key, New, []) -> [{Key,New}].
-spec append(Key, Value, Orddict1) -> Orddict2 when
- Key :: term(),
- Value :: term(),
- Orddict1 :: orddict(),
- Orddict2 :: orddict().
+ Orddict1 :: orddict(Key, Value),
+ Orddict2 :: orddict(Key, Value).
-append(Key, New, [{K,_}=E|Dict]) when Key < K ->
- [{Key,[New]},E|Dict];
+append(Key, New, [{K,_}|_]=Dict) when Key < K ->
+ [{Key,[New]}|Dict];
append(Key, New, [{K,_}=E|Dict]) when Key > K ->
[E|append(Key, New, Dict)];
append(Key, New, [{_K,Old}|Dict]) -> %Key == K
@@ -138,13 +130,12 @@ append(Key, New, [{_K,Old}|Dict]) -> %Key == K
append(Key, New, []) -> [{Key,[New]}].
-spec append_list(Key, ValList, Orddict1) -> Orddict2 when
- Key :: term(),
- ValList :: [Value :: term()],
- Orddict1 :: orddict(),
- Orddict2 :: orddict().
+ ValList :: [Value],
+ Orddict1 :: orddict(Key, Value),
+ Orddict2 :: orddict(Key, Value).
-append_list(Key, NewList, [{K,_}=E|Dict]) when Key < K ->
- [{Key,NewList},E|Dict];
+append_list(Key, NewList, [{K,_}|_]=Dict) when Key < K ->
+ [{Key,NewList}|Dict];
append_list(Key, NewList, [{K,_}=E|Dict]) when Key > K ->
[E|append_list(Key, NewList, Dict)];
append_list(Key, NewList, [{_K,Old}|Dict]) -> %Key == K
@@ -153,10 +144,9 @@ append_list(Key, NewList, []) ->
[{Key,NewList}].
-spec update(Key, Fun, Orddict1) -> Orddict2 when
- Key :: term(),
- Fun :: fun((Value1 :: term()) -> Value2 :: term()),
- Orddict1 :: orddict(),
- Orddict2 :: orddict().
+ Fun :: fun((Value1 :: Value) -> Value2 :: Value),
+ Orddict1 :: orddict(Key, Value),
+ Orddict2 :: orddict(Key, Value).
update(Key, Fun, [{K,_}=E|Dict]) when Key > K ->
[E|update(Key, Fun, Dict)];
@@ -164,14 +154,13 @@ update(Key, Fun, [{K,Val}|Dict]) when Key == K ->
[{Key,Fun(Val)}|Dict].
-spec update(Key, Fun, Initial, Orddict1) -> Orddict2 when
- Key :: term(),
- Initial :: term(),
- Fun :: fun((Value1 :: term()) -> Value2 :: term()),
- Orddict1 :: orddict(),
- Orddict2 :: orddict().
-
-update(Key, _, Init, [{K,_}=E|Dict]) when Key < K ->
- [{Key,Init},E|Dict];
+ Initial :: Value,
+ Fun :: fun((Value1 :: Value) -> Value2 :: Value),
+ Orddict1 :: orddict(Key, Value),
+ Orddict2 :: orddict(Key, Value).
+
+update(Key, _, Init, [{K,_}|_]=Dict) when Key < K ->
+ [{Key,Init}|Dict];
update(Key, Fun, Init, [{K,_}=E|Dict]) when Key > K ->
[E|update(Key, Fun, Init, Dict)];
update(Key, Fun, _Init, [{_K,Val}|Dict]) -> %Key == K
@@ -179,13 +168,12 @@ update(Key, Fun, _Init, [{_K,Val}|Dict]) -> %Key == K
update(Key, _, Init, []) -> [{Key,Init}].
-spec update_counter(Key, Increment, Orddict1) -> Orddict2 when
- Key :: term(),
- Increment :: number(),
- Orddict1 :: orddict(),
- Orddict2 :: orddict().
+ Orddict1 :: orddict(Key, Value),
+ Orddict2 :: orddict(Key, Value),
+ Increment :: number().
-update_counter(Key, Incr, [{K,_}=E|Dict]) when Key < K ->
- [{Key,Incr},E|Dict];
+update_counter(Key, Incr, [{K,_}|_]=Dict) when Key < K ->
+ [{Key,Incr}|Dict];
update_counter(Key, Incr, [{K,_}=E|Dict]) when Key > K ->
[E|update_counter(Key, Incr, Dict)];
update_counter(Key, Incr, [{_K,Val}|Dict]) -> %Key == K
@@ -193,28 +181,30 @@ update_counter(Key, Incr, [{_K,Val}|Dict]) -> %Key == K
update_counter(Key, Incr, []) -> [{Key,Incr}].
-spec fold(Fun, Acc0, Orddict) -> Acc1 when
- Fun :: fun((Key :: term(), Value :: term(), AccIn :: term()) -> AccOut :: term()),
- Acc0 :: term(),
- Acc1 :: term(),
- Orddict :: orddict().
+ Fun :: fun((Key, Value, AccIn) -> AccOut),
+ Orddict :: orddict(Key, Value),
+ Acc0 :: Acc,
+ Acc1 :: Acc,
+ AccIn :: Acc,
+ AccOut :: Acc.
fold(F, Acc, [{Key,Val}|D]) ->
fold(F, F(Key, Val, Acc), D);
fold(F, Acc, []) when is_function(F, 3) -> Acc.
-spec map(Fun, Orddict1) -> Orddict2 when
- Fun :: fun((Key :: term(), Value1 :: term()) -> Value2 :: term()),
- Orddict1 :: orddict(),
- Orddict2 :: orddict().
+ Fun :: fun((Key, Value1) -> Value2),
+ Orddict1 :: orddict(Key, Value1),
+ Orddict2 :: orddict(Key, Value2).
map(F, [{Key,Val}|D]) ->
[{Key,F(Key, Val)}|map(F, D)];
map(F, []) when is_function(F, 2) -> [].
-spec filter(Pred, Orddict1) -> Orddict2 when
- Pred :: fun((Key :: term(), Value :: term()) -> boolean()),
- Orddict1 :: orddict(),
- Orddict2 :: orddict().
+ Pred :: fun((Key, Value) -> boolean()),
+ Orddict1 :: orddict(Key, Value),
+ Orddict2 :: orddict(Key, Value).
filter(F, [{Key,Val}=E|D]) ->
case F(Key, Val) of
@@ -224,10 +214,10 @@ filter(F, [{Key,Val}=E|D]) ->
filter(F, []) when is_function(F, 2) -> [].
-spec merge(Fun, Orddict1, Orddict2) -> Orddict3 when
- Fun :: fun((Key :: term(), Value1 :: term(), Value2 :: term()) -> Value :: term()),
- Orddict1 :: orddict(),
- Orddict2 :: orddict(),
- Orddict3 :: orddict().
+ Fun :: fun((Key, Value1, Value2) -> Value),
+ Orddict1 :: orddict(Key, Value1),
+ Orddict2 :: orddict(Key, Value2),
+ Orddict3 :: orddict(Key, Value).
merge(F, [{K1,_}=E1|D1], [{K2,_}=E2|D2]) when K1 < K2 ->
[E1|merge(F, D1, [E2|D2])];
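Two things happen in orddict: the specs now use the exported parametrized type orddict(Key, Value), so keys and values can be related across arguments, and the insertion functions (store/3, append/3, append_list/3, update/4, update_counter/3) keep the existing tail as-is when the new key sorts first, instead of deconstructing and rebuilding the head cell. A sketch of how client code can use the parametrized type; the module and function names are illustrative:

    -module(price_table).
    -export([bump/2]).

    %% An order-preserving table from item name to price in cents.
    -type prices() :: orddict:orddict(atom(), non_neg_integer()).

    -spec bump(atom(), prices()) -> prices().
    bump(Item, Prices) ->
        orddict:update_counter(Item, 100, Prices).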
diff --git a/lib/stdlib/src/otp_internal.erl b/lib/stdlib/src/otp_internal.erl
index 0ace87ef5c..0340015c35 100644
--- a/lib/stdlib/src/otp_internal.erl
+++ b/lib/stdlib/src/otp_internal.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -18,7 +18,7 @@
%%
-module(otp_internal).
--export([obsolete/3]).
+-export([obsolete/3, obsolete_type/3]).
%%----------------------------------------------------------------------
@@ -26,7 +26,7 @@
-type mfas() :: mfa() | {atom(), atom(), [byte()]}.
-type release() :: string().
--spec obsolete(atom(), atom(), byte()) ->
+-spec obsolete(module(), atom(), arity()) ->
'no' | {tag(), string()} | {tag(), mfas(), release()}.
obsolete(Module, Name, Arity) ->
@@ -59,6 +59,11 @@ obsolete_1(erl_eval, arg_list, 3) ->
obsolete_1(erlang, hash, 2) ->
{deprecated, {erlang, phash2, 2}};
+obsolete_1(erlang, now, 0) ->
+ {deprecated,
+ "Deprecated BIF. See the \"Time and Time Correction in Erlang\" "
+ "chapter of the ERTS User's Guide for more information."};
+
obsolete_1(calendar, local_time_to_universal_time, 1) ->
{deprecated, {calendar, local_time_to_universal_time_dst, 1}};
@@ -578,6 +583,57 @@ obsolete_1(asn1rt, utf8_binary_to_list, 1) ->
obsolete_1(asn1rt, utf8_list_to_binary, 1) ->
{deprecated,{unicode,characters_to_binary,1}};
+%% Added in OTP 18.
+obsolete_1(core_lib, get_anno, 1) ->
+ {deprecated,{cerl,get_ann,1}};
+obsolete_1(core_lib, set_anno, 2) ->
+ {deprecated,{cerl,set_ann,2}};
+obsolete_1(core_lib, is_literal, 1) ->
+ {deprecated,{cerl,is_literal,1}};
+obsolete_1(core_lib, is_literal_list, 1) ->
+ {deprecated,"deprecated; use lists:all(fun cerl:is_literal/1, L)"
+ " instead"};
+obsolete_1(core_lib, literal_value, 1) ->
+ {deprecated,{core_lib,concrete,1}};
+obsolete_1(erl_scan, set_attribute, 3) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use erl_anno:set_line/2 instead"};
+obsolete_1(erl_scan, attributes_info, 1) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use "
+ "erl_anno:{column,line,location,text}/1 instead"};
+obsolete_1(erl_scan, attributes_info, 2) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use "
+ "erl_anno:{column,line,location,text}/1 instead"};
+obsolete_1(erl_scan, token_info, 1) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use "
+ "erl_scan:{category,column,line,location,symbol,text}/1 instead"};
+obsolete_1(erl_scan, token_info, 2) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use "
+ "erl_scan:{category,column,line,location,symbol,text}/1 instead"};
+obsolete_1(erl_parse, set_line, 2) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use erl_anno:set_line/2 instead"};
+obsolete_1(erl_parse, get_attributes, 1) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use "
+ "erl_anno:{column,line,location,text}/1 instead"};
+obsolete_1(erl_parse, get_attribute, 2) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use "
+ "erl_anno:{column,line,location,text}/1 instead"};
+obsolete_1(erl_lint, modify_line, 2) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use erl_parse:map_anno/2 instead"};
+obsolete_1(ssl, negotiated_next_protocol, 1) ->
+ {deprecated,{ssl,negotiated_protocol,1}};
+
+obsolete_1(ssl, connection_info, 1) ->
+ {deprecated, "deprecated; use connection_information/[1,2] instead"};
+
obsolete_1(_, _, _) ->
no.
@@ -624,3 +680,30 @@ is_snmp_agent_function(add_agent_caps, 2) -> true;
is_snmp_agent_function(del_agent_caps, 1) -> true;
is_snmp_agent_function(get_agent_caps, 0) -> true;
is_snmp_agent_function(_, _) -> false.
+
+-spec obsolete_type(module(), atom(), arity()) ->
+ 'no' | {tag(), string()} | {tag(), mfas(), release()}.
+
+obsolete_type(Module, Name, NumberOfVariables) ->
+ case obsolete_type_1(Module, Name, NumberOfVariables) of
+%% {deprecated=Tag,{_,_,_}=Replacement} ->
+%% {Tag,Replacement,"in a future release"};
+ {_,String}=Ret when is_list(String) ->
+ Ret;
+%% {_,_,_}=Ret ->
+%% Ret;
+ no ->
+ no
+ end.
+
+obsolete_type_1(erl_scan,column,0) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use erl_anno:column() instead"};
+obsolete_type_1(erl_scan,line,0) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use erl_anno:line() instead"};
+obsolete_type_1(erl_scan,location,0) ->
+ {deprecated,
+ "deprecated (will be removed in OTP 19); use erl_anno:location() instead"};
+obsolete_type_1(_,_,_) ->
+ no.
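otp_internal gains obsolete_type/3, mirroring obsolete/3 but keyed on module, type name and number of type variables, so the linter can warn about deprecated types such as erl_scan:line/0. The shapes it returns, taken from the clauses above (otp_internal is an internal helper used by erl_lint; calling it directly is only for illustration):

    {deprecated, Msg} = otp_internal:obsolete_type(erl_scan, line, 0),
    true = is_list(Msg),                       % a free-text deprecation message
    no = otp_internal:obsolete_type(erl_scan, string, 1).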
diff --git a/lib/stdlib/src/qlc.erl b/lib/stdlib/src/qlc.erl
index 002032d48d..ad8aafbb1a 100644
--- a/lib/stdlib/src/qlc.erl
+++ b/lib/stdlib/src/qlc.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -1006,7 +1006,7 @@ listify(T) ->
-record(simple_qlc,
{p, % atom(), pattern variable
le,
- line,
+ line :: erl_anno:anno(),
init_value,
optz % #optz
}).
@@ -1148,15 +1148,18 @@ abstract(Info, true=_Flat, NElements, Depth) ->
[{match,_,Expr,Q}] ->
Q;
[{match,_,Expr,Q} | Body] ->
- {block, 0, lists:reverse(Body, [Q])};
+ {block, anno0(), lists:reverse(Body, [Q])};
_ ->
- {block, 0, lists:reverse(Body0, [Expr])}
+ {block, anno0(), lists:reverse(Body0, [Expr])}
end.
-abstract({qlc, E0, Qs0, Opt}, NElements, Depth) ->
+abstract(Info, NElements, Depth) ->
+ abstract1(Info, NElements, Depth, anno1()).
+
+abstract1({qlc, E0, Qs0, Opt}, NElements, Depth, A) ->
Qs = lists:map(fun({generate, P, LE}) ->
- {generate, 1, binary_to_term(P),
- abstract(LE, NElements, Depth)};
+ {generate, A, binary_to_term(P),
+ abstract1(LE, NElements, Depth, A)};
(F) ->
binary_to_term(F)
end, Qs0),
@@ -1165,12 +1168,12 @@ abstract({qlc, E0, Qs0, Opt}, NElements, Depth) ->
[] -> [];
_ -> [abstract_term(Opt, 1)]
end,
- ?QLC_Q(1, 1, 1, 1, {lc,1,E,Qs}, Os);
-abstract({table, {M, F, As0}}, _NElements, _Depth)
+ ?QLC_Q(A, A, A, A, {lc,A,E,Qs}, Os);
+abstract1({table, {M, F, As0}}, _NElements, _Depth, Anno)
when is_atom(M), is_atom(F), is_list(As0) ->
As = [abstract_term(A, 1) || A <- As0],
- {call, 1, {remote, 1, {atom, 1, M}, {atom, 1, F}}, As};
-abstract({table, TableDesc}, _NElements, _Depth) ->
+ {call, Anno, {remote, Anno, {atom, Anno, M}, {atom, Anno, F}}, As};
+abstract1({table, TableDesc}, _NElements, _Depth, _A) ->
case io_lib:deep_char_list(TableDesc) of
true ->
{ok, Tokens, _} = erl_scan:string(lists:flatten(TableDesc++".")),
@@ -1179,27 +1182,28 @@ abstract({table, TableDesc}, _NElements, _Depth) ->
false -> % abstract expression
TableDesc
end;
-abstract({append, Infos}, NElements, Depth) ->
+abstract1({append, Infos}, NElements, Depth, A) ->
As = lists:foldr(fun(Info, As0) ->
- {cons,1,abstract(Info, NElements, Depth),As0}
- end, {nil, 1}, Infos),
- {call, 1, {remote, 1, {atom, 1, ?MODULE}, {atom, 1, append}}, [As]};
-abstract({sort, Info, SortOptions}, NElements, Depth) ->
- {call, 1, {remote, 1, {atom, 1, ?MODULE}, {atom, 1, sort}},
- [abstract(Info, NElements, Depth), abstract_term(SortOptions, 1)]};
-abstract({keysort, Info, Kp, SortOptions}, NElements, Depth) ->
- {call, 1, {remote, 1, {atom, 1, ?MODULE}, {atom, 1, keysort}},
- [abstract_term(Kp, 1), abstract(Info, NElements, Depth),
+ {cons,A,abstract1(Info, NElements, Depth, A),
+ As0}
+ end, {nil, A}, Infos),
+ {call, A, {remote, A, {atom, A, ?MODULE}, {atom, A, append}}, [As]};
+abstract1({sort, Info, SortOptions}, NElements, Depth, A) ->
+ {call, A, {remote, A, {atom, A, ?MODULE}, {atom, A, sort}},
+ [abstract1(Info, NElements, Depth, A), abstract_term(SortOptions, 1)]};
+abstract1({keysort, Info, Kp, SortOptions}, NElements, Depth, A) ->
+ {call, A, {remote, A, {atom, A, ?MODULE}, {atom, A, keysort}},
+ [abstract_term(Kp, 1), abstract1(Info, NElements, Depth, A),
abstract_term(SortOptions, 1)]};
-abstract({list,L,MS}, NElements, Depth) ->
- {call, 1, {remote, 1, {atom, 1, ets}, {atom, 1, match_spec_run}},
- [abstract(L, NElements, Depth),
- {call, 1, {remote, 1, {atom, 1, ets}, {atom, 1, match_spec_compile}},
+abstract1({list,L,MS}, NElements, Depth, A) ->
+ {call, A, {remote, A, {atom, A, ets}, {atom, A, match_spec_run}},
+ [abstract1(L, NElements, Depth, A),
+ {call, A, {remote, A, {atom, A, ets}, {atom, A, match_spec_compile}},
[abstract_term(depth(MS, Depth), 1)]}]};
-abstract({list, L}, NElements, Depth) when NElements =:= infinity;
- NElements >= length(L) ->
+abstract1({list, L}, NElements, Depth, _A) when NElements =:= infinity;
+ NElements >= length(L) ->
abstract_term(depth(L, Depth), 1);
-abstract({list, L}, NElements, Depth) ->
+abstract1({list, L}, NElements, Depth, _A) ->
abstract_term(depth(lists:sublist(L, NElements), Depth) ++ '...', 1).
depth(List, infinity) ->
@@ -1251,14 +1255,14 @@ abstract_term(Term) ->
abstract_term(Term, 0).
abstract_term(Term, Line) ->
- abstr_term(Term, Line).
+ abstr_term(Term, anno(Line)).
abstr_term(Tuple, Line) when is_tuple(Tuple) ->
{tuple,Line,[abstr_term(E, Line) || E <- tuple_to_list(Tuple)]};
abstr_term([_ | _]=L, Line) ->
case io_lib:char_list(L) of
true ->
- erl_parse:abstract(L, Line);
+ erl_parse:abstract(L, erl_anno:line(Line));
false ->
abstr_list(L, Line)
end;
@@ -1285,7 +1289,7 @@ abstr_term(Fun, Line) when is_function(Fun) ->
abstr_term(PPR, Line) when is_pid(PPR); is_port(PPR); is_reference(PPR) ->
{special, Line, lists:flatten(io_lib:write(PPR))};
abstr_term(Simple, Line) ->
- erl_parse:abstract(Simple, Line).
+ erl_parse:abstract(Simple, erl_anno:line(Line)).
abstr_list([H | T], Line) ->
{cons, Line, abstr_term(H, Line), abstr_list(T, Line)};
@@ -1519,7 +1523,7 @@ join_info(Join, QInfo, Qdata, Code) ->
%% Only compared constants (==).
[Cs1_0, Cs2_0]
end,
- L = 0,
+ L = anno0(),
G1_0 = {var,L,'G1'}, G2_0 = {var,L,'G2'},
JP = element(JQNum + 1, Code),
%% Create code for wh1 and wh2 in #join{}:
@@ -1571,7 +1575,7 @@ join_merge_info(QNum, QInfo, Code, G, ExtraConstants) ->
{P, P};
_ ->
{PV, _} = aux_name1('P', 0, abstract_vars(P)),
- L = 0,
+ L = erl_anno:new(0),
V = {var, L, PV},
{V, {match, L, V, P}}
end,
@@ -1579,19 +1583,20 @@ join_merge_info(QNum, QInfo, Code, G, ExtraConstants) ->
LEI = {generate, term_to_binary(M), LEInfo},
TP = term_to_binary(G),
CFs = [begin
- Call = {call,0,{atom,0,element},[{integer,0,Col},EPV]},
- F = list2op([{op,0,Op,abstract_term(Con),Call}
- || {Con,Op} <- ConstOps], 'or'),
+ A = anno0(),
+ Call = {call,A,{atom,A,element},[{integer,A,Col},EPV]},
+ F = list2op([{op,A,Op,abstract_term(Con),Call}
+ || {Con,Op} <- ConstOps], 'or', A),
term_to_binary(F)
end ||
{Col,ConstOps} <- ExtraConstants],
{{I,G}, [{generate, TP, {qlc, DQP, [LEI | CFs], []}}]}
end.
-list2op([E], _Op) ->
+list2op([E], _Op, _Anno) ->
E;
-list2op([E | Es], Op) ->
- {op,0,Op,E,list2op(Es, Op)}.
+list2op([E | Es], Op, Anno) ->
+ {op,Anno,Op,E,list2op(Es, Op, Anno)}.
join_lookup_info(QNum, QInfo, G) ->
{generate, _, LEInfo}=I = lists:nth(QNum, QInfo),
@@ -1704,7 +1709,7 @@ eval_le(LE_fun, GOpt) ->
prep_qlc_lc({simple_v1, PVar, LE_fun, L}, Opt, GOpt, _H) ->
check_lookup_option(Opt, false),
- prep_simple_qlc(PVar, L, eval_le(LE_fun, GOpt), Opt);
+ prep_simple_qlc(PVar, anno(L), eval_le(LE_fun, GOpt), Opt);
prep_qlc_lc({qlc_v1, QFun, CodeF, Qdata0, QOpt}, Opt, GOpt, _H) ->
F = fun(?qual_data(_QNum, _GoI, _SI, fil)=QualData, ModGens) ->
{QualData, ModGens};
@@ -1821,7 +1826,7 @@ may_create_simple(#qlc_opt{unique = Unique, cache = Cache} = Opt,
if
Unique and not IsUnique;
(Cache =/= false) and not IsCached ->
- prep_simple_qlc(?SIMPLE_QVAR, 1, Prep, Opt);
+ prep_simple_qlc(?SIMPLE_QVAR, anno(1), Prep, Opt);
true ->
Prep
end.
@@ -2764,8 +2769,8 @@ tmp_filename(TmpDirOpt) ->
U = "_",
Node = node(),
Pid = os:getpid(),
- {MSecs,Secs,MySecs} = erlang:now(),
- F = lists:concat([?MODULE,U,Node,U,Pid,U,MSecs,U,Secs,U,MySecs]),
+ Unique = erlang:unique_integer(),
+ F = lists:concat([?MODULE,U,Node,U,Pid,U,Unique]),
TmpDir = case TmpDirOpt of
"" ->
{ok, CurDir} = file:get_cwd(),
@@ -3772,6 +3777,15 @@ grd(Fun, Arg) ->
false
end.
+anno0() ->
+ anno(0).
+
+anno1() ->
+ anno(1).
+
+anno(L) ->
+ erl_anno:new(L).
+
family(L) ->
sofs:to_external(sofs:relation_to_family(sofs:relation(L))).
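Two themes run through the qlc changes: abstract code is now built with proper erl_anno annotations (the anno0/anno1/anno helpers at the end) instead of bare integers, and tmp_filename/1 drops the deprecated erlang:now/0 in favour of erlang:unique_integer/0 when generating unique file names. A sketch of that unique-name idiom; the "qlc_tmp_" prefix and the function name are illustrative:

    %% Unique per call on this node, no wall clock involved
    %% (unlike the old erlang:now/0-based name).
    tmp_name(Dir) ->
        Unique = erlang:unique_integer(),
        filename:join(Dir, lists:concat(["qlc_tmp_", os:getpid(), "_", Unique])).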
diff --git a/lib/stdlib/src/qlc_pt.erl b/lib/stdlib/src/qlc_pt.erl
index b6bb758dfb..a4d2157b35 100644
--- a/lib/stdlib/src/qlc_pt.erl
+++ b/lib/stdlib/src/qlc_pt.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -39,7 +39,12 @@
opt % #qlc_opt
}).
--record(state, {imp, maxargs, records, xwarnings = []}).
+-record(state, {imp,
+ maxargs,
+ records,
+ xwarnings = [],
+ intro_vars,
+ node_info}).
%-define(debug, true).
@@ -66,37 +71,49 @@
Options :: [Option],
Option :: type_checker | compile:option()).
-parse_transform(Forms, Options) ->
+parse_transform(Forms0, Options) ->
?DEBUG("qlc Parse Transform~n", []),
- State = #state{imp = is_qlc_q_imported(Forms),
- maxargs = ?COMPILE_MAX_NUM_OF_ARGS,
- records = record_attributes(Forms)},
- case called_from_type_checker(Options) of
- true ->
- %% The returned value should conform to the types, but
- %% need not evaluate to anything meaningful.
- L = 0,
- {tuple,_,Fs0} = abstr(#qlc_lc{}, L),
- F = fun(_Id, LC, A) ->
- Init = simple(L, 'V', LC, L),
- {{tuple,L,set_field(#qlc_lc.lc, Fs0, Init)}, A}
- end,
- {Forms1,ok} = qlc_mapfold(F, ok, Forms, State),
- Forms1;
- false ->
- FormsNoShadows = no_shadows(Forms, State),
- case compile_messages(Forms, FormsNoShadows, Options, State) of
- {[],[],Warnings} ->
- {NewForms, State1} = transform(FormsNoShadows, State),
- ExtraWs = State1#state.xwarnings,
- {[],WForms} = no_duplicates(NewForms, [], Warnings,
- ExtraWs, Options),
- WForms ++ NewForms;
- {E0,Errors,Warnings} ->
- {EForms,WForms} = no_duplicates(Forms, E0++Errors,
- Warnings, [], Options),
- EForms ++ WForms ++ Forms
- end
+ Imported = is_qlc_q_imported(Forms0),
+ {Forms, FormsNoShadows, State} = initiate(Forms0, Imported),
+ NodeInfo = State#state.node_info,
+ try
+ case called_from_type_checker(Options) of
+ true ->
+ %% The returned value should conform to the types, but
+ %% need not evaluate to anything meaningful.
+ L = anno0(),
+ {tuple,_,Fs0} = abstr(#qlc_lc{}, L),
+ F = fun(_Id, LC, A) ->
+ Init = simple(L, 'V', LC, L),
+ {{tuple,L,set_field(#qlc_lc.lc, Fs0, Init)}, A}
+ end,
+ {Forms1,ok} = qlc_mapfold(F, ok, Forms, State),
+ Forms1;
+ false ->
+ case
+ compile_messages(Forms, FormsNoShadows, Options, State)
+ of
+ {[],Warnings} ->
+ ?DEBUG("node info1 ~p~n",
+ [lists:sort(ets:tab2list(NodeInfo))]),
+ {NewForms, State1} =
+ transform(FormsNoShadows, State),
+ ExtraWs = State1#state.xwarnings,
+ {[],WForms} = no_duplicates(NewForms, [], Warnings,
+ ExtraWs, Options),
+ (restore_locations(WForms, State) ++
+ restore_anno(NewForms, NodeInfo));
+ {Errors,Warnings} ->
+ ?DEBUG("node info2 ~p~n",
+ [lists:sort(ets:tab2list(NodeInfo))]),
+ {EForms,WForms} = no_duplicates(FormsNoShadows, Errors,
+ Warnings, [],
+ Options),
+ restore_locations(EForms ++ WForms, State) ++ Forms0
+ end
+ end
+ after
+ true = ets:delete(NodeInfo)
end.
-spec(transform_from_evaluator(LC, Bs) -> Expr when
@@ -124,30 +141,78 @@ called_from_type_checker(Options) ->
lists:member(type_checker, Options).
transform_expression(LC, Bs0, WithLintErrors) ->
- L = 1,
+ L = anno1(),
As = [{var,L,V} || {V,_Val} <- Bs0],
Ar = length(As),
F = {function,L,bar,Ar,[{clause,L,As,[],[?QLC_Q(L, L, L, L, LC, [])]}]},
- Forms = [{attribute,L,file,{"foo",L}},
- {attribute,L,module,foo}, F],
- State = #state{imp = false,
- maxargs = ?EVAL_MAX_NUM_OF_ARGS,
- records = record_attributes(Forms)},
+ Forms0 = [{attribute,L,file,{"foo",L}},
+ {attribute,L,module,foo}, F],
+ {Forms, FormsNoShadows, State} = initiate(Forms0, false),
+ NodeInfo = State#state.node_info,
Options = [],
- FormsNoShadows = no_shadows(Forms, State),
- case compile_messages(Forms, FormsNoShadows, Options, State) of
- {[],[],_Warnings} ->
- {NewForms,_State1} = transform(FormsNoShadows, State),
- {function,L,bar,Ar,[{clause,L,As,[],[NF]}]} =
- lists:last(NewForms),
- {ok,NF};
- {E0,Errors,_Warnings} when WithLintErrors ->
- {not_ok,mforms(error, E0 ++ Errors)};
- {E0,Errors0,_Warnings} ->
- [{error,Reason} | _] = mforms(error, E0++Errors0),
- {not_ok, {error, ?APIMOD, Reason}}
+ try compile_messages(Forms, FormsNoShadows, Options, State) of
+ {Errors0,_Warnings} ->
+ case restore_locations(Errors0, State) of
+ [] ->
+ {NewForms,_State1} = transform(FormsNoShadows, State),
+ NewForms1 = restore_anno(NewForms, NodeInfo),
+ {function,L,bar,Ar,[{clause,L,As,[],[NF]}]} =
+ lists:last(NewForms1),
+ {ok,NF};
+ Errors when WithLintErrors ->
+ {not_ok,mforms(error, Errors)};
+ Errors ->
+ [{error,Reason} | _] = mforms(error, Errors),
+ {not_ok, {error, ?APIMOD, Reason}}
+ end
+ after
+ true = ets:delete(NodeInfo)
end.
+-ifdef(DEBUG).
+-define(ILIM, 0).
+-else.
+-define(ILIM, 255).
+-endif.
+
+initiate(Forms0, Imported) ->
+ NodeInfo = ets:new(?APIMOD, []),
+ true = ets:insert(NodeInfo, {var_n, ?ILIM}),
+ exclude_integers_from_unique_line_numbers(Forms0, NodeInfo),
+ ?DEBUG("node info0 ~p~n",
+ [lists:sort(ets:tab2list(NodeInfo))]),
+ State0 = #state{imp = Imported,
+ maxargs = ?EVAL_MAX_NUM_OF_ARGS,
+ records = record_attributes(Forms0),
+ node_info = NodeInfo},
+ Forms = save_anno(Forms0, NodeInfo),
+ FormsNoShadows = no_shadows(Forms, State0),
+ IntroVars = intro_variables(FormsNoShadows, State0),
+ State = State0#state{intro_vars = IntroVars},
+ {Forms, FormsNoShadows, State}.
+
+%% Make sure restore_locations() does not confuse integers with (the
+%% unique) line numbers.
+exclude_integers_from_unique_line_numbers(Forms, NodeInfo) ->
+ Integers = find_integers(Forms),
+ lists:foreach(fun(I) -> ets:insert(NodeInfo, {I}) end, Integers).
+
+find_integers(Forms) ->
+ F = fun(A) ->
+ Fs1 = erl_parse:map_anno(fun(_) -> A end, Forms),
+ ordsets:from_list(integers(Fs1, []))
+ end,
+ ordsets:to_list(ordsets:intersection(F(anno0()), F(anno1()))).
+
+integers([E | Es], L) ->
+ integers(Es, integers(E, L));
+integers(T, L) when is_tuple(T) ->
+ integers(tuple_to_list(T), L);
+integers(I, L) when is_integer(I), I > ?ILIM ->
+ [I | L];
+integers(_, L) ->
+ L.
+
-define(I(I), {integer, L, I}).
-define(A(A), {atom, L, A}).
-define(V(V), {var, L, V}).
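The qlc_pt rewrite replaces the old trick of smuggling data into line numbers (tag_lines/map_lines and the 1 bsl 23 eof line) with an ETS table: initiate/2 creates it, save_anno/2 (not shown in this excerpt) gives every node a unique integer annotation and records the original location, restore_locations/2 and restore_anno/2 put the real locations back, and find_integers/integers above make sure literal integers in the forms are never mistaken for those keys. The traversal primitive is erl_parse:map_anno/2; a minimal, self-contained sketch of the numbering idea (names are illustrative):

    %% Number every node of an abstract expression with a fresh integer,
    %% remembering the original location in an ETS table keyed on that integer.
    number_nodes(Expr) ->
        Tab = ets:new(anno_map, []),
        true = ets:insert(Tab, {counter, 0}),
        NExpr = erl_parse:map_anno(
                  fun(Anno) ->
                          N = ets:update_counter(Tab, counter, 1),
                          true = ets:insert(Tab, {N, erl_anno:location(Anno)}),
                          erl_anno:new(N)
                  end, Expr),
        {NExpr, Tab}.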
@@ -164,9 +229,15 @@ mforms(Tag, L) ->
%% Avoid duplicated lint warnings and lint errors. Care has been taken
%% not to introduce unused variables in the transformed code.
%%
-no_duplicates(Forms, Errors, Warnings0, ExtraWarnings, Options) ->
+no_duplicates(Forms, Errors, Warnings0, ExtraWarnings0, Options) ->
%% Some mistakes such as "{X} =:= {}" are found by strong
%% validation as well as by qlc. Prefer the warnings from qlc:
+ %% The Compiler and qlc do not agree on the location of errors.
+ %% For now, qlc's messages about failing patterns and filters
+ %% are ignored.
+ ExtraWarnings = [W || W={_File,[{_,qlc,Tag}]} <-
+ ExtraWarnings0,
+ not lists:member(Tag, [nomatch_pattern,nomatch_filter])],
Warnings1 = mforms(Warnings0) --
([{File,[{L,v3_core,nomatch}]} ||
{File,[{L,qlc,M}]} <- mforms(ExtraWarnings),
@@ -185,13 +256,22 @@ mforms(L) ->
lists:sort([{File,[M]} || {File,Ms} <- L, M <- Ms]).
mforms2(Tag, L) ->
- Line = 0,
+ Line = anno0(),
ML = lists:flatmap(fun({File,Ms}) ->
- [[{attribute,Line,file,{File,Line}}, {Tag,M}] ||
+ [[{attribute,Line,file,{File,0}}, {Tag,M}] ||
M <- Ms]
end, lists:sort(L)),
lists:flatten(lists:sort(ML)).
+restore_locations([T | Ts], State) ->
+ [restore_locations(T, State) | restore_locations(Ts, State)];
+restore_locations(T, State) when is_tuple(T) ->
+ list_to_tuple(restore_locations(tuple_to_list(T), State));
+restore_locations(I, State) when I > ?ILIM ->
+ restore_loc(I, State);
+restore_locations(T, _State) ->
+ T.
+
is_qlc_q_imported(Forms) ->
[[] || {attribute,_,import,{?APIMOD,FAs}} <- Forms, {?Q,1} <- FAs] =/= [].
@@ -212,13 +292,20 @@ compile_messages(Forms, FormsNoShadows, Options, State) ->
(_QId, Q, GA, A) ->
{Q,GA,A}
end,
- {_,BGens} = qual_fold(BGenF, [], [], FormsNoShadows, State),
+ {_,BGens} = qual_fold(BGenF, [], [], Forms, State),
GenForm = used_genvar_check(FormsNoShadows, State),
?DEBUG("GenForm = ~ts~n", [catch erl_pp:form(GenForm)]),
- WarnFun = fun(Id, LC, A) -> {tag_lines(LC, get_lcid_no(Id)), A} end,
+ {GEs,_} = compile_forms([GenForm], Options),
+ UsedGenVarMsgs = used_genvar_messages(GEs, State),
+ NodeInfo = State#state.node_info,
+ WarnFun = fun(_Id, LC, A) -> {lc_nodes(LC, NodeInfo), A} end,
{WForms,ok} = qlc_mapfold(WarnFun, ok, Forms, State),
- {Es,Ws} = compile_forms(WForms ++ [GenForm], Options),
- {badarg(Forms, State),tagged_messages(Es)++BGens,tagged_messages(Ws)}.
+ {Es,Ws} = compile_forms(WForms, Options),
+ LcEs = lc_messages(Es, NodeInfo),
+ LcWs = lc_messages(Ws, NodeInfo),
+ Errors = badarg(Forms, State) ++ UsedGenVarMsgs++LcEs++BGens,
+ Warnings = LcWs,
+ {Errors,Warnings}.
badarg(Forms, State) ->
F = fun(_Id, {lc,_L,_E,_Qs}=LC, Es) ->
@@ -230,54 +317,39 @@ badarg(Forms, State) ->
{_,E0} = qlc_mapfold(F, [], Forms, State),
E0.
-tag_lines(E, No) ->
- map_lines(fun(Id) ->
- case is_lcid(Id) of
- true -> Id;
- false -> make_lcid(Id, No)
- end
- end, E).
-
-map_lines(F, E) ->
- erl_lint:modify_line(E, F).
-
-tagged_messages(MsL) ->
- [{File,
- [{Loc,Mod,untag(T)} || {Loc0,Mod,T} <- Ms,
- {true,Loc} <- [tloc(Loc0)]]}
- || {File,Ms} <- MsL]
- ++
+lc_nodes(E, NodeInfo) ->
+ erl_parse:map_anno(fun(Anno) ->
+ N = erl_anno:line(Anno),
+ [{N, Data}] = ets:lookup(NodeInfo, N),
+ NData = Data#{inside_lc => true},
+ true = ets:insert(NodeInfo, {N, NData}),
+ Anno
+ end, E).
+
+used_genvar_messages(MsL, S) ->
[{File,[{Loc,?APIMOD,{used_generator_variable,V}}]}
- || {_, Ms} <- MsL,
+ || {_, Ms} <- MsL,
{XLoc,erl_lint,{unbound_var,_}} <- Ms,
- {Loc,File,V} <- [extra(XLoc)]].
-
-tloc({Id,Column}) ->
- {IsLcid,T} = tloc(Id),
- {IsLcid,{T,Column}};
-tloc(Id) ->
- IsLcid = is_lcid(Id),
- {IsLcid,case IsLcid of
- true -> get_lcid_line(Id);
- false -> any
- end}.
-
-extra({extra,Line,File,V}) ->
- {Line,File,V};
-extra({Line,Column}) ->
- case extra(Line) of
- {L,File,V} -> {{L,Column},File,V};
- Else -> Else
- end;
-extra(Else) ->
- Else.
-
-untag([E | Es]) -> [untag(E) | untag(Es)];
-untag(T) when is_tuple(T) -> list_to_tuple(untag(tuple_to_list(T)));
-untag(E) ->
- case is_lcid(E) of
- true -> get_lcid_line(E);
- false -> E
+ {Loc,File,V} <- [genvar_pos(XLoc, S)]].
+
+lc_messages(MsL, NodeInfo) ->
+ [{File,[{Loc,Mod,T} || {Loc,Mod,T} <- Ms, lc_loc(Loc, NodeInfo)]} ||
+ {File,Ms} <- MsL].
+
+lc_loc(N, NodeInfo) ->
+ case ets:lookup(NodeInfo, N) of
+ [{N, #{inside_lc := true}}] ->
+ true;
+ [{N, _}] ->
+ false
+ end.
+
+genvar_pos(Location, S) ->
+ case ets:lookup(S#state.node_info, Location) of
+ [{Location, #{genvar_pos := Pos}}] ->
+ Pos;
+ [] ->
+ Location
end.
%% -> [{Qid,[variable()]}].
@@ -293,6 +365,7 @@ untag(E) ->
%% variables (unless they are unsafe).
%%
intro_variables(FormsNoShadows, State) ->
+ NodeInfo = State#state.node_info,
Fun = fun(QId, {T,_L,P0,_E0}=Q, {GVs,QIds}, Foo) when T =:= b_generate;
T =:= generate ->
PVs = qlc:var_ufold(fun({var,_,V}) -> {QId,V} end, P0),
@@ -302,10 +375,11 @@ intro_variables(FormsNoShadows, State) ->
%% where E is an LC expression consisting of a
%% template mentioning all variables occurring in F.
Vs = ordsets:to_list(qlc:vars(Filter0)),
- Id = QId#qid.lcid,
- LC1 = embed_vars(intro_set_line({QId,f1}, Vs), Id),
- LC2 = embed_vars(intro_set_line({QId,f2}, Vs), Id),
- AnyLine = -1,
+ AnyLine = anno0(),
+ Vars = [{var,AnyLine,V} || V <- Vs],
+ LC = embed_vars(Vars, AnyLine),
+ LC1 = intro_anno(LC, before, QId, NodeInfo),
+ LC2 = intro_anno(LC, 'after', QId, NodeInfo),
Filter = {block,AnyLine,[LC1,Filter0,LC2]},
{Filter,{GVs,[{QId,[]} | QIds]},Foo}
end,
@@ -317,9 +391,15 @@ intro_variables(FormsNoShadows, State) ->
Es0 = compile_errors(FForms),
%% A variable is bound inside the filter if it is not bound before
%% the filter, but it is bound after the filter (obviously).
- Before = [{QId,V} || {{QId,f1},erl_lint,{unbound_var,V}} <- Es0],
- After = [{QId,V} || {{QId,f2},erl_lint,{unbound_var,V}} <- Es0],
- Unsafe = [{QId,V} || {{QId,f2},erl_lint,{unsafe_var,V,_Where}} <- Es0],
+ Before = [{QId,V} ||
+ {L,erl_lint,{unbound_var,V}} <- Es0,
+ {_L,{QId,before}} <- ets:lookup(NodeInfo, L)],
+ After = [{QId,V} ||
+ {L,erl_lint,{unbound_var,V}} <- Es0,
+ {_L,{QId,'after'}} <- ets:lookup(NodeInfo, L)],
+ Unsafe = [{QId,V} ||
+ {L,erl_lint,{unsafe_var,V,_Where}} <- Es0,
+ {_L,{QId,'after'}} <- ets:lookup(NodeInfo, L)],
?DEBUG("Before = ~p~n", [Before]),
?DEBUG("After = ~p~n", [After]),
?DEBUG("Unsafe = ~p~n", [Unsafe]),
@@ -328,9 +408,14 @@ intro_variables(FormsNoShadows, State) ->
I1 = family(IV ++ GenVars),
sofs:to_external(sofs:family_union(sofs:family(QIds), I1)).
-intro_set_line(Tag, Vars) ->
- L = erl_parse:set_line(1, fun(_) -> Tag end),
- [{var,L,V} || V <- Vars].
+intro_anno(LC, Where, QId, NodeInfo) ->
+ Data = {QId,Where},
+ Fun = fun(Anno) ->
+ Location = erl_anno:location(Anno),
+ true = ets:insert(NodeInfo, {Location,Data}),
+ Anno
+ end,
+ erl_parse:map_anno(Fun, save_anno(LC, NodeInfo)).
compile_errors(FormsNoShadows) ->
case compile_forms(FormsNoShadows, []) of
@@ -341,11 +426,8 @@ compile_errors(FormsNoShadows) ->
lists:flatmap(fun({_File,Es}) -> Es end, Errors)
end.
--define(MAX_NUM_OF_LINES, 23). % assume max 1^23 lines (> 8 millions)
-
compile_forms(Forms0, Options) ->
- Forms = [F || F <- Forms0, element(1, F) =/= eof] ++
- [{eof,1 bsl ?MAX_NUM_OF_LINES}],
+ Forms = [F || F <- Forms0, element(1, F) =/= eof] ++ [{eof,anno0()}],
try
case compile:noenv_forms(Forms, compile_options(Options)) of
{ok, _ModName, Ws0} ->
@@ -384,20 +466,23 @@ bitstr_options() ->
%% for each ListExpr. The expression mentions all introduced variables
%% occurring in ListExpr. Running the function through the compiler
%% yields error messages for erroneous use of introduced variables.
-%% The messages have the form
-%% {{extra,LineNo,File,Var},Module,{unbound_var,V}}, where Var is the
-%% original variable name and V is the name invented by no_shadows/2.
%%
used_genvar_check(FormsNoShadows, State) ->
- F = fun(QId, {T, Ln, _P, LE}=Q, {QsIVs0, Exprs0}, IVsSoFar0)
+ NodeInfo = State#state.node_info,
+ F = fun(QId, {T, Ln, _P, LE}=Q, {QsIVs0, Exprs0}, IVsSoFar0)
when T =:= b_generate; T =:= generate ->
- F = fun({var, _, V}=Var) ->
- {var, L, OrigVar} = undo_no_shadows(Var),
- AF = fun(Line) ->
- {extra, Line, get(?QLC_FILE), OrigVar}
- end,
- L2 = erl_parse:set_line(L, AF),
- {var, L2, V}
+ F = fun(Var) ->
+ {var, Anno0, OrigVar} =
+ undo_no_shadows(Var, State),
+ {var, Anno, _} = NewVar = save_anno(Var, NodeInfo),
+ Location0 = erl_anno:location(Anno0),
+ Location = erl_anno:location(Anno),
+ [{Location, Data}] =
+ ets:lookup(NodeInfo, Location),
+ Pos = {Location0,get(?QLC_FILE),OrigVar},
+ NData = Data#{genvar_pos => Pos},
+ true = ets:insert(NodeInfo, {Location, NData}),
+ NewVar
end,
Vs = [Var || {var, _, V}=Var <- qlc:var_fold(F, [], LE),
lists:member(V, IVsSoFar0)],
@@ -411,12 +496,12 @@ used_genvar_check(FormsNoShadows, State) ->
{QsIVs, IVsSoFar} = q_intro_vars(QId, QsIVs0, IVsSoFar0),
{Filter, {QsIVs, Exprs}, IVsSoFar}
end,
- IntroVars = intro_variables(FormsNoShadows, State),
- Acc0 = {IntroVars, [{atom, 0, true}]},
+ Acc0 = {State#state.intro_vars, [{atom, anno0(), true}]},
{_, {[], Exprs}} = qual_fold(F, Acc0, [], FormsNoShadows, State),
FunctionNames = [Name || {function, _, Name, _, _} <- FormsNoShadows],
UniqueFName = qlc:aux_name(used_genvar, 1, sets:from_list(FunctionNames)),
- {function,0,UniqueFName,0,[{clause,0,[],[],lists:reverse(Exprs)}]}.
+ A = anno0(),
+ {function,A,UniqueFName,0,[{clause,A,[],[],lists:reverse(Exprs)}]}.
q_intro_vars(QId, [{QId, IVs} | QsIVs], IVsSoFar) -> {QsIVs, IVs ++ IVsSoFar}.
@@ -514,7 +599,8 @@ q_intro_vars(QId, [{QId, IVs} | QsIVs], IVsSoFar) -> {QsIVs, IVs ++ IVsSoFar}.
%% (calling LEf returns the objects generated by LE).
transform(FormsNoShadows, State) ->
- IntroVars = intro_variables(FormsNoShadows, State),
+ _ = erlang:system_flag(backtrace_depth, 500),
+ IntroVars = State#state.intro_vars,
AllVars = sets:from_list(ordsets:to_list(qlc:vars(FormsNoShadows))),
?DEBUG("AllVars = ~p~n", [sets:to_list(AllVars)]),
F1 = fun(QId, {generate,_,P,LE}, Foo, {GoI,SI}) ->
@@ -588,8 +674,8 @@ transform(FormsNoShadows, State) ->
[{match,L,{var,L,Fun},FunC},
{call,L,{var,L,Fun},As0}]}]}},
{ok, OrigE0} = dict:find(Id, Source),
- OrigE = undo_no_shadows(OrigE0),
- QCode = qcode(OrigE, XQCs, Source, L),
+ OrigE = undo_no_shadows(OrigE0, State),
+ QCode = qcode(OrigE, XQCs, Source, L, State),
Qdata = qdata(XQCs, L),
TemplateInfo =
template_columns(Qs, E, AllIVs, Dependencies, State),
@@ -598,7 +684,7 @@ transform(FormsNoShadows, State) ->
Opt = opt_info(TemplateInfo, SizeInfo, JoinInfo, MSQs, L,
EqColumnConstants, EqualColumnConstants),
LCTuple =
- case qlc_kind(OrigE, Qs) of
+ case qlc_kind(OrigE, Qs, State) of
qlc ->
{tuple,L,[?A(qlc_v1),FunW,QCode,Qdata,Opt]};
{simple, PL, LE, V} ->
@@ -612,7 +698,7 @@ transform(FormsNoShadows, State) ->
end,
{NForms,{[],XW}} = qlc_mapfold(F2, {IntroVars,[]}, ModifiedForms1, State),
display_forms(NForms),
- {restore_line_numbers(NForms), State#state{xwarnings = XW}}.
+ {NForms, State#state{xwarnings = XW}}.
join_kind(Qs, LcL, AllIVs, Dependencies, State) ->
{EqualCols2, EqualColsN} = equal_columns(Qs, AllIVs, Dependencies, State),
@@ -623,20 +709,21 @@ join_kind(Qs, LcL, AllIVs, Dependencies, State) ->
if
EqualColsN =/= []; MatchColsN =/= [] ->
{[],
- [{get(?QLC_FILE),[{abs(LcL),?APIMOD,too_complex_join}]}]};
+ [{get(?QLC_FILE),[{LcL,?APIMOD,too_complex_join}]}]};
EqualCols2 =:= [], MatchCols2 =:= [] ->
{[], []};
length(Tables) > 2 ->
{[],
- [{get(?QLC_FILE),[{abs(LcL),?APIMOD,too_many_joins}]}]};
+ [{get(?QLC_FILE),[{LcL,?APIMOD,too_many_joins}]}]};
EqualCols2 =:= MatchCols2 ->
{EqualCols2, []};
true ->
{{EqualCols2, MatchCols2}, []}
end.
-qlc_kind(OrigE, Qs) ->
- {OrigFilterData, OrigGeneratorData} = qual_data(undo_no_shadows(Qs)),
+qlc_kind(OrigE, Qs, State) ->
+ {OrigFilterData, OrigGeneratorData} =
+ qual_data(undo_no_shadows(Qs, State)),
OrigAllFilters = filters_as_one(OrigFilterData),
{_FilterData, GeneratorData} = qual_data(Qs),
case {OrigE, OrigAllFilters, OrigGeneratorData} of
@@ -663,12 +750,12 @@ warn_failing_qualifiers(Qualifiers, AllIVs, Dependencies, State) ->
lists:foldl(fun({_QId,{fil,_Filter}}, {[]=Frames,Warnings}) ->
{Frames,Warnings};
({_QId,{fil,Filter}}, {Frames,Warnings}) ->
- case filter(set_line(Filter, 0), Frames, BindFun,
+ case filter(reset_anno(Filter), Frames, BindFun,
State, Imported) of
[] ->
{[],
[{get(?QLC_FILE),
- [{abs_loc(element(2, Filter)),?APIMOD,
+ [{loc(element(2, Filter)),?APIMOD,
nomatch_filter}]} | Warnings]};
Frames1 ->
{Frames1,Warnings}
@@ -678,7 +765,7 @@ warn_failing_qualifiers(Qualifiers, AllIVs, Dependencies, State) ->
{failed, _, _} ->
{Frames,
[{get(?QLC_FILE),
- [{abs_loc(element(2, Pattern)),?APIMOD,
+ [{loc(element(2, Pattern)),?APIMOD,
nomatch_pattern}]} | Warnings]};
_ ->
{Frames,Warnings}
@@ -751,8 +838,8 @@ opt_constants(L, ColumnConstants) ->
|| IdNo <- Ns]
++ [{clause,L,[?V('_')],[],[?A(no_column_fun)]}].
-abstr(Term, Line) ->
- erl_parse:abstract(Term, Line).
+abstr(Term, Anno) ->
+ erl_parse:abstract(Term, loc(Anno)).
%% Extra generators are introduced for join.
join_quals(JoinInfo, QCs, L, LcNo, ExtraConstants, AllVars) ->
@@ -837,9 +924,10 @@ join_handle(AP, L, [F, H, O, C], Constants) ->
{{var, _, _}, []} ->
{'fun',L,{clauses,[{clause,L,[H],[],[H]}]}};
_ ->
+ A = anno0(),
G0 = [begin
- Call = {call,0,{atom,0,element},[{integer,0,Col},O]},
- list2op([{op,0,Op,Con,Call} || {Con,Op} <- Cs], 'or')
+ Call = {call,A,{atom,A,element},[{integer,A,Col},O]},
+ list2op([{op,A,Op,Con,Call} || {Con,Op} <- Cs], 'or')
end || {Col,Cs} <- Constants],
G = if G0 =:= [] -> G0; true -> [G0] end,
CC1 = {clause,L,[AP],G,[{cons,L,O,closure({call,L,F,[F,C]},L)}]},
@@ -876,14 +964,15 @@ join_handle_constants(QId, ExtraConstants) ->
%% order the traverse fun would return them.
column_fun(Columns, QualifierNumber, LcL) ->
+ A = anno0(),
ColCls0 =
[begin
true = Vs0 =/= [], % at least one value to look up
Vs1 = list2cons(Vs0),
- Fils1 = {tuple,0,[{atom,0,FTag},
+ Fils1 = {tuple,A,[{atom,A,FTag},
lists:foldr
- (fun(F, A) -> {cons,0,{integer,0,F},A}
- end, {nil,0}, Fils)]},
+ (fun(F, Ac) -> {cons,A,{integer,A,F},Ac}
+ end, {nil,A}, Fils)]},
Tag = case ordsets:to_list(qlc:vars(Vs1)) of
Imp when length(Imp) > 0, % imported vars
length(Vs0) > 1 ->
@@ -891,13 +980,13 @@ column_fun(Columns, QualifierNumber, LcL) ->
_ ->
values
end,
- Vs = {tuple,0,[{atom,0,Tag},Vs1,Fils1]},
- {clause,0,[erl_parse:abstract(Col)],[],[Vs]}
+ Vs = {tuple,A,[{atom,A,Tag},Vs1,Fils1]},
+ {clause,A,[erl_parse:abstract(Col)],[],[Vs]}
end ||
{{CIdNo,Col}, Vs0, {FTag,Fils}} <- Columns,
CIdNo =:= QualifierNumber]
- ++ [{clause,0,[{var,0,'_'}],[],[{atom,0,false}]}],
- ColCls = set_line(ColCls0, LcL),
+ ++ [{clause,A,[{var,A,'_'}],[],[{atom,A,false}]}],
+ ColCls = set_anno(ColCls0, LcL),
{'fun', LcL, {clauses, ColCls}}.
%% Tries to find columns of the template that (1) are equal to (or
@@ -920,7 +1009,7 @@ template_columns(Qs0, E0, AllIVs, Dependencies, State) ->
MatchColumns = eq_columns2(Qs, AllIVs, Dependencies, State),
Equal = template_cols(EqualColumns),
Match = template_cols(MatchColumns),
- L = 0,
+ L = anno0(),
if
Match =:= Equal ->
[{?V('_'), Match}];
@@ -947,7 +1036,7 @@ template_cols(ColumnClasses) ->
template_as_pattern(E) ->
P = simple_template(E),
- {?TID,foo,foo,{gen,P,{nil,0}}}.
+ {?TID,foo,foo,{gen,P,{nil,anno0()}}}.
simple_template({call,L,{remote,_,{atom,_,erlang},{atom,_,element}}=Call,
[{integer,_,I}=A1,A2]}) when I > 0 ->
@@ -1004,10 +1093,10 @@ match_spec_quals(Template, Dependencies, Qualifiers, State) ->
GQId =:= QId2,
{FQId,{fil,F}}=Filter <- Filters, % guard filters only
FQId =:= QId]
- ++ [{GId#qid.no,Pattern,[],{atom,0,true}} ||
+ ++ [{GId#qid.no,Pattern,[],{atom,anno0(),true}} ||
{GId,{gen,Pattern,_}} <- GeneratorData,
lists:member(GId, NoFilterGIds)],
- E = {nil, 0},
+ E = {nil, anno0()},
GF = [{{GNum,Pattern},Filter} ||
{GNum,Pattern,Filter,F} <- Candidates,
no =/= try_ms(E, Pattern, F, State)],
@@ -1024,7 +1113,7 @@ match_spec_quals(Template, Dependencies, Qualifiers, State) ->
%% expression can be replaced by a match specification.
[{GNum, AbstrMS, all}]
catch _:_ ->
- {TemplVar, _} = anon_var({var,0,'_'}, 0),
+ {TemplVar, _} = anon_var({var,anno0(),'_'}, 0),
[one_gen_match_spec(GNum, Pattern, GFilterData, State, TemplVar) ||
{{GNum,Pattern},GFilterData} <- GFFL]
end.
@@ -1038,7 +1127,7 @@ gen_ms(E, Pattern, GFilterData, State) ->
{ok, MS, AMS} = try_ms(E, Pattern, filters_as_one(GFilterData), State),
case MS of
[{'$1',[true],['$1']}] ->
- {atom, 0, no_match_spec};
+ {atom, anno0(), no_match_spec};
_ ->
AMS
end.
@@ -1060,7 +1149,7 @@ pattern_as_template({match,_,_E,{var,_,_}=V}=P, _TemplVar) ->
pattern_as_template({match,_,{var,_,_}=V,_E}=P, _TemplVar) ->
{V, P};
pattern_as_template(E, TemplVar) ->
- L = 0,
+ L = anno0(),
{TemplVar, {match, L, E, TemplVar}}.
%% Tries to find columns which are compared or matched against
@@ -1203,7 +1292,7 @@ lu_skip(ColConstants, FilterData, PatternFrame, PatternVars,
ColFil = [{Column, FId#qid.no} ||
{FId,{fil,Fil}} <-
filter_list(FilterData, Dependencies, State),
- [] =/= (SFs = safe_filter(set_line(Fil, 0), PatternFrames,
+ [] =/= (SFs = safe_filter(reset_anno(Fil), PatternFrames,
BindFun, State, Imported)),
{GId,PV} <- PatternVars,
[] =/=
@@ -1392,7 +1481,7 @@ join_skip(JoinClasses, FilterData, PatternFrame, PatternVars, Dependencies,
JF = unify(JoinOp, V1, V2, JF2, BindFun, Imported),
%% "Run" the filter:
- SFs = safe_filter(set_line(Fil, 0), PatternFrames,
+ SFs = safe_filter(reset_anno(Fil), PatternFrames,
BindFun, State, Imported),
JImp = qlc:vars([SFs, JF]), % kludge
lists:all(fun(Frame) ->
@@ -1403,7 +1492,7 @@ join_skip(JoinClasses, FilterData, PatternFrame, PatternVars, Dependencies,
filter_info(FilterData, AllIVs, Dependencies, State) ->
FilterList = filter_list(FilterData, Dependencies, State),
- Filter0 = set_line(filters_as_one(FilterList), 0),
+ Filter0 = reset_anno(filters_as_one(FilterList)),
Anon0 = 0,
{Filter, Anon1} = anon_var(Filter0, Anon0),
Imported = ordsets:subtract(qlc:vars(Filter), % anonymous too
@@ -1510,7 +1599,7 @@ pattern(P0, AnonI, Frame0, BindFun, State) ->
catch _:_ -> P0 % template, records already expanded
end,
%% Makes test for equality simple:
- P2 = set_line(P1, 0),
+ P2 = reset_anno(P1),
{P3, AnonN} = anon_var(P2, AnonI),
{P4, F1} = match_in_pattern(tuple2cons(P3), Frame0, BindFun),
{P, F2} = element_calls(P4, F1, BindFun, _Imp=[]), % kludge for templates
@@ -1550,8 +1639,11 @@ anon_var(E, AnonI) ->
(Var, N) -> {Var, N}
end, AnonI, E).
-set_line(T, L) ->
- map_lines(fun(_L) -> L end, T).
+reset_anno(T) ->
+ set_anno(T, anno0()).
+
+set_anno(T, A) ->
+ erl_parse:map_anno(fun(_L) -> A end, T).
-record(fstate, {state, bind_fun, imported}).
@@ -1673,7 +1765,7 @@ frames_to_columns(Fs, PatternVars, DerefFun, SelectorFun, Imp, CompOp) ->
%% same variables have to be the representatives in every frame.)
SizesVarsL =
[begin
- PatVar = {var,0,PV},
+ PatVar = {var,anno0(),PV},
PatternSizes = [pattern_size([F], PatVar, false) ||
F <- Fs],
MaxPZ = lists:max([0 | PatternSizes -- [undefined]]),
@@ -1692,8 +1784,8 @@ frames_to_columns(Fs, PatternVars, DerefFun, SelectorFun, Imp, CompOp) ->
frames2cols(Fs, PatN, PatSizes, Vars, DerefFun, SelectorFun, CompOp) ->
Rs = [ begin
RL = [{{PatN,Col},cons2tuple(element(2, Const))} ||
- {V, Col} <- lists:zip(sublist(Vars, PatSz),
- seq(1, PatSz)),
+ {V, Col} <- lists:zip(lists:sublist(Vars, PatSz),
+ lists:seq(1, PatSz)),
%% Do not handle the case where several
%% values compare equal, e.g. "X =:= 1
%% andalso X == 1.0". Looking up both
@@ -1722,11 +1814,11 @@ frames2cols(Fs, PatN, PatSizes, Vars, DerefFun, SelectorFun, CompOp) ->
[C || {_,Vs}=C <- sofs:to_external(Cs), not col_ignore(Vs, CompOp)].
pat_vars(N) ->
- [unique_var() || _ <- seq(1, N)].
+ [unique_var() || _ <- lists:seq(1, N)].
pat_tuple(Sz, Vars) when is_integer(Sz), Sz > 0 ->
TupleTail = unique_var(),
- {cons_tuple, list2cons(sublist(Vars, Sz) ++ TupleTail)};
+ {cons_tuple, list2cons(lists:sublist(Vars, Sz) ++ TupleTail)};
pat_tuple(_, _Vars) ->
unique_var().
@@ -1740,7 +1832,7 @@ col_ignore(Vs, '==') ->
pattern_sizes(PatternVars, Fs) ->
[{QId#qid.no, Size} ||
{QId,PV} <- PatternVars,
- undefined =/= (Size = pattern_size(Fs, {var,0,PV}, true))].
+ undefined =/= (Size = pattern_size(Fs, {var,anno0(),PV}, true))].
pattern_size(Fs, PatternVar, Exact) ->
Fun = fun(F) -> (deref_pattern(_Imported = []))(PatternVar, F) end,
@@ -1768,7 +1860,8 @@ prep_expr(E, F, S, BF, Imported) ->
element_calls(tuple2cons(expand_expr_records(E, S)), F, BF, Imported).
unify_column(Frame, Var, Col, BindFun, Imported) ->
- Call = {call,0,{atom,0,element},[{integer,0,Col}, {var,0,Var}]},
+ A = anno0(),
+ Call = {call,A,{atom,A,element},[{integer,A,Col}, {var,A,Var}]},
element_calls(Call, Frame, BindFun, Imported).
%% cons_tuple is used for representing {V1, ..., Vi | TupleTail}.
@@ -1800,19 +1893,21 @@ element_calls(E, F, _BF, _Imported) ->
{E, F}.
unique_var() ->
- {var, 0, make_ref()}.
+ {var, anno0(), make_ref()}.
is_unique_var({var, _L, V}) ->
is_reference(V).
expand_pattern_records(P, State) ->
- E = {'case',0,{atom,0,true},[{clause,0,[P],[],[{atom,0,true}]}]},
- {'case',_,_,[{clause,0,[NP],_,_}]} = expand_expr_records(E, State),
+ A = anno0(),
+ E = {'case',A,{atom,A,true},[{clause,A,[P],[],[{atom,A,true}]}]},
+ {'case',_,_,[{clause,A,[NP],_,_}]} = expand_expr_records(E, State),
NP.
expand_expr_records(E, State) ->
RecordDefs = State#state.records,
- Forms = RecordDefs ++ [{function,1,foo,0,[{clause,1,[],[],[pe(E)]}]}],
+ A = anno1(),
+ Forms = RecordDefs ++ [{function,A,foo,0,[{clause,A,[],[],[pe(E)]}]}],
[{function,_,foo,0,[{clause,_,[],[],[NE]}]}] =
erl_expand_records:module(Forms, [no_strict_record_tests]),
NE.
@@ -2126,15 +2221,15 @@ tuple2cons(E) ->
E.
list2cons([E | Es]) ->
- {cons, 0, E, list2cons(Es)};
+ {cons, anno0(), E, list2cons(Es)};
list2cons([]) ->
- {nil, 0};
+ {nil, anno0()};
list2cons(E) ->
E.
%% Returns {..., Variable} if Variable is a tuple tail.
cons2tuple({cons_tuple, Es}) ->
- {tuple, 0, cons2list(Es)};
+ {tuple, anno0(), cons2list(Es)};
cons2tuple(T) when is_tuple(T) ->
list_to_tuple(cons2tuple(tuple_to_list(T)));
cons2tuple([E | Es]) ->
@@ -2173,11 +2268,10 @@ bindings_subset(F1, F2, Imp) ->
%% not to have guard semantics, affected filters will have to be
%% recognized and excluded here as well.
try_ms(E, P, Fltr, State) ->
- L = 1,
+ L = anno1(),
Fun = {'fun',L,{clauses,[{clause,L,[P],[[Fltr]],[E]}]}},
Expr = {call,L,{remote,L,{atom,L,ets},{atom,L,fun2ms}},[Fun]},
- Form0 = {function,L,foo,0,[{clause,L,[],[],[Expr]}]},
- Form = restore_line_numbers(Form0),
+ Form = {function,L,foo,0,[{clause,L,[],[],[Expr]}]},
X = ms_transform:parse_transform(State#state.records ++ [Form], []),
case catch
begin
@@ -2194,11 +2288,11 @@ try_ms(E, P, Fltr, State) ->
end.
filters_as_one([]) ->
- {atom, 0, true};
+ {atom, anno0(), true};
filters_as_one(FilterData) ->
[{_,{fil,Filter1}} | Filters] = lists:reverse(FilterData),
lists:foldr(fun({_QId,{fil,Filter}}, AbstF) ->
- {op,0,'andalso',Filter,AbstF}
+ {op,anno0(),'andalso',Filter,AbstF}
end, Filter1, Filters).
qual_data(Qualifiers) ->
@@ -2233,38 +2327,40 @@ qdata([], L) ->
{nil,L}.
qcon(Cs) ->
- list2cons([{tuple,0,[{integer,0,Col},list2cons(qcon1(ConstOps))]} ||
+ A = anno0(),
+ list2cons([{tuple,A,[{integer,A,Col},list2cons(qcon1(ConstOps))]} ||
{Col,ConstOps} <- Cs]).
qcon1(ConstOps) ->
- [{tuple,0,[Const,abstr(Op, 0)]} || {Const,Op} <- ConstOps].
+ A = anno0(),
+ [{tuple,A,[Const,abstr(Op, A)]} || {Const,Op} <- ConstOps].
%% The original code (in Source) is used for filters and the template
%% since the translated code can have QLCs and we don't want them to
%% be visible.
-qcode(E, QCs, Source, L) ->
+qcode(E, QCs, Source, L, State) ->
CL = [begin
Bin = term_to_binary(C, [compressed]),
{bin, L, [{bin_element, L,
{string, L, binary_to_list(Bin)},
default, default}]}
end || {_,C} <- lists:keysort(1, [{qlc:template_state(),E} |
- qcode(QCs, Source)])],
+ qcode(QCs, Source, State)])],
{'fun', L, {clauses, [{clause, L, [], [], [{tuple, L, CL}]}]}}.
-qcode([{_QId, {_QIvs, {{gen,P,_LE,_GV}, GoI, _SI}}} | QCs], Source) ->
- [{GoI,undo_no_shadows(P)} | qcode(QCs, Source)];
-qcode([{QId, {_QIVs, {{fil,_F}, GoI, _SI}}} | QCs], Source) ->
+qcode([{_QId, {_QIvs, {{gen,P,_LE,_GV}, GoI, _SI}}} | QCs], Source, State) ->
+ [{GoI,undo_no_shadows(P, State)} | qcode(QCs, Source, State)];
+qcode([{QId, {_QIVs, {{fil,_F}, GoI, _SI}}} | QCs], Source, State) ->
{ok,OrigF} = dict:find(QId, Source),
- [{GoI,undo_no_shadows(OrigF)} | qcode(QCs, Source)];
-qcode([], _Source) ->
+ [{GoI,undo_no_shadows(OrigF, State)} | qcode(QCs, Source, State)];
+qcode([], _Source, _State) ->
[].
closure(Code, L) ->
{'fun',L,{clauses,[{clause,L,[],[],[Code]}]}}.
-simple(L, Var, Init, Line) ->
- {tuple,L,[?A(simple_v1),?A(Var),Init,?I(Line)]}.
+simple(L, Var, Init, Anno) ->
+ {tuple,L,[?A(simple_v1),?A(Var),Init,abstr(loc(Anno), Anno)]}.
clauses([{QId,{QIVs,{QualData,GoI,S}}} | QCs], RL, Fun, Go, NGV, E, IVs,St) ->
?DEBUG("QIVs = ~p~n", [QIVs]),
@@ -2426,19 +2522,22 @@ aux_var(Name, LcN, QN, N, AllVars) ->
qlc:aux_name(lists:concat([Name, LcN, '_', QN, '_']), N, AllVars).
no_compiler_warning(L) ->
- erl_parse:set_line(L, fun(Line) -> -abs(Line) end).
+ Anno = erl_anno:new(L),
+ erl_anno:set_generated(true, Anno).
-abs_loc(L) ->
- loc(erl_parse:set_line(L, fun(Line) -> abs(Line) end)).
-
-loc(L) ->
- {location,Location} = erl_parse:get_attribute(L, location),
- Location.
+loc(A) ->
+ erl_anno:location(A).
list2op([E], _Op) ->
E;
list2op([E | Es], Op) ->
- {op,0,Op,E,list2op(Es, Op)}.
+ {op,anno0(),Op,E,list2op(Es, Op)}.
+
+anno0() ->
+ erl_anno:new(0).
+
+anno1() ->
+ erl_anno:new(1).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -2491,13 +2590,61 @@ qlcmf(T, _F, _Imp, A, No) ->
occ_vars(E) ->
qlc:var_fold(fun({var,_L,V}) -> V end, [], E).
+%% Every Anno is replaced by a unique number. The number is used in a
+%% table that holds data about the abstract node where Anno resides.
+%% In particular, the original location is kept there, so that the
+%% original abstract code can be re-created.
+save_anno(Abstr, NodeInfo) ->
+ F = fun(Anno) ->
+ N = next_slot(NodeInfo),
+ Location = erl_anno:location(Anno),
+ Data = {N, #{location => Location}},
+ true = ets:insert(NodeInfo, Data),
+ erl_anno:new(N)
+ end,
+ erl_parse:map_anno(F, Abstr).
+
+next_slot(T) ->
+ I = ets:update_counter(T, var_n, 1),
+ case ets:lookup(T, I) of
+ [] ->
+ I;
+ _ ->
+ next_slot(T)
+ end.
+
+restore_anno(Abstr, NodeInfo) ->
+ F = fun(Anno) ->
+ Location = erl_anno:location(Anno),
+ case ets:lookup(NodeInfo, Location) of
+ [{Location, Data}] ->
+ OrigLocation = maps:get(location, Data),
+ erl_anno:set_location(OrigLocation, Anno);
+ [{Location}] -> % generated code
+ Anno;
+ [] ->
+ Anno
+ end
+ end,
+ erl_parse:map_anno(F, Abstr).
+
+restore_loc(Location, #state{node_info = NodeInfo}) ->
+ case ets:lookup(NodeInfo, Location) of
+ [{Location, #{location := OrigLocation}}] ->
+ OrigLocation;
+ [{Location}] ->
+ Location;
+ [] ->
+ Location
+ end.
+
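A minimal round-trip sketch of the scheme above (hypothetical standalone code; the real save_anno/restore_anno are local to qlc_pt and the table layout is the one shown in this patch):

    demo_anno_roundtrip() ->
        NodeInfo = ets:new(nodes, []),
        true = ets:insert(NodeInfo, {var_n, 0}),    %% slot counter used by next_slot/1
        Abstr0 = {atom, erl_anno:new(42), ok},
        Abstr1 = save_anno(Abstr0, NodeInfo),       %% annotation is now a unique key
        Abstr2 = restore_anno(Abstr1, NodeInfo),    %% original location restored
        42 = erl_anno:line(element(2, Abstr2)),
        ok.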
no_shadows(Forms0, State) ->
%% Variables that may shadow other variables are introduced in
%% LCs and Funs. Such variables (call them SV, Shadowing
%% Variables) are now renamed. Each (new) occurrence in a pattern
%% is assigned an index (integer), unique in the file.
%%
- %% The state {LastIndex,ActiveVars,UsedVars,AllVars,Singletons}
+ %% The state {LastIndex,ActiveVars,UsedVars,AllVars,Singletons,State}
%% holds the last index used for each SV (LastIndex), the SVs in
%% the current scope (ActiveVars), used SVs (UsedVars, the indexed
%% name is the key), all variables occurring in the file
@@ -2507,16 +2654,15 @@ no_shadows(Forms0, State) ->
%% the indexed name of an SV occurs in the file, next index is
%% tried (to avoid mixing up introduced names with existing ones).
%%
- %% The original names of variables are kept in the line number
- %% position of the abstract code: {var, {nos, OriginalName, L},
- %% NewName}. undo_no_shadows/1 re-creates the original code.
+ %% The original names of variables are kept in a table in State.
+ %% undo_no_shadows/2 re-creates the original code.
AllVars = sets:from_list(ordsets:to_list(qlc:vars(Forms0))),
?DEBUG("nos AllVars = ~p~n", [sets:to_list(AllVars)]),
VFun = fun(_Id, LC, Vs) -> nos(LC, Vs) end,
LI = ets:new(?APIMOD,[]),
UV = ets:new(?APIMOD,[]),
D0 = dict:new(),
- S1 = {LI, D0, UV, AllVars, []},
+ S1 = {LI, D0, UV, AllVars, [], State},
_ = qlc_mapfold(VFun, S1, Forms0, State),
?DEBUG("UsedIntroVars = ~p~n", [ets:match_object(UV, '_')]),
Singletons = ets:select(UV, ets:fun2ms(fun({K,0}) -> K end)),
@@ -2524,7 +2670,7 @@ no_shadows(Forms0, State) ->
true = ets:delete_all_objects(LI),
true = ets:delete_all_objects(UV),
%% Do it again, this time we know which variables are singletons.
- S2 = {LI, D0, UV, AllVars, Singletons},
+ S2 = {LI, D0, UV, AllVars, Singletons, State},
{Forms,_} = qlc_mapfold(VFun, S2, Forms0, State),
true = ets:delete(LI),
true = ets:delete(UV),
@@ -2568,11 +2714,11 @@ nos({lc,L,E0,Qs0}, S) ->
{Qs, S1} = lists:mapfoldl(F, S, Qs0),
{E, _} = nos(E0, S1),
{{lc,L,E,Qs}, S};
-nos({var,L,V}=Var, {_LI,Vs,UV,_A,_Sg}=S) when V =/= '_' ->
+nos({var,L,V}=Var, {_LI,Vs,UV,_A,_Sg,State}=S) when V =/= '_' ->
case used_var(V, Vs, UV) of
{true, VN} ->
- NL = nos_var(L, V),
- {{var,NL,VN}, S};
+ nos_var(L, V, State),
+ {{var,L,VN}, S};
false ->
{Var, S}
end;
@@ -2590,7 +2736,7 @@ nos_pattern([P0 | Ps0], S0, PVs0) ->
{P, S1, PVs1} = nos_pattern(P0, S0, PVs0),
{Ps, S, PVs} = nos_pattern(Ps0, S1, PVs1),
{[P | Ps], S, PVs};
-nos_pattern({var,L,V}, {LI,Vs0,UV,A,Sg}, PVs0) when V =/= '_' ->
+nos_pattern({var,L,V}, {LI,Vs0,UV,A,Sg,State}, PVs0) when V =/= '_' ->
{Name, Vs, PVs} =
case lists:keyfind(V, 1, PVs0) of
{V, VN} ->
@@ -2604,16 +2750,25 @@ nos_pattern({var,L,V}, {LI,Vs0,UV,A,Sg}, PVs0) when V =/= '_' ->
end,
{N, Vs1, [{V,VN} | PVs0]}
end,
- NL = nos_var(L, V),
- {{var,NL,Name}, {LI,Vs,UV,A,Sg}, PVs};
+ nos_var(L, V, State),
+ {{var,L,Name}, {LI,Vs,UV,A,Sg,State}, PVs};
nos_pattern(T, S0, PVs0) when is_tuple(T) ->
{TL, S, PVs} = nos_pattern(tuple_to_list(T), S0, PVs0),
{list_to_tuple(TL), S, PVs};
nos_pattern(T, S, PVs) ->
{T, S, PVs}.
-nos_var(L, Name) ->
- erl_parse:set_line(L, fun(Line) -> {nos,Name,Line} end).
+nos_var(Anno, Name, State) ->
+ NodeInfo = State#state.node_info,
+ Location = erl_anno:location(Anno),
+ case ets:lookup(NodeInfo, Location) of
+ [{Location, #{name := _}}] ->
+ true;
+ [{Location, Data}] ->
+ true = ets:insert(NodeInfo, {Location, Data#{name => Name}});
+ [] -> % cannot happen
+ true
+ end.
used_var(V, Vs, UV) ->
case dict:find(V, Vs) of
@@ -2638,69 +2793,30 @@ next_var(V, Vs, AllVars, LI, UV) ->
{VN, NVs}
end.
-undo_no_shadows(E) ->
- var_map(fun undo_no_shadows1/1, E).
-
-undo_no_shadows1({var, L, _}=Var) ->
- case erl_parse:get_attribute(L, line) of
- {line,{nos,V,_VL}} ->
- NL = erl_parse:set_line(L, fun({nos,_V,VL}) -> VL end),
- undo_no_shadows1({var, NL, V});
- _Else ->
- Var
- end.
-
-restore_line_numbers(E) ->
- var_map(fun restore_line_numbers1/1, E).
+undo_no_shadows(E, State) ->
+ var_map(fun(Anno) -> undo_no_shadows1(Anno, State) end, E).
-restore_line_numbers1({var, L, V}=Var) ->
- case erl_parse:get_attribute(L, line) of
- {line,{nos,_,_}} ->
- NL = erl_parse:set_line(L, fun({nos,_V,VL}) -> VL end),
- restore_line_numbers1({var, NL, V});
- _Else ->
+undo_no_shadows1({var, Anno, _}=Var, State) ->
+ Location = erl_anno:location(Anno),
+ NodeInfo = State#state.node_info,
+ case ets:lookup(NodeInfo, Location) of
+ [{Location, #{name := Name}}] ->
+ {var, Anno, Name};
+ _ ->
Var
end.
%% QLC identifier.
%% The first one encountered in the file has No=1.
-make_lcid(Attrs, No) when is_integer(No), No > 0 ->
- F = fun(Line) when is_integer(Line), Line < (1 bsl ?MAX_NUM_OF_LINES) ->
- sgn(Line) * ((No bsl ?MAX_NUM_OF_LINES) + sgn(Line) * Line)
- end,
- erl_parse:set_line(Attrs, F).
-
-is_lcid(Attrs) ->
- try
- {line,Id} = erl_parse:get_attribute(Attrs, line),
- is_integer(Id) andalso (abs(Id) > (1 bsl ?MAX_NUM_OF_LINES))
- catch _:_ ->
- false
- end.
-
-get_lcid_no(IdAttrs) ->
- {line,Id} = erl_parse:get_attribute(IdAttrs, line),
- abs(Id) bsr ?MAX_NUM_OF_LINES.
-
-get_lcid_line(IdAttrs) ->
- {line,Id} = erl_parse:get_attribute(IdAttrs, line),
- sgn(Id) * (abs(Id) band ((1 bsl ?MAX_NUM_OF_LINES) - 1)).
+make_lcid(Anno, No) when is_integer(No), No > 0 ->
+ {No, erl_anno:line(Anno)}.
-sgn(X) when X >= 0 ->
- 1;
-sgn(X) when X < 0 ->
- -1.
+get_lcid_no({No, _Line}) ->
+ No.
-seq(S, E) when S - E =:= 1 ->
- [];
-seq(S, E) ->
- lists:seq(S, E).
-
-sublist(_, 0) ->
- [];
-sublist(L, N) ->
- lists:sublist(L, N).
+get_lcid_line({_No, Line}) ->
+ Line.
qid(LCId, No) ->
#qid{no = No, lcid = LCId}.
diff --git a/lib/stdlib/src/rand.erl b/lib/stdlib/src/rand.erl
new file mode 100644
index 0000000000..6a805eb69e
--- /dev/null
+++ b/lib/stdlib/src/rand.erl
@@ -0,0 +1,591 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+%% =====================================================================
+%% Multiple PRNG module for Erlang/OTP
+%% Copyright (c) 2015 Kenji Rikitake
+%% =====================================================================
+
+-module(rand).
+
+-export([seed_s/1, seed_s/2, seed/1, seed/2,
+ export_seed/0, export_seed_s/1,
+ uniform/0, uniform/1, uniform_s/1, uniform_s/2,
+ normal/0, normal_s/1
+ ]).
+
+-compile({inline, [exs64_next/1, exsplus_next/1,
+ exs1024_next/1, exs1024_calc/2,
+ get_52/1, normal_kiwi/1]}).
+
+-define(DEFAULT_ALG_HANDLER, exsplus).
+-define(SEED_DICT, rand_seed).
+
+%% =====================================================================
+%% Types
+%% =====================================================================
+
+%% This depends on the algorithm handler function
+-type alg_seed() :: exs64_state() | exsplus_state() | exs1024_state().
+%% This is the algorithm handler function within this module
+-type alg_handler() :: #{type => alg(),
+ max => integer(),
+ next => fun(),
+ uniform => fun(),
+ uniform_n => fun()}.
+
+%% Internal state
+-opaque state() :: {alg_handler(), alg_seed()}.
+-type alg() :: exs64 | exsplus | exs1024.
+-opaque export_state() :: {alg(), alg_seed()}.
+-export_type([alg/0, state/0, export_state/0]).
+
+%% =====================================================================
+%% API
+%% =====================================================================
+
+%% Return algorithm and seed so that RNG state can be recreated with seed/1
+-spec export_seed() -> undefined | export_state().
+export_seed() ->
+ case seed_get() of
+ {#{type:=Alg}, Seed} -> {Alg, Seed};
+ _ -> undefined
+ end.
+
+-spec export_seed_s(state()) -> export_state().
+export_seed_s({#{type:=Alg}, Seed}) -> {Alg, Seed}.
+
+%% seed(Alg) seeds the RNG with runtime-dependent values
+%% and returns the NEW state
+
+%% seed({Alg,Seed}) sets up the RNG with a previously exported seed
+%% and returns the NEW state
+
+-spec seed(AlgOrExpState::alg() | export_state()) -> state().
+seed(Alg) ->
+ R = seed_s(Alg),
+ _ = seed_put(R),
+ R.
+
+-spec seed_s(AlgOrExpState::alg() | export_state()) -> state().
+seed_s(Alg) when is_atom(Alg) ->
+ seed_s(Alg, {erlang:phash2([{node(),self()}]),
+ erlang:system_time(),
+ erlang:unique_integer()});
+seed_s({Alg0, Seed}) ->
+ {Alg,_SeedFun} = mk_alg(Alg0),
+ {Alg, Seed}.
+
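A rough usage sketch of the functional API (illustrative values): the exported term can be stored and replayed later to reproduce the same sequence.

    S0 = rand:seed_s(exsplus, {123, 456, 789}),
    {X, _S1} = rand:uniform_s(S0),              %% float in (0.0, 1.0)
    Exported = rand:export_seed_s(S0),          %% {exsplus, AlgSeed}
    S2 = rand:seed_s(Exported),                 %% same state again
    {X, _} = rand:uniform_s(S2).                %% same float X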
+%% seed/2: seeds RNG with the algorithm and given values
+%% and returns the NEW state.
+
+-spec seed(Alg :: alg(), {integer(), integer(), integer()}) -> state().
+seed(Alg0, S0) ->
+ State = seed_s(Alg0, S0),
+ _ = seed_put(State),
+ State.
+
+-spec seed_s(Alg :: alg(), {integer(), integer(), integer()}) -> state().
+seed_s(Alg0, S0 = {_, _, _}) ->
+ {Alg, Seed} = mk_alg(Alg0),
+ AS = Seed(S0),
+ {Alg, AS}.
+
+%%% uniform/0, uniform/1, uniform_s/1, and uniform_s/2 all return
+%%% uniformly distributed random numbers.
+
+%% uniform/0: returns a random float X where 0.0 < X < 1.0,
+%% updating the state in the process dictionary.
+
+-spec uniform() -> X::float().
+uniform() ->
+ {X, Seed} = uniform_s(seed_get()),
+ _ = seed_put(Seed),
+ X.
+
+%% uniform/1: given an integer N >= 1,
+%% uniform/1 returns a random integer X where 1 =< X =< N,
+%% updating the state in the process dictionary.
+
+-spec uniform(N :: pos_integer()) -> X::pos_integer().
+uniform(N) ->
+ {X, Seed} = uniform_s(N, seed_get()),
+ _ = seed_put(Seed),
+ X.
+
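The plain (non-_s) functions keep the state in the process dictionary; a hypothetical call sequence:

    _ = rand:seed(exs1024, {1, 2, 3}),   %% seed once per process (optional)
    F = rand:uniform(),                  %% 0.0 < F < 1.0
    I = rand:uniform(100),               %% 1 =< I =< 100
    Z = rand:normal().                   %% standard normal deviate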
+%% uniform_s/1: given a state, uniform_s/1
+%% returns a random float X where 0.0 < X < 1.0,
+%% and a new state.
+
+-spec uniform_s(state()) -> {X::float(), NewS :: state()}.
+uniform_s(State = {#{uniform:=Uniform}, _}) ->
+ Uniform(State).
+
+%% uniform_s/2: given an integer N >= 1 and a state,
+%% returns a random integer X where 1 =< X =< N,
+%% and a new state.
+
+-spec uniform_s(N::pos_integer(), state()) -> {X::pos_integer(), NewS::state()}.
+uniform_s(N, State = {#{uniform_n:=Uniform, max:=Max}, _})
+ when 0 < N, N =< Max ->
+ Uniform(N, State);
+uniform_s(N, State0 = {#{uniform:=Uniform}, _})
+ when is_integer(N), 0 < N ->
+ {F, State} = Uniform(State0),
+ {trunc(F * N) + 1, State}.
+
+%% normal/0: returns a random float with standard normal distribution
+%% updating the state in the process dictionary.
+
+-spec normal() -> float().
+normal() ->
+ {X, Seed} = normal_s(seed_get()),
+ _ = seed_put(Seed),
+ X.
+
+%% normal_s/1: returns a random float with standard normal distribution
+%% The Ziggurat Method for generating random variables - Marsaglia and Tsang
+%% Paper and reference code: http://www.jstatsoft.org/v05/i08/
+
+-spec normal_s(state()) -> {float(), NewS :: state()}.
+normal_s(State0) ->
+ {Sign, R, State} = get_52(State0),
+ Idx = R band 16#FF,
+ Idx1 = Idx+1,
+ {Ki, Wi} = normal_kiwi(Idx1),
+ X = R * Wi,
+ case R < Ki of
+ %% Fast path 95% of the time
+ true when Sign =:= 0 -> {X, State};
+ true -> {-X, State};
+ %% Slow path
+ false when Sign =:= 0 -> normal_s(Idx, Sign, X, State);
+ false -> normal_s(Idx, Sign, -X, State)
+ end.
+
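A quick, purely illustrative way to sanity-check the Ziggurat implementation is to draw a sample with the functional API and verify that the mean is near 0 and the variance near 1:

    check_normal(N) ->
        S0 = rand:seed_s(exsplus, {1, 2, 3}),
        {Xs, _S} = lists:mapfoldl(fun(_, S) -> rand:normal_s(S) end,
                                  S0, lists:seq(1, N)),
        Mean = lists:sum(Xs) / N,
        Var  = lists:sum([(X - Mean) * (X - Mean) || X <- Xs]) / (N - 1),
        {Mean, Var}.                     %% expect roughly {0.0, 1.0} for large N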
+%% =====================================================================
+%% Internal functions
+
+-define(UINT21MASK, 16#00000000001fffff).
+-define(UINT32MASK, 16#00000000ffffffff).
+-define(UINT33MASK, 16#00000001ffffffff).
+-define(UINT39MASK, 16#0000007fffffffff).
+-define(UINT58MASK, 16#03ffffffffffffff).
+-define(UINT64MASK, 16#ffffffffffffffff).
+
+-type uint64() :: 0..16#ffffffffffffffff.
+-type uint58() :: 0..16#03ffffffffffffff.
+
+-spec seed_put(state()) -> undefined | state().
+seed_put(Seed) ->
+ put(?SEED_DICT, Seed).
+
+seed_get() ->
+ case get(?SEED_DICT) of
+ undefined -> seed(?DEFAULT_ALG_HANDLER);
+ Old -> Old % no type checking here
+ end.
+
+%% Set up the algorithm handler map
+mk_alg(exs64) ->
+ {#{type=>exs64, max=>?UINT64MASK, next=>fun exs64_next/1,
+ uniform=>fun exs64_uniform/1, uniform_n=>fun exs64_uniform/2},
+ fun exs64_seed/1};
+mk_alg(exsplus) ->
+ {#{type=>exsplus, max=>?UINT58MASK, next=>fun exsplus_next/1,
+ uniform=>fun exsplus_uniform/1, uniform_n=>fun exsplus_uniform/2},
+ fun exsplus_seed/1};
+mk_alg(exs1024) ->
+ {#{type=>exs1024, max=>?UINT64MASK, next=>fun exs1024_next/1,
+ uniform=>fun exs1024_uniform/1, uniform_n=>fun exs1024_uniform/2},
+ fun exs1024_seed/1}.
+
+%% =====================================================================
+%% exs64 PRNG: Xorshift64*
+%% Algorithm by Sebastiano Vigna
+%% Reference URL: http://xorshift.di.unimi.it/
+%% =====================================================================
+
+-type exs64_state() :: uint64().
+
+exs64_seed({A1, A2, A3}) ->
+ {V1, _} = exs64_next(((A1 band ?UINT32MASK) * 4294967197 + 1)),
+ {V2, _} = exs64_next(((A2 band ?UINT32MASK) * 4294967231 + 1)),
+ {V3, _} = exs64_next(((A3 band ?UINT32MASK) * 4294967279 + 1)),
+ ((V1 * V2 * V3) rem (?UINT64MASK - 1)) + 1.
+
+%% Advance xorshift64* state for one step and generate 64bit unsigned integer
+-spec exs64_next(exs64_state()) -> {uint64(), exs64_state()}.
+exs64_next(R) ->
+ R1 = R bxor (R bsr 12),
+ R2 = R1 bxor ((R1 band ?UINT39MASK) bsl 25),
+ R3 = R2 bxor (R2 bsr 27),
+ {(R3 * 2685821657736338717) band ?UINT64MASK, R3}.
+
+exs64_uniform({Alg, R0}) ->
+ {V, R1} = exs64_next(R0),
+ {V / 18446744073709551616, {Alg, R1}}.
+
+exs64_uniform(Max, {Alg, R}) ->
+ {V, R1} = exs64_next(R),
+ {(V rem Max) + 1, {Alg, R1}}.
+
+%% =====================================================================
+%% exsplus PRNG: Xorshift116+
+%% Algorithm by Sebastiano Vigna
+%% Reference URL: http://xorshift.di.unimi.it/
+%% 58 bits fit into an immediate on 64-bit Erlang and are thus much faster.
+%% Modification of the original Xorshift128+ algorithm to 116 bits
+%% by Sebastiano Vigna; many thanks for his help and work.
+%% =====================================================================
+-type exsplus_state() :: nonempty_improper_list(uint58(), uint58()).
+
+exsplus_seed({A1, A2, A3}) ->
+ {_, R1} = exsplus_next([(((A1 * 4294967197) + 1) band ?UINT58MASK)|
+ (((A2 * 4294967231) + 1) band ?UINT58MASK)]),
+ {_, R2} = exsplus_next([(((A3 * 4294967279) + 1) band ?UINT58MASK)|
+ tl(R1)]),
+ R2.
+
+%% Advance xorshift116+ state for one step and generate 58bit unsigned integer
+-spec exsplus_next(exsplus_state()) -> {uint58(), exsplus_state()}.
+exsplus_next([S1|S0]) ->
+ %% Note: members s0 and s1 are swapped here
+ S11 = (S1 bxor (S1 bsl 24)) band ?UINT58MASK,
+ S12 = S11 bxor S0 bxor (S11 bsr 11) bxor (S0 bsr 41),
+ {(S0 + S12) band ?UINT58MASK, [S0|S12]}.
+
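The improper list [S1|S0] used for the state is a deliberate representation choice, presumably because a single cons cell of two immediates is cheaper to build and match than a two-tuple; the shape itself is ordinary Erlang:

    State = [1|2],        %% one cons cell, not a proper list
    [S1|S0] = State.      %% S1 = 1, S0 = 2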
+exsplus_uniform({Alg, R0}) ->
+ {I, R1} = exsplus_next(R0),
+ {I / (?UINT58MASK+1), {Alg, R1}}.
+
+exsplus_uniform(Max, {Alg, R}) ->
+ {V, R1} = exsplus_next(R),
+ {(V rem Max) + 1, {Alg, R1}}.
+
+%% =====================================================================
+%% exs1024 PRNG: Xorshift1024*
+%% Algorithm by Sebastiano Vigna
+%% Reference URL: http://xorshift.di.unimi.it/
+%% =====================================================================
+
+-type exs1024_state() :: {list(uint64()), list(uint64())}.
+
+exs1024_seed({A1, A2, A3}) ->
+ B1 = (((A1 band ?UINT21MASK) + 1) * 2097131) band ?UINT21MASK,
+ B2 = (((A2 band ?UINT21MASK) + 1) * 2097133) band ?UINT21MASK,
+ B3 = (((A3 band ?UINT21MASK) + 1) * 2097143) band ?UINT21MASK,
+ {exs1024_gen1024((B1 bsl 43) bor (B2 bsl 22) bor (B3 bsl 1) bor 1),
+ []}.
+
+%% Generate a list of 16 64-bit elements
+%% of the xorshift64* random sequence
+%% from a given 64-bit seed.
+%% Note: dependent on exs64_next/1
+-spec exs1024_gen1024(uint64()) -> list(uint64()).
+exs1024_gen1024(R) ->
+ exs1024_gen1024(16, R, []).
+
+exs1024_gen1024(0, _, L) ->
+ L;
+exs1024_gen1024(N, R, L) ->
+ {X, R2} = exs64_next(R),
+ exs1024_gen1024(N - 1, R2, [X|L]).
+
+%% Calculation of xorshift1024*.
+%% exs1024_calc(S0, S1) -> {X, NS1}.
+%% X: random number output
+-spec exs1024_calc(uint64(), uint64()) -> {uint64(), uint64()}.
+exs1024_calc(S0, S1) ->
+ S11 = S1 bxor ((S1 band ?UINT33MASK) bsl 31),
+ S12 = S11 bxor (S11 bsr 11),
+ S01 = S0 bxor (S0 bsr 30),
+ NS1 = S01 bxor S12,
+ {(NS1 * 1181783497276652981) band ?UINT64MASK, NS1}.
+
+%% Advance xorshift1024* state for one step and generate 64bit unsigned integer
+-spec exs1024_next(exs1024_state()) -> {uint64(), exs1024_state()}.
+exs1024_next({[S0,S1|L3], RL}) ->
+ {X, NS1} = exs1024_calc(S0, S1),
+ {X, {[NS1|L3], [S0|RL]}};
+exs1024_next({[H], RL}) ->
+ NL = [H|lists:reverse(RL)],
+ exs1024_next({NL, []}).
+
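The {Front, ReversedBack} pair works as a functional ring buffer over the sixteen 64-bit words: words are consumed from the front and pushed onto the reversed back, which is flipped over when the front runs out. The same rotation idea in isolation (illustrative only):

    rotate({[H | T], Rev}) -> {T, [H | Rev]};
    rotate({[], Rev})      -> rotate({lists:reverse(Rev), []}).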
+exs1024_uniform({Alg, R0}) ->
+ {V, R1} = exs1024_next(R0),
+ {V / 18446744073709551616, {Alg, R1}}.
+
+exs1024_uniform(Max, {Alg, R}) ->
+ {V, R1} = exs1024_next(R),
+ {(V rem Max) + 1, {Alg, R1}}.
+
+%% =====================================================================
+%% Ziggurat cont
+%% =====================================================================
+-define(NOR_R, 3.6541528853610087963519472518).
+-define(NOR_INV_R, 1/?NOR_R).
+
+%% Returns {Sign, Random51bits, State}
+get_52({Alg=#{next:=Next}, S0}) ->
+ {Int,S1} = Next(S0),
+ {((1 bsl 51) band Int), Int band ((1 bsl 51)-1), {Alg, S1}}.
+
+%% Slow path
+normal_s(0, Sign, X0, State0) ->
+ {U0, S1} = uniform_s(State0),
+ X = -?NOR_INV_R*math:log(U0),
+ {U1, S2} = uniform_s(S1),
+ Y = -math:log(U1),
+ case Y+Y > X*X of
+ false ->
+ normal_s(0, Sign, X0, S2);
+ true when Sign =:= 0 ->
+ {?NOR_R + X, S2};
+ true ->
+ {-?NOR_R - X, S2}
+ end;
+normal_s(Idx, _Sign, X, State0) ->
+ Fi2 = normal_fi(Idx+1),
+ {U0, S1} = uniform_s(State0),
+ case ((normal_fi(Idx) - Fi2)*U0 + Fi2) < math:exp(-0.5*X*X) of
+ true -> {X, S1};
+ false -> normal_s(S1)
+ end.
+
+%% Tables for generating normal_s
+%% ki is zipped with wi (slightly faster)
+normal_kiwi(Indx) ->
+ element(Indx,
+ {{2104047571236786,1.736725412160263e-15}, {0,9.558660351455634e-17},
+ {1693657211986787,1.2708704834810623e-16},{1919380038271141,1.4909740962495474e-16},
+ {2015384402196343,1.6658733631586268e-16},{2068365869448128,1.8136120810119029e-16},
+ {2101878624052573,1.9429720153135588e-16},{2124958784102998,2.0589500628482093e-16},
+ {2141808670795147,2.1646860576895422e-16},{2154644611568301,2.2622940392218116e-16},
+ {2164744887587275,2.353271891404589e-16},{2172897953696594,2.438723455742877e-16},
+ {2179616279372365,2.5194879829274225e-16},{2185247251868649,2.5962199772528103e-16},
+ {2190034623107822,2.6694407473648285e-16},{2194154434521197,2.7395729685142446e-16},
+ {2197736978774660,2.8069646002484804e-16},{2200880740891961,2.871905890411393e-16},
+ {2203661538010620,2.9346417484728883e-16},{2206138681109102,2.9953809336782113e-16},
+ {2208359231806599,3.054303000719244e-16},{2210361007258210,3.111563633892157e-16},
+ {2212174742388539,3.1672988018581815e-16},{2213825672704646,3.2216280350549905e-16},
+ {2215334711002614,3.274657040793975e-16},{2216719334487595,3.326479811684171e-16},
+ {2217994262139172,3.377180341735323e-16},{2219171977965032,3.4268340353119356e-16},
+ {2220263139538712,3.475508873172976e-16},{2221276900117330,3.523266384600203e-16},
+ {2222221164932930,3.5701624633953494e-16},{2223102796829069,3.616248057159834e-16},
+ {2223927782546658,3.661569752965354e-16},{2224701368170060,3.7061702777236077e-16},
+ {2225428170204312,3.75008892787478e-16},{2226112267248242,3.7933619401549554e-16},
+ {2226757276105256,3.836022812967728e-16},{2227366415328399,3.8781025861250247e-16},
+ {2227942558554684,3.919630085325768e-16},{2228488279492521,3.9606321366256378e-16},
+ {2229005890047222,4.001133755254669e-16},{2229497472775193,4.041158312414333e-16},
+ {2229964908627060,4.080727683096045e-16},{2230409900758597,4.119862377480744e-16},
+ {2230833995044585,4.1585816580828064e-16},{2231238597816133,4.1969036444740733e-16},
+ {2231624991250191,4.234845407152071e-16},{2231994346765928,4.272423051889976e-16},
+ {2232347736722750,4.309651795716294e-16},{2232686144665934,4.346546035512876e-16},
+ {2233010474325959,4.383119410085457e-16},{2233321557544881,4.4193848564470665e-16},
+ {2233620161276071,4.455354660957914e-16},{2233906993781271,4.491040505882875e-16},
+ {2234182710130335,4.52645351185714e-16},{2234447917093496,4.561604276690038e-16},
+ {2234703177503020,4.596502910884941e-16},{2234949014150181,4.631159070208165e-16},
+ {2235185913274316,4.665581985600875e-16},{2235414327692884,4.699780490694195e-16},
+ {2235634679614920,4.733763047158324e-16},{2235847363174595,4.767537768090853e-16},
+ {2236052746716837,4.8011124396270155e-16},{2236251174862869,4.834494540935008e-16},
+ {2236442970379967,4.867691262742209e-16},{2236628435876762,4.900709524522994e-16},
+ {2236807855342765,4.933555990465414e-16},{2236981495548562,4.966237084322178e-16},
+ {2237149607321147,4.998759003240909e-16},{2237312426707209,5.031127730659319e-16},
+ {2237470176035652,5.0633490483427195e-16},{2237623064889403,5.095428547633892e-16},
+ {2237771290995388,5.127371639978797e-16},{2237915041040597,5.159183566785736e-16},
+ {2238054491421305,5.190869408670343e-16},{2238189808931712,5.222434094134042e-16},
+ {2238321151397660,5.253882407719454e-16},{2238448668260432,5.285218997682382e-16},
+ {2238572501115169,5.316448383216618e-16},{2238692784207942,5.34757496126473e-16},
+ {2238809644895133,5.378603012945235e-16},{2238923204068402,5.409536709623993e-16},
+ {2239033576548190,5.440380118655467e-16},{2239140871448443,5.471137208817361e-16},
+ {2239245192514958,5.501811855460336e-16},{2239346638439541,5.532407845392784e-16},
+ {2239445303151952,5.56292888151909e-16},{2239541276091442,5.593378587248462e-16},
+ {2239634642459498,5.623760510690043e-16},{2239725483455293,5.65407812864896e-16},
+ {2239813876495186,5.684334850436814e-16},{2239899895417494,5.714534021509204e-16},
+ {2239983610673676,5.744678926941961e-16},{2240065089506935,5.774772794756965e-16},
+ {2240144396119183,5.804818799107686e-16},{2240221591827230,5.834820063333892e-16},
+ {2240296735208969,5.864779662894365e-16},{2240369882240293,5.894700628185872e-16},
+ {2240441086423386,5.924585947256134e-16},{2240510398907004,5.95443856841806e-16},
+ {2240577868599305,5.984261402772028e-16},{2240643542273726,6.014057326642664e-16},
+ {2240707464668391,6.043829183936125e-16},{2240769678579486,6.073579788423606e-16},
+ {2240830224948980,6.103311925956439e-16},{2240889142947082,6.133028356617911e-16},
+ {2240946470049769,6.162731816816596e-16},{2241002242111691,6.192425021325847e-16},
+ {2241056493434746,6.222110665273788e-16},{2241109256832602,6.251791426088e-16},
+ {2241160563691400,6.281469965398895e-16},{2241210444026879,6.311148930905604e-16},
+ {2241258926538122,6.34083095820806e-16},{2241306038658137,6.370518672608815e-16},
+ {2241351806601435,6.400214690888025e-16},{2241396255408788,6.429921623054896e-16},
+ {2241439408989313,6.459642074078832e-16},{2241481290160038,6.489378645603397e-16},
+ {2241521920683062,6.519133937646159e-16},{2241561321300462,6.548910550287415e-16},
+ {2241599511767028,6.578711085350741e-16},{2241636510880960,6.608538148078259e-16},
+ {2241672336512612,6.638394348803506e-16},{2241707005631362,6.668282304624746e-16},
+ {2241740534330713,6.698204641081558e-16},{2241772937851689,6.728163993837531e-16},
+ {2241804230604585,6.758163010371901e-16},{2241834426189161,6.78820435168298e-16},
+ {2241863537413311,6.818290694006254e-16},{2241891576310281,6.848424730550038e-16},
+ {2241918554154466,6.878609173251664e-16},{2241944481475843,6.908846754557169e-16},
+ {2241969368073071,6.939140229227569e-16},{2241993223025298,6.969492376174829e-16},
+ {2242016054702685,6.999906000330764e-16},{2242037870775710,7.030383934552151e-16},
+ {2242058678223225,7.060929041565482e-16},{2242078483339331,7.091544215954873e-16},
+ {2242097291739040,7.122232386196779e-16},{2242115108362774,7.152996516745303e-16},
+ {2242131937479672,7.183839610172063e-16},{2242147782689725,7.214764709364707e-16},
+ {2242162646924736,7.245774899788387e-16},{2242176532448092,7.276873311814693e-16},
+ {2242189440853337,7.308063123122743e-16},{2242201373061537,7.339347561177405e-16},
+ {2242212329317416,7.370729905789831e-16},{2242222309184237,7.4022134917658e-16},
+ {2242231311537397,7.433801711647648e-16},{2242239334556717,7.465498018555889e-16},
+ {2242246375717369,7.497305929136979e-16},{2242252431779415,7.529229026624058e-16},
+ {2242257498775893,7.561270964017922e-16},{2242261571999416,7.5934354673958895e-16},
+ {2242264645987196,7.625726339356756e-16},{2242266714504453,7.658147462610487e-16},
+ {2242267770526109,7.690702803721919e-16},{2242267806216711,7.723396417018299e-16},
+ {2242266812908462,7.756232448671174e-16},{2242264781077289,7.789215140963852e-16},
+ {2242261700316818,7.822348836756411e-16},{2242257559310145,7.855637984161084e-16},
+ {2242252345799276,7.889087141441755e-16},{2242246046552082,7.922700982152271e-16},
+ {2242238647326615,7.956484300529366e-16},{2242230132832625,7.99044201715713e-16},
+ {2242220486690076,8.024579184921259e-16},{2242209691384458,8.058900995272657e-16},
+ {2242197728218684,8.093412784821501e-16},{2242184577261310,8.128120042284501e-16},
+ {2242170217290819,8.163028415809877e-16},{2242154625735679,8.198143720706533e-16},
+ {2242137778609839,8.23347194760605e-16},{2242119650443327,8.26901927108847e-16},
+ {2242100214207556,8.304792058805374e-16},{2242079441234906,8.340796881136629e-16},
+ {2242057301132135,8.377040521420222e-16},{2242033761687079,8.413529986798028e-16},
+ {2242008788768107,8.450272519724097e-16},{2241982346215682,8.487275610186155e-16},
+ {2241954395725356,8.524547008695596e-16},{2241924896721443,8.562094740106233e-16},
+ {2241893806220517,8.599927118327665e-16},{2241861078683830,8.638052762005259e-16},
+ {2241826665857598,8.676480611245582e-16},{2241790516600041,8.715219945473698e-16},
+ {2241752576693881,8.754280402517175e-16},{2241712788642916,8.793671999021043e-16},
+ {2241671091451078,8.833405152308408e-16},{2241627420382235,8.873490703813135e-16},
+ {2241581706698773,8.913939944224086e-16},{2241533877376767,8.954764640495068e-16},
+ {2241483854795281,8.9959770648911e-16},{2241431556397035,9.037590026260118e-16},
+ {2241376894317345,9.079616903740068e-16},{2241319774977817,9.122071683134846e-16},
+ {2241260098640860,9.164968996219135e-16},{2241197758920538,9.208324163262308e-16},
+ {2241132642244704,9.252153239095693e-16},{2241064627262652,9.296473063086417e-16},
+ {2240993584191742,9.341301313425265e-16},{2240919374095536,9.38665656618666e-16},
+ {2240841848084890,9.432558359676707e-16},{2240760846432232,9.479027264651738e-16},
+ {2240676197587784,9.526084961066279e-16},{2240587717084782,9.57375432209745e-16},
+ {2240495206318753,9.622059506294838e-16},{2240398451183567,9.671026058823054e-16},
+ {2240297220544165,9.720681022901626e-16},{2240191264522612,9.771053062707209e-16},
+ {2240080312570155,9.822172599190541e-16},{2239964071293331,9.874071960480671e-16},
+ {2239842221996530,9.926785548807976e-16},{2239714417896699,9.980350026183645e-16},
+ {2239580280957725,1.003480452143618e-15},{2239439398282193,1.0090190861637457e-15},
+ {2239291317986196,1.0146553831467086e-15},{2239135544468203,1.0203941464683124e-15},
+ {2238971532964979,1.0262405372613567e-15},{2238798683265269,1.0322001115486456e-15},
+ {2238616332424351,1.03827886235154e-15},{2238423746288095,1.044483267600047e-15},
+ {2238220109591890,1.0508203448355195e-15},{2238004514345216,1.057297713900989e-15},
+ {2237775946143212,1.06392366906768e-15},{2237533267957822,1.0707072623632994e-15},
+ {2237275200846753,1.0776584002668106e-15},{2237000300869952,1.0847879564403425e-15},
+ {2236706931309099,1.0921079038149563e-15},{2236393229029147,1.0996314701785628e-15},
+ {2236057063479501,1.1073733224935752e-15},{2235695986373246,1.1153497865853155e-15},
+ {2235307169458859,1.1235791107110833e-15},{2234887326941578,1.1320817840164846e-15},
+ {2234432617919447,1.140880924258278e-15},{2233938522519765,1.1500027537839792e-15},
+ {2233399683022677,1.159477189144919e-15},{2232809697779198,1.169338578691096e-15},
+ {2232160850599817,1.17962663529558e-15},{2231443750584641,1.190387629928289e-15},
+ {2230646845562170,1.2016759392543819e-15},{2229755753817986,1.2135560818666897e-15},
+ {2228752329126533,1.2261054417450561e-15},{2227613325162504,1.2394179789163251e-15},
+ {2226308442121174,1.2536093926602567e-15},{2224797391720399,1.268824481425501e-15},
+ {2223025347823832,1.2852479319096109e-15},{2220915633329809,1.3031206634689985e-15},
+ {2218357446087030,1.3227655770195326e-15},{2215184158448668,1.3446300925011171e-15},
+ {2211132412537369,1.3693606835128518e-15},{2205758503851065,1.397943667277524e-15},
+ {2198248265654987,1.4319989869661328e-15},{2186916352102141,1.4744848603597596e-15},
+ {2167562552481814,1.5317872741611144e-15},{2125549880839716,1.6227698675312968e-15}}).
+
+normal_fi(Indx) ->
+ element(Indx,
+ {1.0000000000000000e+00,9.7710170126767082e-01,9.5987909180010600e-01,
+ 9.4519895344229909e-01,9.3206007595922991e-01,9.1999150503934646e-01,
+ 9.0872644005213032e-01,8.9809592189834297e-01,8.8798466075583282e-01,
+ 8.7830965580891684e-01,8.6900868803685649e-01,8.6003362119633109e-01,
+ 8.5134625845867751e-01,8.4291565311220373e-01,8.3471629298688299e-01,
+ 8.2672683394622093e-01,8.1892919160370192e-01,8.1130787431265572e-01,
+ 8.0384948317096383e-01,7.9654233042295841e-01,7.8937614356602404e-01,
+ 7.8234183265480195e-01,7.7543130498118662e-01,7.6863731579848571e-01,
+ 7.6195334683679483e-01,7.5537350650709567e-01,7.4889244721915638e-01,
+ 7.4250529634015061e-01,7.3620759812686210e-01,7.2999526456147568e-01,
+ 7.2386453346862967e-01,7.1781193263072152e-01,7.1183424887824798e-01,
+ 7.0592850133275376e-01,7.0009191813651117e-01,6.9432191612611627e-01,
+ 6.8861608300467136e-01,6.8297216164499430e-01,6.7738803621877308e-01,
+ 6.7186171989708166e-01,6.6639134390874977e-01,6.6097514777666277e-01,
+ 6.5561147057969693e-01,6.5029874311081637e-01,6.4503548082082196e-01,
+ 6.3982027745305614e-01,6.3465179928762327e-01,6.2952877992483625e-01,
+ 6.2445001554702606e-01,6.1941436060583399e-01,6.1442072388891344e-01,
+ 6.0946806492577310e-01,6.0455539069746733e-01,5.9968175261912482e-01,
+ 5.9484624376798689e-01,5.9004799633282545e-01,5.8528617926337090e-01,
+ 5.8055999610079034e-01,5.7586868297235316e-01,5.7121150673525267e-01,
+ 5.6658776325616389e-01,5.6199677581452390e-01,5.5743789361876550e-01,
+ 5.5291049042583185e-01,5.4841396325526537e-01,5.4394773119002582e-01,
+ 5.3951123425695158e-01,5.3510393238045717e-01,5.3072530440366150e-01,
+ 5.2637484717168403e-01,5.2205207467232140e-01,5.1775651722975591e-01,
+ 5.1348772074732651e-01,5.0924524599574761e-01,5.0502866794346790e-01,
+ 5.0083757512614835e-01,4.9667156905248933e-01,4.9253026364386815e-01,
+ 4.8841328470545758e-01,4.8432026942668288e-01,4.8025086590904642e-01,
+ 4.7620473271950547e-01,4.7218153846772976e-01,4.6818096140569321e-01,
+ 4.6420268904817391e-01,4.6024641781284248e-01,4.5631185267871610e-01,
+ 4.5239870686184824e-01,4.4850670150720273e-01,4.4463556539573912e-01,
+ 4.4078503466580377e-01,4.3695485254798533e-01,4.3314476911265209e-01,
+ 4.2935454102944126e-01,4.2558393133802180e-01,4.2183270922949573e-01,
+ 4.1810064983784795e-01,4.1438753404089090e-01,4.1069314827018799e-01,
+ 4.0701728432947315e-01,4.0335973922111429e-01,3.9972031498019700e-01,
+ 3.9609881851583223e-01,3.9249506145931540e-01,3.8890886001878855e-01,
+ 3.8534003484007706e-01,3.8178841087339344e-01,3.7825381724561896e-01,
+ 3.7473608713789086e-01,3.7123505766823922e-01,3.6775056977903225e-01,
+ 3.6428246812900372e-01,3.6083060098964775e-01,3.5739482014578022e-01,
+ 3.5397498080007656e-01,3.5057094148140588e-01,3.4718256395679348e-01,
+ 3.4380971314685055e-01,3.4045225704452164e-01,3.3711006663700588e-01,
+ 3.3378301583071823e-01,3.3047098137916342e-01,3.2717384281360129e-01,
+ 3.2389148237639104e-01,3.2062378495690530e-01,3.1737063802991350e-01,
+ 3.1413193159633707e-01,3.1090755812628634e-01,3.0769741250429189e-01,
+ 3.0450139197664983e-01,3.0131939610080288e-01,2.9815132669668531e-01,
+ 2.9499708779996164e-01,2.9185658561709499e-01,2.8872972848218270e-01,
+ 2.8561642681550159e-01,2.8251659308370741e-01,2.7943014176163772e-01,
+ 2.7635698929566810e-01,2.7329705406857691e-01,2.7025025636587519e-01,
+ 2.6721651834356114e-01,2.6419576399726080e-01,2.6118791913272082e-01,
+ 2.5819291133761890e-01,2.5521066995466168e-01,2.5224112605594190e-01,
+ 2.4928421241852824e-01,2.4633986350126363e-01,2.4340801542275012e-01,
+ 2.4048860594050039e-01,2.3758157443123795e-01,2.3468686187232990e-01,
+ 2.3180441082433859e-01,2.2893416541468023e-01,2.2607607132238020e-01,
+ 2.2323007576391746e-01,2.2039612748015194e-01,2.1757417672433113e-01,
+ 2.1476417525117358e-01,2.1196607630703015e-01,2.0917983462112499e-01,
+ 2.0640540639788071e-01,2.0364274931033485e-01,2.0089182249465656e-01,
+ 1.9815258654577511e-01,1.9542500351413428e-01,1.9270903690358912e-01,
+ 1.9000465167046496e-01,1.8731181422380025e-01,1.8463049242679927e-01,
+ 1.8196065559952254e-01,1.7930227452284767e-01,1.7665532144373500e-01,
+ 1.7401977008183875e-01,1.7139559563750595e-01,1.6878277480121151e-01,
+ 1.6618128576448205e-01,1.6359110823236570e-01,1.6101222343751107e-01,
+ 1.5844461415592431e-01,1.5588826472447920e-01,1.5334316106026283e-01,
+ 1.5080929068184568e-01,1.4828664273257453e-01,1.4577520800599403e-01,
+ 1.4327497897351341e-01,1.4078594981444470e-01,1.3830811644855071e-01,
+ 1.3584147657125373e-01,1.3338602969166913e-01,1.3094177717364430e-01,
+ 1.2850872227999952e-01,1.2608687022018586e-01,1.2367622820159654e-01,
+ 1.2127680548479021e-01,1.1888861344290998e-01,1.1651166562561080e-01,
+ 1.1414597782783835e-01,1.1179156816383801e-01,1.0944845714681163e-01,
+ 1.0711666777468364e-01,1.0479622562248690e-01,1.0248715894193508e-01,
+ 1.0018949876880981e-01,9.7903279038862284e-02,9.5628536713008819e-02,
+ 9.3365311912690860e-02,9.1113648066373634e-02,8.8873592068275789e-02,
+ 8.6645194450557961e-02,8.4428509570353374e-02,8.2223595813202863e-02,
+ 8.0030515814663056e-02,7.7849336702096039e-02,7.5680130358927067e-02,
+ 7.3522973713981268e-02,7.1377949058890375e-02,6.9245144397006769e-02,
+ 6.7124653827788497e-02,6.5016577971242842e-02,6.2921024437758113e-02,
+ 6.0838108349539864e-02,5.8767952920933758e-02,5.6710690106202902e-02,
+ 5.4666461324888914e-02,5.2635418276792176e-02,5.0617723860947761e-02,
+ 4.8613553215868521e-02,4.6623094901930368e-02,4.4646552251294443e-02,
+ 4.2684144916474431e-02,4.0736110655940933e-02,3.8802707404526113e-02,
+ 3.6884215688567284e-02,3.4980941461716084e-02,3.3093219458578522e-02,
+ 3.1221417191920245e-02,2.9365939758133314e-02,2.7527235669603082e-02,
+ 2.5705804008548896e-02,2.3902203305795882e-02,2.2117062707308864e-02,
+ 2.0351096230044517e-02,1.8605121275724643e-02,1.6880083152543166e-02,
+ 1.5177088307935325e-02,1.3497450601739880e-02,1.1842757857907888e-02,
+ 1.0214971439701471e-02,8.6165827693987316e-03,7.0508754713732268e-03,
+ 5.5224032992509968e-03,4.0379725933630305e-03,2.6090727461021627e-03,
+ 1.2602859304985975e-03}).
diff --git a/lib/stdlib/src/random.erl b/lib/stdlib/src/random.erl
index d7b51a151c..cf84f8cecf 100644
--- a/lib/stdlib/src/random.erl
+++ b/lib/stdlib/src/random.erl
@@ -57,11 +57,17 @@ seed() ->
%% seed({A1, A2, A3})
%% Seed random number generation
--spec seed({A1, A2, A3}) -> 'undefined' | ran() when
+-spec seed(SValue) -> 'undefined' | ran() when
+ SValue :: {A1, A2, A3} | integer(),
A1 :: integer(),
A2 :: integer(),
A3 :: integer().
+seed(Int) when is_integer(Int) ->
+ A1 = (Int bsr 16) band 16#fffffff,
+ A2 = Int band 16#ffffff,
+ A3 = (Int bsr 36) bor (A2 bsr 16),
+ seed(A1, A2, A3);
seed({A1, A2, A3}) ->
seed(A1, A2, A3).
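With the new integer clause, a single term such as a hash can reseed the legacy generator reproducibly; a hypothetical use:

    _ = random:seed(erlang:phash2({node(), self()})),
    X = random:uniform().          %% per-process state, float in (0.0, 1.0)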
diff --git a/lib/stdlib/src/shell.erl b/lib/stdlib/src/shell.erl
index 679c13f0cf..c6ba574ff4 100644
--- a/lib/stdlib/src/shell.erl
+++ b/lib/stdlib/src/shell.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -314,7 +314,8 @@ prompt(N, Eval0, Bs0, RT, Ds0) ->
case get_prompt_func() of
{M,F} ->
L = [{history,N}],
- C = {call,1,{remote,1,{atom,1,M},{atom,1,F}},[{value,1,L}]},
+ A = erl_anno:new(1),
+ C = {call,A,{remote,A,{atom,A,M},{atom,A,F}},[{value,A,L}]},
{V,Eval,Bs,Ds} = shell_cmd([C], Eval0, Bs0, RT, Ds0, pmt),
{Eval,Bs,Ds,case V of
{pmt,Val} ->
@@ -416,7 +417,7 @@ expand_expr({call,_L,{atom,_,v},[N]}, C) ->
{_,undefined,_} ->
no_command(N);
{Ces,V,CommandN} when is_list(Ces) ->
- {value,CommandN,V}
+ {value,erl_anno:new(CommandN),V}
end;
expand_expr({call,L,F,Args}, C) ->
{call,L,expand_expr(F, C),expand_exprs(Args, C)};
@@ -901,7 +902,7 @@ prep_check({call,Line,{atom,_,f},[{var,_,_Name}]}) ->
{atom,Line,ok};
prep_check({value,_CommandN,_Val}) ->
%% erl_lint cannot handle the history expansion {value,_,_}.
- {atom,0,ok};
+ {atom,a0(),ok};
prep_check(T) when is_tuple(T) ->
list_to_tuple(prep_check(tuple_to_list(T)));
prep_check([E | Es]) ->
@@ -913,7 +914,7 @@ expand_records([], E0) ->
E0;
expand_records(UsedRecords, E0) ->
RecordDefs = [Def || {_Name,Def} <- UsedRecords],
- L = 1,
+ L = erl_anno:new(1),
E = prep_rec(E0),
Forms = RecordDefs ++ [{function,L,foo,0,[{clause,L,[],[],[E]}]}],
[{function,L,foo,0,[{clause,L,[],[],[NE]}]}] =
@@ -1320,13 +1321,15 @@ list_bindings([{Name,Val}|Bs], RT) ->
case erl_eval:fun_data(Val) of
{fun_data,_FBs,FCs0} ->
FCs = expand_value(FCs0), % looks nicer
- F = {'fun',0,{clauses,FCs}},
- M = {match,0,{var,0,Name},F},
+ A = a0(),
+ F = {'fun',A,{clauses,FCs}},
+ M = {match,A,{var,A,Name},F},
io:fwrite(<<"~ts\n">>, [erl_pp:expr(M, enc())]);
{named_fun_data,_FBs,FName,FCs0} ->
FCs = expand_value(FCs0), % looks nicer
- F = {named_fun,0,FName,FCs},
- M = {match,0,{var,0,Name},F},
+ A = a0(),
+ F = {named_fun,A,FName,FCs},
+ M = {match,A,{var,A,Name},F},
io:fwrite(<<"~ts\n">>, [erl_pp:expr(M, enc())]);
false ->
Namel = io_lib:fwrite(<<"~s = ">>, [Name]),
@@ -1356,13 +1359,18 @@ expand_value(E) ->
%% There is no abstract representation of funs.
try_abstract(V, CommandN) ->
try erl_parse:abstract(V)
- catch _:_ -> {call,0,{atom,0,v},[{integer,0,CommandN}]}
+ catch
+ _:_ ->
+ A = a0(),
+ {call,A,{atom,A,v},[{integer,A,CommandN}]}
end.
%% Rather than listing possibly huge results the calls to v/1 are shown.
prep_list_commands(E) ->
- substitute_v1(fun({value,CommandN,_V}) ->
- {call,0,{atom,0,v},[{integer,0,CommandN}]}
+ A = a0(),
+ substitute_v1(fun({value,Anno,_V}) ->
+ CommandN = erl_anno:line(Anno),
+ {call,A,{atom,A,v},[{integer,A,CommandN}]}
end, E).
substitute_v1(F, {value,_,_}=Value) ->
@@ -1374,6 +1382,9 @@ substitute_v1(F, [E | Es]) ->
substitute_v1(_F, E) ->
E.
+a0() ->
+ erl_anno:new(0).
+
check_and_get_history_and_results() ->
check_env(shell_history_length),
check_env(shell_saved_results),
diff --git a/lib/stdlib/src/shell_default.erl b/lib/stdlib/src/shell_default.erl
index 3fe359af0e..0fca7ff8c7 100644
--- a/lib/stdlib/src/shell_default.erl
+++ b/lib/stdlib/src/shell_default.erl
@@ -23,7 +23,7 @@
-module(shell_default).
-export([help/0,lc/1,c/1,c/2,nc/1,nl/1,l/1,i/0,pid/3,i/3,m/0,m/1,
- memory/0,memory/1,
+ memory/0,memory/1,uptime/0,
erlangrc/1,bi/1, regs/0, flush/0,pwd/0,ls/0,ls/1,cd/1,
y/1, y/2,
xm/1, bt/1, q/0,
@@ -92,6 +92,7 @@ pid(X,Y,Z) -> c:pid(X,Y,Z).
pwd() -> c:pwd().
q() -> c:q().
regs() -> c:regs().
+uptime() -> c:uptime().
xm(Mod) -> c:xm(Mod).
y(File) -> c:y(File).
y(File, Opts) -> c:y(File, Opts).
diff --git a/lib/stdlib/src/slave.erl b/lib/stdlib/src/slave.erl
index 1898dc8aba..28da45621a 100644
--- a/lib/stdlib/src/slave.erl
+++ b/lib/stdlib/src/slave.erl
@@ -128,7 +128,7 @@ relay1(Pid) ->
%% {error, {already_running, Name@Host}}
-spec start(Host) -> {ok, Node} | {error, Reason} when
- Host :: atom(),
+ Host :: inet:hostname(),
Node :: node(),
Reason :: timeout | no_rsh | {already_running, Node}.
@@ -138,8 +138,8 @@ start(Host) ->
start(Host, Name, [], no_link).
-spec start(Host, Name) -> {ok, Node} | {error, Reason} when
- Host :: atom(),
- Name :: atom(),
+ Host :: inet:hostname(),
+ Name :: atom() | string(),
Node :: node(),
Reason :: timeout | no_rsh | {already_running, Node}.
@@ -147,8 +147,8 @@ start(Host, Name) ->
start(Host, Name, []).
-spec start(Host, Name, Args) -> {ok, Node} | {error, Reason} when
- Host :: atom(),
- Name :: atom(),
+ Host :: inet:hostname(),
+ Name :: atom() | string(),
Args :: string(),
Node :: node(),
Reason :: timeout | no_rsh | {already_running, Node}.
@@ -157,7 +157,7 @@ start(Host, Name, Args) ->
start(Host, Name, Args, no_link).
-spec start_link(Host) -> {ok, Node} | {error, Reason} when
- Host :: atom(),
+ Host :: inet:hostname(),
Node :: node(),
Reason :: timeout | no_rsh | {already_running, Node}.
@@ -167,8 +167,8 @@ start_link(Host) ->
start(Host, Name, [], self()).
-spec start_link(Host, Name) -> {ok, Node} | {error, Reason} when
- Host :: atom(),
- Name :: atom(),
+ Host :: inet:hostname(),
+ Name :: atom() | string(),
Node :: node(),
Reason :: timeout | no_rsh | {already_running, Node}.
@@ -176,8 +176,8 @@ start_link(Host, Name) ->
start_link(Host, Name, []).
-spec start_link(Host, Name, Args) -> {ok, Node} | {error, Reason} when
- Host :: atom(),
- Name :: atom(),
+ Host :: inet:hostname(),
+ Name :: atom() | string(),
Args :: string(),
Node :: node(),
Reason :: timeout | no_rsh | {already_running, Node}.
@@ -210,7 +210,6 @@ start(Host0, Name, Args, LinkTo, Prog) ->
Node :: node().
stop(Node) ->
-% io:format("stop(~p)~n", [Node]),
rpc:call(Node, erlang, halt, []),
ok.
@@ -229,7 +228,6 @@ wait_for_slave(Parent, Host, Name, Node, Args, LinkTo, Prog) ->
Waiter = register_unique_name(0),
case mk_cmd(Host, Name, Args, Waiter, Prog) of
{ok, Cmd} ->
-%% io:format("Command: ~ts~n", [Cmd]),
open_port({spawn, Cmd}, [stream]),
receive
{SlavePid, slave_started} ->
diff --git a/lib/stdlib/src/stdlib.app.src b/lib/stdlib/src/stdlib.app.src
index f134c75869..c33130cf8c 100644
--- a/lib/stdlib/src/stdlib.app.src
+++ b/lib/stdlib/src/stdlib.app.src
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2011. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -39,6 +39,7 @@
edlin_expand,
epp,
eval_bits,
+ erl_anno,
erl_bits,
erl_compile,
erl_eval,
@@ -83,6 +84,7 @@
qlc,
qlc_pt,
queue,
+ rand,
random,
re,
sets,
@@ -102,7 +104,7 @@
dets]},
{applications, [kernel]},
{env, []},
- {runtime_dependencies, ["sasl-2.4","kernel-3.0.2","erts-6.2","crypto-3.3",
+ {runtime_dependencies, ["sasl-2.4","kernel-4.0","erts-7.0","crypto-3.3",
"compiler-5.0"]}
]}.
diff --git a/lib/stdlib/src/stdlib.appup.src b/lib/stdlib/src/stdlib.appup.src
index 5900fd3ff3..b3569c2848 100644
--- a/lib/stdlib/src/stdlib.appup.src
+++ b/lib/stdlib/src/stdlib.appup.src
@@ -1,7 +1,7 @@
%% -*- erlang -*-
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -17,9 +17,7 @@
%% %CopyrightEnd%
{"%VSN%",
%% Up from - max one major revision back
- [{<<"2\\.[1-2](\\.[0-9]+)*">>,[restart_new_emulator]}, %% 17.1-17.3
- {<<"2\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}], %% 17.0
+ [{<<"2\\.[0-4](\\.[0-9]+)*">>,[restart_new_emulator]}], %% 17.0-17.5
%% Down to - max one major revision back
- [{<<"2\\.[1-2](\\.[0-9]+)*">>,[restart_new_emulator]}, %% 17.1-17.3
- {<<"2\\.0(\\.[0-9]+)*">>,[restart_new_emulator]}] %% 17.0
+ [{<<"2\\.[0-4](\\.[0-9]+)*">>,[restart_new_emulator]}] %% 17.0-17.5
}.
diff --git a/lib/stdlib/src/string.erl b/lib/stdlib/src/string.erl
index f9b083a56d..f6903d1c3d 100644
--- a/lib/stdlib/src/string.erl
+++ b/lib/stdlib/src/string.erl
@@ -221,23 +221,47 @@ substr2([_|String], S) -> substr2(String, S-1).
Tokens :: [Token :: nonempty_string()].
tokens(S, Seps) ->
- tokens1(S, Seps, []).
+ case Seps of
+ [] ->
+ case S of
+ [] -> [];
+ [_|_] -> [S]
+ end;
+ [C] ->
+ tokens_single_1(reverse(S), C, []);
+ [_|_] ->
+ tokens_multiple_1(reverse(S), Seps, [])
+ end.
-tokens1([C|S], Seps, Toks) ->
+tokens_single_1([Sep|S], Sep, Toks) ->
+ tokens_single_1(S, Sep, Toks);
+tokens_single_1([C|S], Sep, Toks) ->
+ tokens_single_2(S, Sep, Toks, [C]);
+tokens_single_1([], _, Toks) ->
+ Toks.
+
+tokens_single_2([Sep|S], Sep, Toks, Tok) ->
+ tokens_single_1(S, Sep, [Tok|Toks]);
+tokens_single_2([C|S], Sep, Toks, Tok) ->
+ tokens_single_2(S, Sep, Toks, [C|Tok]);
+tokens_single_2([], _Sep, Toks, Tok) ->
+ [Tok|Toks].
+
+tokens_multiple_1([C|S], Seps, Toks) ->
case member(C, Seps) of
- true -> tokens1(S, Seps, Toks);
- false -> tokens2(S, Seps, Toks, [C])
+ true -> tokens_multiple_1(S, Seps, Toks);
+ false -> tokens_multiple_2(S, Seps, Toks, [C])
end;
-tokens1([], _Seps, Toks) ->
- reverse(Toks).
+tokens_multiple_1([], _Seps, Toks) ->
+ Toks.
-tokens2([C|S], Seps, Toks, Cs) ->
+tokens_multiple_2([C|S], Seps, Toks, Tok) ->
case member(C, Seps) of
- true -> tokens1(S, Seps, [reverse(Cs)|Toks]);
- false -> tokens2(S, Seps, Toks, [C|Cs])
+ true -> tokens_multiple_1(S, Seps, [Tok|Toks]);
+ false -> tokens_multiple_2(S, Seps, Toks, [C|Tok])
end;
-tokens2([], _Seps, Toks, Cs) ->
- reverse([reverse(Cs)|Toks]).
+tokens_multiple_2([], _Seps, Toks, Tok) ->
+ [Tok|Toks].
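The rewrite only adds a single-separator fast path; the observable behaviour is unchanged, since the input is reversed up front and the tokens are therefore collected already in order, with runs of separators still collapsing. Illustrative checks:

    ["abc","de","fg"] = string:tokens("..abc..de.fg.", "."),
    ["abc","de"]      = string:tokens("abc, de", ", "),
    []                = string:tokens("", "."),
    ["nosep"]         = string:tokens("nosep", "").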
-spec chars(Character, Number) -> String when
Character :: char(),
diff --git a/lib/stdlib/src/supervisor.erl b/lib/stdlib/src/supervisor.erl
index 658c00dc77..1d7396adee 100644
--- a/lib/stdlib/src/supervisor.erl
+++ b/lib/stdlib/src/supervisor.erl
@@ -381,7 +381,7 @@ handle_call({start_child, EArgs}, _From, State) when ?is_simple(State) ->
#child{mfargs = {M, F, A}} = Child,
Args = A ++ EArgs,
case do_start_child_i(M, F, Args) of
- {ok, undefined} when Child#child.restart_type =:= temporary ->
+ {ok, undefined} ->
{reply, {ok, undefined}, State};
{ok, Pid} ->
NState = save_dynamic_child(Child#child.restart_type, Pid, Args, State),
@@ -1383,7 +1383,7 @@ add_restart(State) ->
I = State#state.intensity,
P = State#state.period,
R = State#state.restarts,
- Now = erlang:now(),
+ Now = erlang:monotonic_time(1),
R1 = add_restart([Now|R], Now, P),
State1 = State#state{restarts = R1},
case length(R1) of
@@ -1403,26 +1403,8 @@ add_restart([R|Restarts], Now, Period) ->
add_restart([], _, _) ->
[].
-inPeriod(Time, Now, Period) ->
- case difference(Time, Now) of
- T when T > Period ->
- false;
- _ ->
- true
- end.
-
-%%
-%% Time = {MegaSecs, Secs, MicroSecs} (NOTE: MicroSecs is ignored)
-%% Calculate the time elapsed in seconds between two timestamps.
-%% If MegaSecs is equal just subtract Secs.
-%% Else calculate the Mega difference and add the Secs difference,
-%% note that Secs difference can be negative, e.g.
-%% {827, 999999, 676} diff {828, 1, 653753} == > 2 secs.
-%%
-difference({TimeM, TimeS, _}, {CurM, CurS, _}) when CurM > TimeM ->
- ((CurM - TimeM) * 1000000) + (CurS - TimeS);
-difference({_, TimeS, _}, {_, CurS, _}) ->
- CurS - TimeS.
+inPeriod(Then, Now, Period) ->
+ Now =< Then + Period.
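With erlang:monotonic_time(1) the restart timestamps are plain monotonic seconds (possibly negative), so the in-period test reduces to a single integer comparison; illustrative values with Period = 5 seconds:

    true  = (-576460748 =< -576460750 + 5),   %% restart 2 s ago still counts
    false = (-576460742 =< -576460750 + 5).   %% restart 8 s ago has aged out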
%%% ------------------------------------------------------
%%% Error and progress reporting.
diff --git a/lib/stdlib/src/timer.erl b/lib/stdlib/src/timer.erl
index 72a2dd9616..c266177b4d 100644
--- a/lib/stdlib/src/timer.erl
+++ b/lib/stdlib/src/timer.erl
@@ -161,10 +161,11 @@ sleep(T) ->
Time :: integer(),
Value :: term().
tc(F) ->
- Before = os:timestamp(),
+ T1 = erlang:monotonic_time(),
Val = F(),
- After = os:timestamp(),
- {now_diff(After, Before), Val}.
+ T2 = erlang:monotonic_time(),
+ Time = erlang:convert_time_unit(T2 - T1, native, micro_seconds),
+ {Time, Val}.
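The same monotonic-clock pattern can be used for any ad-hoc measurement; a minimal sketch:

    T1 = erlang:monotonic_time(),
    timer:sleep(10),
    T2 = erlang:monotonic_time(),
    Micros = erlang:convert_time_unit(T2 - T1, native, micro_seconds).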
%%
%% Measure the execution time (in microseconds) for Fun(Args).
@@ -175,10 +176,11 @@ tc(F) ->
Time :: integer(),
Value :: term().
tc(F, A) ->
- Before = os:timestamp(),
+ T1 = erlang:monotonic_time(),
Val = apply(F, A),
- After = os:timestamp(),
- {now_diff(After, Before), Val}.
+ T2 = erlang:monotonic_time(),
+ Time = erlang:convert_time_unit(T2 - T1, native, micro_seconds),
+ {Time, Val}.
%%
%% Measure the execution time (in microseconds) for an MFA.
@@ -190,10 +192,11 @@ tc(F, A) ->
Time :: integer(),
Value :: term().
tc(M, F, A) ->
- Before = os:timestamp(),
+ T1 = erlang:monotonic_time(),
Val = apply(M, F, A),
- After = os:timestamp(),
- {now_diff(After, Before), Val}.
+ T2 = erlang:monotonic_time(),
+ Time = erlang:convert_time_unit(T2 - T1, native, micro_seconds),
+ {Time, Val}.
%%
%% Calculate the time difference (in microseconds) of two
@@ -437,10 +440,8 @@ positive(X) ->
%%
%% system_time() -> time in microseconds
%%
-system_time() ->
- {M,S,U} = erlang:now(),
- 1000000 * (M*1000000 + S) + U.
-
+system_time() ->
+ erlang:monotonic_time(1000000).
send([Pid, Msg]) ->
Pid ! Msg.
diff --git a/lib/stdlib/src/win32reg.erl b/lib/stdlib/src/win32reg.erl
index 48a7e262be..38c41a5f6e 100644
--- a/lib/stdlib/src/win32reg.erl
+++ b/lib/stdlib/src/win32reg.erl
@@ -218,12 +218,7 @@ expand([C|Rest], [], Result) ->
expand(Rest, [], [C|Result]);
expand([$%|Rest], Env0, Result) ->
Env = lists:reverse(Env0),
- case os:getenv(Env) of
- false ->
- expand(Rest, [], Result);
- Value ->
- expand(Rest, [], lists:reverse(Value)++Result)
- end;
+ expand(Rest, [], lists:reverse(os:getenv(Env, ""))++Result);
expand([C|Rest], Env, Result) ->
expand(Rest, [C|Env], Result);
expand([], [], Result) ->
diff --git a/lib/stdlib/src/zip.erl b/lib/stdlib/src/zip.erl
index b768c6d0b9..f986c0081d 100644
--- a/lib/stdlib/src/zip.erl
+++ b/lib/stdlib/src/zip.erl
@@ -24,7 +24,7 @@
list_dir/1, list_dir/2, table/1, table/2,
t/1, tt/1]).
-%% unzipping peicemeal
+%% unzipping piecemeal
-export([openzip_open/1, openzip_open/2,
openzip_get/1, openzip_get/2,
openzip_t/1, openzip_tt/1,
@@ -214,7 +214,9 @@
-type zip_comment() :: #zip_comment{}.
-type zip_file() :: #zip_file{}.
--export_type([create_option/0, filename/0]).
+-opaque handle() :: pid().
+
+-export_type([create_option/0, filename/0, handle/0]).
%% Open a zip archive with options
%%
@@ -500,7 +502,7 @@ do_list_dir(F, Options) ->
-spec(t(Archive) -> ok when
Archive :: file:name() | binary() | ZipHandle,
- ZipHandle :: pid()).
+ ZipHandle :: handle()).
t(F) when is_pid(F) -> zip_t(F);
t(F) when is_record(F, openzip) -> openzip_t(F);
@@ -524,7 +526,7 @@ do_t(F, RawPrint) ->
-spec(tt(Archive) -> ok when
Archive :: file:name() | binary() | ZipHandle,
- ZipHandle :: pid()).
+ ZipHandle :: handle()).
tt(F) when is_pid(F) -> zip_tt(F);
tt(F) when is_record(F, openzip) -> openzip_tt(F);
@@ -1114,15 +1116,19 @@ local_file_header_from_info_method_name(#file_info{mtime = MTime},
file_name_length = length(Name),
extra_field_length = 0}.
+server_init(Parent) ->
+ %% we want to know if our parent dies
+ process_flag(trap_exit, true),
+ server_loop(Parent, not_open).
%% small, simple, stupid zip-archive server
-server_loop(OpenZip) ->
+server_loop(Parent, OpenZip) ->
receive
{From, {open, Archive, Options}} ->
case openzip_open(Archive, Options) of
{ok, NewOpenZip} ->
From ! {self(), {ok, self()}},
- server_loop(NewOpenZip);
+ server_loop(Parent, NewOpenZip);
Error ->
From ! {self(), Error}
end;
@@ -1130,43 +1136,47 @@ server_loop(OpenZip) ->
From ! {self(), openzip_close(OpenZip)};
{From, get} ->
From ! {self(), openzip_get(OpenZip)},
- server_loop(OpenZip);
+ server_loop(Parent, OpenZip);
{From, {get, FileName}} ->
From ! {self(), openzip_get(FileName, OpenZip)},
- server_loop(OpenZip);
+ server_loop(Parent, OpenZip);
{From, list_dir} ->
From ! {self(), openzip_list_dir(OpenZip)},
- server_loop(OpenZip);
+ server_loop(Parent, OpenZip);
{From, {list_dir, Opts}} ->
From ! {self(), openzip_list_dir(OpenZip, Opts)},
- server_loop(OpenZip);
+ server_loop(Parent, OpenZip);
{From, get_state} ->
From ! {self(), OpenZip},
- server_loop(OpenZip);
+ server_loop(Parent, OpenZip);
+ {'EXIT', Parent, Reason} ->
+ _ = openzip_close(OpenZip),
+ exit({parent_died, Reason});
_ ->
{error, bad_msg}
end.
-spec(zip_open(Archive) -> {ok, ZipHandle} | {error, Reason} when
Archive :: file:name() | binary(),
- ZipHandle :: pid(),
+ ZipHandle :: handle(),
Reason :: term()).
zip_open(Archive) -> zip_open(Archive, []).
-spec(zip_open(Archive, Options) -> {ok, ZipHandle} | {error, Reason} when
Archive :: file:name() | binary(),
- ZipHandle :: pid(),
+ ZipHandle :: handle(),
Options :: [Option],
Option :: cooked | memory | {cwd, CWD :: file:filename()},
Reason :: term()).
zip_open(Archive, Options) ->
- Pid = spawn(fun() -> server_loop(not_open) end),
- request(self(), Pid, {open, Archive, Options}).
+ Self = self(),
+ Pid = spawn_link(fun() -> server_init(Self) end),
+ request(Self, Pid, {open, Archive, Options}).
-spec(zip_get(ZipHandle) -> {ok, [Result]} | {error, Reason} when
- ZipHandle :: pid(),
+ ZipHandle :: handle(),
Result :: file:name() | {file:name(), binary()},
Reason :: term()).
@@ -1174,14 +1184,14 @@ zip_get(Pid) when is_pid(Pid) ->
request(self(), Pid, get).
-spec(zip_close(ZipHandle) -> ok | {error, einval} when
- ZipHandle :: pid()).
+ ZipHandle :: handle()).
zip_close(Pid) when is_pid(Pid) ->
request(self(), Pid, close).
-spec(zip_get(FileName, ZipHandle) -> {ok, Result} | {error, Reason} when
FileName :: file:name(),
- ZipHandle :: pid(),
+ ZipHandle :: handle(),
Result :: file:name() | {file:name(), binary()},
Reason :: term()).
@@ -1190,7 +1200,7 @@ zip_get(FileName, Pid) when is_pid(Pid) ->
-spec(zip_list_dir(ZipHandle) -> {ok, Result} | {error, Reason} when
Result :: [zip_comment() | zip_file()],
- ZipHandle :: pid(),
+ ZipHandle :: handle(),
Reason :: term()).
zip_list_dir(Pid) when is_pid(Pid) ->
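Two things change in zip.erl above: the archive server is now spawn_link'ed and traps exits, so an open handle is closed when the process that opened it dies, and the handle is exported as the opaque zip:handle() type instead of a bare pid(). A hedged usage sketch, assuming the memory option so file contents come back as binaries (the function name is illustrative):

%% Open an archive, list its contents, fetch one entry, and close the handle.
fetch_entry(Archive, FileName) ->
    {ok, Handle} = zip:zip_open(Archive, [memory]),
    {ok, Entries} = zip:zip_list_dir(Handle),
    {ok, {FileName, Bin}} = zip:zip_get(FileName, Handle),
    ok = zip:zip_close(Handle),
    {Entries, Bin}.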
diff --git a/lib/stdlib/test/Makefile b/lib/stdlib/test/Makefile
index a271229c59..61eb34d565 100644
--- a/lib/stdlib/test/Makefile
+++ b/lib/stdlib/test/Makefile
@@ -23,6 +23,7 @@ MODULES= \
dummy_via \
edlin_expand_SUITE \
epp_SUITE \
+ erl_anno_SUITE \
erl_eval_SUITE \
erl_expand_records_SUITE \
erl_internal_SUITE \
@@ -53,6 +54,7 @@ MODULES= \
proc_lib_SUITE \
qlc_SUITE \
queue_SUITE \
+ rand_SUITE \
random_SUITE \
re_SUITE \
run_pcre_tests \
diff --git a/lib/stdlib/test/binary_module_SUITE.erl b/lib/stdlib/test/binary_module_SUITE.erl
index f828c70b63..8d26c77c9b 100644
--- a/lib/stdlib/test/binary_module_SUITE.erl
+++ b/lib/stdlib/test/binary_module_SUITE.erl
@@ -993,43 +993,51 @@ random_parts(X,N) ->
random_ref_comp(doc) ->
["Test pseudorandomly generated cases against reference imlementation"];
random_ref_comp(Config) when is_list(Config) ->
- ?line put(success_counter,0),
- ?line random:seed({1271,769940,559934}),
- ?line do_random_match_comp(5000,{1,40},{30,1000}),
+ put(success_counter,0),
+ random:seed({1271,769940,559934}),
+ Nr = {1,40},
+ Hr = {30,1000},
+ I1 = 1500,
+ I2 = 5,
+ do_random_match_comp(I1,Nr,Hr),
io:format("Number of successes: ~p~n",[get(success_counter)]),
- ?line do_random_match_comp2(5000,{1,40},{30,1000}),
+ do_random_match_comp2(I1,Nr,Hr),
io:format("Number of successes: ~p~n",[get(success_counter)]),
- ?line do_random_match_comp3(5000,{1,40},{30,1000}),
+ do_random_match_comp3(I1,Nr,Hr),
io:format("Number of successes: ~p~n",[get(success_counter)]),
- ?line do_random_match_comp4(5000,{1,40},{30,1000}),
+ do_random_match_comp4(I1,Nr,Hr),
io:format("Number of successes: ~p~n",[get(success_counter)]),
- ?line do_random_matches_comp(5000,{1,40},{30,1000}),
+ do_random_matches_comp(I1,Nr,Hr),
io:format("Number of successes: ~p~n",[get(success_counter)]),
- ?line do_random_matches_comp2(5000,{1,40},{30,1000}),
+ do_random_matches_comp2(I1,Nr,Hr),
io:format("Number of successes: ~p~n",[get(success_counter)]),
- ?line do_random_matches_comp3(5,{1,40},{30,1000}),
- ?line erts_debug:set_internal_state(available_internal_state,true),
- ?line io:format("oldlimit: ~p~n",[ erts_debug:set_internal_state(binary_loop_limit,100)]),
- ?line do_random_match_comp(5000,{1,40},{30,1000}),
- ?line do_random_matches_comp3(5,{1,40},{30,1000}),
- ?line io:format("limit was: ~p~n",[ erts_debug:set_internal_state(binary_loop_limit,default)]),
- ?line erts_debug:set_internal_state(available_internal_state,false),
+ do_random_matches_comp3(I2,Nr,Hr),
+ erts_debug:set_internal_state(available_internal_state,true),
+ io:format("oldlimit: ~p~n",[ erts_debug:set_internal_state(binary_loop_limit,100)]),
+ do_random_match_comp(I1,Nr,Hr),
+ do_random_matches_comp3(I2,Nr,Hr),
+ io:format("limit was: ~p~n",[ erts_debug:set_internal_state(binary_loop_limit,default)]),
+ erts_debug:set_internal_state(available_internal_state,false),
ok.
random_ref_sr_comp(doc) ->
["Test pseudorandomly generated cases against reference imlementation of split and replace"];
random_ref_sr_comp(Config) when is_list(Config) ->
- ?line put(success_counter,0),
- ?line random:seed({1271,769940,559934}),
- ?line do_random_split_comp(5000,{1,40},{30,1000}),
+ put(success_counter,0),
+ random:seed({1271,769940,559934}),
+ Nr = {1,40},
+ Hr = {30,1000},
+ I1 = 1500,
+ do_random_split_comp(I1,Nr,Hr),
io:format("Number of successes: ~p~n",[get(success_counter)]),
- ?line do_random_replace_comp(5000,{1,40},{30,1000}),
+ do_random_replace_comp(I1,Nr,Hr),
io:format("Number of successes: ~p~n",[get(success_counter)]),
- ?line do_random_split_comp2(5000,{1,40},{30,1000}),
+ do_random_split_comp2(I1,Nr,Hr),
io:format("Number of successes: ~p~n",[get(success_counter)]),
- ?line do_random_replace_comp2(5000,{1,40},{30,1000}),
+ do_random_replace_comp2(I1,Nr,Hr),
io:format("Number of successes: ~p~n",[get(success_counter)]),
ok.
+
random_ref_fla_comp(doc) ->
["Test pseudorandomly generated cases against reference imlementation of split and replace"];
random_ref_fla_comp(Config) when is_list(Config) ->
@@ -1130,7 +1138,9 @@ do_random_matches_comp3(N,NeedleRange,HaystackRange) ->
Needles = [random_substring(NeedleRange,Haystack) ||
_ <- lists:duplicate(NumNeedles,a)],
RefRes = binref:matches(Haystack,Needles),
- true = do_matches_comp_loop(10000,Needles,Haystack, RefRes),
+ RefRes = binary:matches(Haystack,Needles),
+ Compiled = binary:compile_pattern(Needles),
+ true = do_matches_comp_loop(10000,Compiled,Haystack, RefRes),
do_random_matches_comp3(N-1,NeedleRange,HaystackRange).
do_matches_comp_loop(0,_,_,_) ->
@@ -1160,9 +1170,8 @@ do_matches_comp2(N,H,A) ->
end.
do_matches_comp(N,H) ->
A = ?MASK_ERROR(binref:matches(H,N)),
- B = ?MASK_ERROR(binref:matches(H,binref:compile_pattern(N))),
- C = ?MASK_ERROR(binary:matches(H,N)),
- D = ?MASK_ERROR(binary:matches(make_unaligned(H),
+ B = ?MASK_ERROR(binary:matches(H,N)),
+ C = ?MASK_ERROR(binary:matches(make_unaligned(H),
binary:compile_pattern([make_unaligned2(X) || X <- N]))),
if
A =/= nomatch ->
@@ -1170,14 +1179,14 @@ do_matches_comp(N,H) ->
true ->
ok
end,
- case {(A =:= B), (B =:= C),(C =:= D)} of
- {true,true,true} ->
+ case {(A =:= B), (B =:= C)} of
+ {true,true} ->
true;
_ ->
io:format("Failed to match ~p (needle) against ~s (haystack)~n",
[N,H]),
- io:format("A:~p,~nB:~p,~n,C:~p,~n,D:~p.~n",
- [A,B,C,D]),
+ io:format("A:~p,~nB:~p,~n,C:~p,~n",
+ [A,B,C]),
exit(mismatch)
end.
@@ -1219,46 +1228,44 @@ do_random_match_comp4(N,NeedleRange,HaystackRange) ->
do_match_comp(N,H) ->
A = ?MASK_ERROR(binref:match(H,N)),
- B = ?MASK_ERROR(binref:match(H,binref:compile_pattern([N]))),
- C = ?MASK_ERROR(binary:match(make_unaligned(H),N)),
- D = ?MASK_ERROR(binary:match(H,binary:compile_pattern([N]))),
- E = ?MASK_ERROR(binary:match(H,binary:compile_pattern(make_unaligned(N)))),
+ B = ?MASK_ERROR(binary:match(make_unaligned(H),N)),
+ C = ?MASK_ERROR(binary:match(H,binary:compile_pattern([N]))),
+ D = ?MASK_ERROR(binary:match(H,binary:compile_pattern(make_unaligned(N)))),
if
A =/= nomatch ->
put(success_counter,get(success_counter)+1);
true ->
ok
end,
- case {(A =:= B), (B =:= C),(C =:= D),(D =:= E)} of
- {true,true,true,true} ->
+ case {(A =:= B), (B =:= C),(C =:= D)} of
+ {true,true,true} ->
true;
_ ->
io:format("Failed to match ~s (needle) against ~s (haystack)~n",
[N,H]),
- io:format("A:~p,~nB:~p,~n,C:~p,~n,D:~p,E:~p.~n",
- [A,B,C,D,E]),
+ io:format("A:~p,~nB:~p,~n,C:~p,~n,D:~p.~n",
+ [A,B,C,D]),
exit(mismatch)
end.
do_match_comp3(N,H) ->
A = ?MASK_ERROR(binref:match(H,N)),
- B = ?MASK_ERROR(binref:match(H,binref:compile_pattern(N))),
- C = ?MASK_ERROR(binary:match(H,N)),
- D = ?MASK_ERROR(binary:match(H,binary:compile_pattern(N))),
+ B = ?MASK_ERROR(binary:match(H,N)),
+ C = ?MASK_ERROR(binary:match(H,binary:compile_pattern(N))),
if
A =/= nomatch ->
put(success_counter,get(success_counter)+1);
true ->
ok
end,
- case {(A =:= B), (B =:= C),(C =:= D)} of
- {true,true,true} ->
+ case {(A =:= B),(B =:= C)} of
+ {true,true} ->
true;
_ ->
io:format("Failed to match ~s (needle) against ~s (haystack)~n",
[N,H]),
- io:format("A:~p,~nB:~p,~n,C:~p,~n,D:~p.~n",
- [A,B,C,D]),
+ io:format("A:~p,~nB:~p,~n,C:~p.~n",
+ [A,B,C]),
exit(mismatch)
end.
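The dropped binref:compile_pattern branches aside, what these comparisons keep checking is that binary:matches/2 gives the same result whether the needles are passed as-is or pre-compiled with binary:compile_pattern/1; pre-compiling only pays off when the same needles are searched many times. A short illustration (count_hits/2 is an illustrative helper):

%% Compile the needles once and reuse the pattern across many haystacks.
count_hits(Haystacks, Needles) ->
    Pattern = binary:compile_pattern(Needles),
    lists:sum([length(binary:matches(H, Pattern)) || H <- Haystacks]).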
diff --git a/lib/stdlib/test/dict_SUITE.erl b/lib/stdlib/test/dict_SUITE.erl
index 69814e12ce..ab624e8dd2 100644
--- a/lib/stdlib/test/dict_SUITE.erl
+++ b/lib/stdlib/test/dict_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -25,16 +25,16 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
- create/1,store/1]).
+ create/1,store/1,iterate/1]).
-include_lib("test_server/include/test_server.hrl").
--import(lists, [foldl/3,reverse/1]).
+-import(lists, [foldl/3]).
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [create, store].
+ [create, store, iterate].
groups() ->
[].
@@ -93,6 +93,48 @@ store_1(List, M) ->
D0.
%%%
+%%% Test specifics for gb_trees.
+%%%
+
+iterate(Config) when is_list(Config) ->
+ test_all(fun iterate_1/1).
+
+iterate_1(M) ->
+ case M(module, []) of
+ gb_trees -> iterate_2(M);
+ _ -> ok
+ end,
+ M(empty, []).
+
+iterate_2(M) ->
+ random:seed(1, 2, 42),
+ iter_tree(M, 1000).
+
+iter_tree(_M, 0) ->
+ ok;
+iter_tree(M, N) ->
+ L = [{I, I} || I <- lists:seq(1, N)],
+ T = M(from_list, L),
+ L = lists:reverse(iterate_tree(M, T)),
+ R = random:uniform(N),
+ KV = lists:reverse(iterate_tree_from(M, R, T)),
+ KV = [P || P={K,_} <- L, K >= R],
+ iter_tree(M, N-1).
+
+iterate_tree(M, Tree) ->
+ I = M(iterator, Tree),
+ iterate_tree_1(M, M(next, I), []).
+
+iterate_tree_from(M, Start, Tree) ->
+ I = M(iterator_from, {Start, Tree}),
+ iterate_tree_1(M, M(next, I), []).
+
+iterate_tree_1(_, none, R) ->
+ R;
+iterate_tree_1(M, {K, V, I}, R) ->
+ iterate_tree_1(M, M(next, I), [{K, V} | R]).
+
+%%%
%%% Helper functions.
%%%
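The new iterate test targets gb_trees:iterator_from/2 (new in OTP 18): the iterator starts at the first key not smaller than Start, and gb_trees:next/1 then yields the remaining entries in key order, which is what the KV = [P || P={K,_} <- L, K >= R] assertion verifies. A standalone sketch of that traversal (function names are illustrative):

%% Collect every {Key, Value} with Key >= Start from a gb_tree, in key order.
pairs_from(Start, Tree) ->
    next_pairs(gb_trees:next(gb_trees:iterator_from(Start, Tree)), []).

next_pairs(none, Acc) ->
    lists:reverse(Acc);
next_pairs({K, V, Iter}, Acc) ->
    next_pairs(gb_trees:next(Iter), [{K, V} | Acc]).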
diff --git a/lib/stdlib/test/dict_test_lib.erl b/lib/stdlib/test/dict_test_lib.erl
index 4fdb4fa0bd..81d26ce5f8 100644
--- a/lib/stdlib/test/dict_test_lib.erl
+++ b/lib/stdlib/test/dict_test_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -29,6 +29,9 @@ new(Mod, Eq) ->
(module, []) -> Mod;
(size, D) -> Mod:size(D);
(is_empty, D) -> Mod:is_empty(D);
+ (iterator, S) -> Mod:iterator(S);
+ (iterator_from, {Start, S}) -> Mod:iterator_from(Start, S);
+ (next, I) -> Mod:next(I);
(to_list, D) -> to_list(Mod, D)
end.
diff --git a/lib/stdlib/test/epp_SUITE.erl b/lib/stdlib/test/epp_SUITE.erl
index b17e8bd186..9ab170c826 100644
--- a/lib/stdlib/test/epp_SUITE.erl
+++ b/lib/stdlib/test/epp_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1998-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1998-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -211,7 +211,7 @@ predef_mac(Config) when is_list(Config) ->
?line File = filename:join(?config(data_dir, Config), "mac3.erl"),
?line {ok, List} = epp:parse_file(File, [], []),
?line [_,
- {attribute, LineCol1, l, Line1},
+ {attribute, Anno, l, Line1},
{attribute, _, f, File},
{attribute, _, machine1, _},
{attribute, _, module, mac3},
@@ -219,13 +219,9 @@ predef_mac(Config) when is_list(Config) ->
{attribute, _, ms, "mac3"},
{attribute, _, machine2, _}
| _] = List,
- ?line case LineCol1 of
- Line1 -> ok;
- {Line1,_} -> ok
- end,
+ Line1 = erl_anno:line(Anno),
ok.
-
variable_1(doc) ->
[];
variable_1(suite) ->
@@ -553,11 +549,7 @@ otp_7702(Config) when is_list(Config) ->
{ok, AC} = beam_lib:chunks(BeamFile, [abstract_code]),
{file_7702,[{abstract_code,{_,Forms}}]} = AC,
- Fun = fun(Attrs) ->
- {line, L} = erl_parse:get_attribute(Attrs, line),
- L
- end,
- Forms2 = [erl_lint:modify_line(Form, Fun) || Form <- Forms],
+ Forms2 = unopaque_forms(Forms),
?line
[{attribute,1,file,_},
_,
@@ -1395,9 +1387,10 @@ otp_10820(Config) when is_list(Config) ->
do_otp_10820(File, C, PC) ->
{ok,Node} = start_node(erl_pp_helper, "+fnu " ++ PC),
ok = rpc:call(Node, file, write_file, [File, C]),
- {ok,[{attribute,1,file,{File,1}},
- {attribute,2,module,any},
- {eof,2}]} = rpc:call(Node, epp, parse_file, [File, [],[]]),
+ {ok, Forms} = rpc:call(Node, epp, parse_file, [File, [],[]]),
+ [{attribute,1,file,{File,1}},
+ {attribute,2,module,any},
+ {eof,2}] = unopaque_forms(Forms),
true = test_server:stop_node(Node),
ok.
@@ -1440,15 +1433,15 @@ encoding(Config) when is_list(Config) ->
{attribute,1,module,encoding},
{error,_},
{error,{2,epp,cannot_parse}},
- {eof,2}]} = epp:parse_file(ErlFile, []),
+ {eof,2}]} = epp_parse_file(ErlFile, []),
{ok,[{attribute,1,file,_},
{attribute,1,module,encoding},
{eof,3}]} =
- epp:parse_file(ErlFile, [{default_encoding,latin1}]),
+ epp_parse_file(ErlFile, [{default_encoding,latin1}]),
{ok,[{attribute,1,file,_},
{attribute,1,module,encoding},
{eof,3}],[{encoding,none}]} =
- epp:parse_file(ErlFile, [{default_encoding,latin1},extra]),
+ epp_parse_file(ErlFile, [{default_encoding,latin1},extra]),
%% Try a latin-1 file with encoding given in a comment.
C2 = <<"-module(encoding).
@@ -1459,27 +1452,27 @@ encoding(Config) when is_list(Config) ->
{ok,[{attribute,1,file,_},
{attribute,1,module,encoding},
{eof,4}]} =
- epp:parse_file(ErlFile, []),
+ epp_parse_file(ErlFile, []),
{ok,[{attribute,1,file,_},
{attribute,1,module,encoding},
{eof,4}]} =
- epp:parse_file(ErlFile, [{default_encoding,latin1}]),
+ epp_parse_file(ErlFile, [{default_encoding,latin1}]),
{ok,[{attribute,1,file,_},
{attribute,1,module,encoding},
{eof,4}]} =
- epp:parse_file(ErlFile, [{default_encoding,utf8}]),
+ epp_parse_file(ErlFile, [{default_encoding,utf8}]),
{ok,[{attribute,1,file,_},
{attribute,1,module,encoding},
{eof,4}],[{encoding,latin1}]} =
- epp:parse_file(ErlFile, [extra]),
+ epp_parse_file(ErlFile, [extra]),
{ok,[{attribute,1,file,_},
{attribute,1,module,encoding},
{eof,4}],[{encoding,latin1}]} =
- epp:parse_file(ErlFile, [{default_encoding,latin1},extra]),
+ epp_parse_file(ErlFile, [{default_encoding,latin1},extra]),
{ok,[{attribute,1,file,_},
{attribute,1,module,encoding},
{eof,4}],[{encoding,latin1}]} =
- epp:parse_file(ErlFile, [{default_encoding,utf8},extra]),
+ epp_parse_file(ErlFile, [{default_encoding,utf8},extra]),
ok.
@@ -1552,6 +1545,17 @@ errs([_|L], File) ->
errs([], _File) ->
[].
+epp_parse_file(File, Opts) ->
+ case epp:parse_file(File, Opts) of
+ {ok, Forms} ->
+ {ok, unopaque_forms(Forms)};
+ {ok, Forms, Other} ->
+ {ok, unopaque_forms(Forms), Other}
+ end.
+
+unopaque_forms(Forms) ->
+ [erl_parse:anno_to_term(Form) || Form <- Forms].
+
run_test(Config, Test0) ->
Test = [<<"-module(epp_test). -compile(export_all). ">>, Test0],
Filename = "epp_test.erl",
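The epp_parse_file/2 wrapper and unopaque_forms/1 exist because, from OTP 18 on, the second element of every abstract-format tuple is an opaque erl_anno annotation rather than a bare line number; erl_parse:anno_to_term/1 converts a form back to plain terms so the suite's literal patterns such as {attribute,1,file,_} still match. A minimal sketch of that conversion (parse_plain/1 is an illustrative helper; the input must end with a dot):

%% Scan and parse one form, then strip the opaque annotations for matching.
parse_plain(String) ->
    {ok, Tokens, _} = erl_scan:string(String),
    {ok, Form} = erl_parse:parse_form(Tokens),
    erl_parse:anno_to_term(Form).

For example, parse_plain("-module(m).") gives {attribute,1,module,m}.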
diff --git a/lib/stdlib/test/erl_anno_SUITE.erl b/lib/stdlib/test/erl_anno_SUITE.erl
new file mode 100644
index 0000000000..d024f6907d
--- /dev/null
+++ b/lib/stdlib/test/erl_anno_SUITE.erl
@@ -0,0 +1,568 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(erl_anno_SUITE).
+
+%-define(debug, true).
+
+-ifdef(debug).
+-include_lib("test_server/include/test_server.hrl").
+-define(format(S, A), io:format(S, A)).
+-else.
+-include_lib("test_server/include/test_server.hrl").
+-define(format(S, A), ok).
+-endif.
+
+-export([all/0, suite/0, groups/0, init_per_suite/1, end_per_suite/1,
+ init_per_group/2, end_per_group/2,
+ init_per_testcase/2, end_per_testcase/2]).
+
+-export([new/1, is_anno/1, generated/1, end_location/1, file/1,
+ line/1, location/1, record/1, text/1, bad/1, neg_line/1]).
+
+-export([parse_abstract/1, mapfold_anno/1]).
+
+all() ->
+ [{group, anno}, {group, parse}].
+
+groups() ->
+ [{anno, [], [new, is_anno, generated, end_location, file,
+ line, location, record, text, bad, neg_line]},
+ {parse, [], [parse_abstract, mapfold_anno]}].
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, Config) ->
+ Config.
+
+init_per_testcase(_Case, Config) ->
+ Dog=?t:timetrap(?t:minutes(1)),
+ [{watchdog, Dog}|Config].
+
+end_per_testcase(_Case, _Config) ->
+ Dog=?config(watchdog, _Config),
+ test_server:timetrap_cancel(Dog),
+ ok.
+
+-define(INFO(T, V), {T, V}).
+
+-dialyzer({no_fail_call, new/1}).
+new(doc) ->
+ ["Test erl_anno:new/1"];
+new(_Config) ->
+ {'EXIT', {badarg, _}} =
+ (catch erl_anno:new([{location,1},{text, "text"}])), % badarg
+ ok.
+
+is_anno(doc) ->
+ ["Test erl_anno:is_anno/1"];
+is_anno(_Config) ->
+ false = erl_anno:is_anno(a),
+ false = erl_anno:is_anno({a}),
+ false = erl_anno:is_anno([]),
+ false = erl_anno:is_anno([{location, 1}|{generated, true}]),
+ false = erl_anno:is_anno([{generated,false}]),
+ false = erl_anno:is_anno([{generated,true}]),
+ false = erl_anno:is_anno([{location,1},{file,nofile}]),
+ false = erl_anno:is_anno([{location,1},{text,notext}]),
+
+ true = erl_anno:is_anno(erl_anno:new(1)),
+ A0 = erl_anno:new({1, 17}),
+ true = erl_anno:is_anno(A0),
+ A1 = erl_anno:set_generated(true, A0),
+ true = erl_anno:is_anno(A1),
+ A2 = erl_anno:set_file("", A1),
+ true = erl_anno:is_anno(A2),
+ A3 = erl_anno:set_record(true, A2),
+ true = erl_anno:is_anno(A3),
+ A4 = erl_anno:set_text("text", A3),
+ true = erl_anno:is_anno(A4),
+ A5 = erl_anno:set_file(<<"filename">>, A4),
+ true = erl_anno:is_anno(A5),
+ ok.
+
+generated(doc) ->
+ ["Test 'generated'"];
+generated(_Config) ->
+ test(1, [{generated, true}, {generated, false}]),
+ test(1, [{generated, false}, {generated, true}, {generated, false}]),
+ test({1, 17}, [{generated, false},
+ {generated, true},
+ {generated, false}]),
+ test({1, 17}, [{text, "text", [{end_location, {1, 21}}, {length, 4}]},
+ {generated, false},
+ {generated, true},
+ {generated, false}]),
+ test(1, [{generated, false},
+ {generated, true},
+ {generated, false}]),
+ test(1, [{text, "text", [{end_location, 1}, {length, 4}]},
+ {generated, false},
+ {generated, true},
+ {generated, false}]),
+ ok.
+
+end_location(doc) ->
+ ["Test 'end_location'"];
+end_location(_Config) ->
+ test({1, 17}, [{text, "TEXT", [{end_location, {1, 21}}, {length, 4}]},
+ {text, "TEXT\n", [{end_location, {2, 1}}, {length, 5}]},
+ {text, "TEXT\ntxt", [{end_location, {2, 4}}, {length, 8}]}]),
+ test(1, [{text, "TEXT", [{end_location, 1}, {length, 4}]},
+ {text, "TEXT\n", [{end_location, 2}, {length, 5}]},
+ {text, "TEXT\ntxt", [{end_location, 2}, {length, 8}]}]),
+ ok.
+
+file(doc) ->
+ ["Test 'file'"];
+file(_Config) ->
+ test(1, [{file, "name"}, {file, ""}]),
+ test({1, 17}, [{file, "name"}, {file, ""}]),
+ ok.
+
+line(doc) ->
+ ["Test 'line'"];
+line(_Config) ->
+ test(1, [{line, 17, [{location, 17}]},
+ {location, {9, 8}, [{line, 9}, {column, 8}]},
+ {line, 14, [{location, {14, 8}}]}]),
+ ok.
+
+location(doc) ->
+ ["Test 'location'"];
+location(_Config) ->
+ test(1, [{location, 2, [{line,2}]},
+ {location, {1, 17}, [{line, 1}, {column, 17}]},
+ {location, {9, 6}, [{line, 9}, {column, 6}]},
+ {location, 9, [{column, undefined}]}]),
+ test(1, [{generated, true},
+ {location, 2, [{line,2}]},
+ {location, {1, 17}, [{line, 1}, {column, 17}]},
+ {location, {9, 6}, [{line, 9}, {column, 6}]},
+ {location, 9, [{column, undefined}]}]),
+ test(1, [{record, true},
+ {location, 2, [{line,2}]},
+ {location, {1, 17}, [{line, 1}, {column, 17}]},
+ {location, {9, 6}, [{line, 9}, {column, 6}]},
+ {location, 9, [{column, undefined}]}]),
+ ok.
+
+record(doc) ->
+ ["Test 'record'"];
+record(_Config) ->
+ test({1, 17}, [{record, true}, {record, false}]),
+ test(1, [{record, true}, {record, false}]),
+ test({1, 17}, [{generated, false},
+ {generated, true},
+ {generated, false}]),
+ test({1, 17}, [{text, "text", [{end_location, {1, 21}}, {length, 4}]},
+ {generated, false},
+ {generated, true},
+ {generated, false}]),
+ test(1, [{generated, false},
+ {generated, true},
+ {generated, false}]),
+ test(1, [{text, "text", [{end_location, 1}, {length, 4}]},
+ {generated, false},
+ {generated, true},
+ {generated, false}]),
+ ok.
+
+text(doc) ->
+ ["Test 'text'"];
+text(_Config) ->
+ test(1, [{text, "text", [{end_location, 1}, {length, 4}]},
+ {text, "", [{end_location, 1}, {length, 0}]}]),
+ test({1, 17}, [{text, "text", [{end_location, {1,21}}, {length, 4}]},
+ {text, "", [{end_location, {1,17}}, {length, 0}]}]),
+ ok.
+
+-dialyzer({[no_opaque, no_fail_call], bad/1}).
+bad(doc) ->
+ ["Test bad annotations"];
+bad(_Config) ->
+ Line = erl_anno:new(1),
+ LineColumn = erl_anno:new({1, 17}),
+ {'EXIT', {badarg, _}} =
+ (catch erl_anno:set_generated(true, bad)), % 3rd arg not opaque
+ {'EXIT', {badarg, _}} =
+ (catch erl_anno:set_generated(false, bad)), % 3rd arg not opaque
+ {'EXIT', {badarg, _}} =
+ (catch erl_anno:set_generated(19, Line)),
+ {'EXIT', {badarg, _}} =
+ (catch erl_anno:set_generated(19, LineColumn)),
+
+ {'EXIT', {badarg, _}} =
+ (catch erl_anno:generated(bad)), % 1st arg not opaque
+ {'EXIT', {badarg, _}} =
+ (catch erl_anno:end_location(bad)), % 1st arg not opaque
+ {'EXIT', {badarg, _}} =
+ (catch erl_anno:file(bad)), % 1st arg not opaque
+ {'EXIT', {badarg, _}} =
+ (catch erl_anno:text(bad)), % 1st arg not opaque
+ {'EXIT', {badarg, _}} =
+ (catch erl_anno:record(bad)), % 1st arg not opaque
+ ok.
+
+neg_line(doc) ->
+ ["Test negative line numbers (OTP 18)"];
+neg_line(_Config) ->
+ neg_line1(false),
+ neg_line1(true),
+ ok.
+
+neg_line1(TextToo) ->
+ Minus8_0 = erl_anno:new(-8),
+ Plus8_0 = erl_anno:new(8),
+ Minus8C_0 = erl_anno:new({-8, 17}),
+ Plus8C_0 = erl_anno:new({8, 17}),
+
+ [Minus8, Plus8, Minus8C, Plus8C] =
+ [case TextToo of
+ true ->
+ erl_anno:set_text("foo", A);
+ false ->
+ A
+ end || A <- [Minus8_0, Plus8_0, Minus8C_0, Plus8C_0]],
+
+ tst(-3, erl_anno:set_location(3, Minus8)),
+ tst(-3, erl_anno:set_location(-3, Plus8)),
+ tst(-3, erl_anno:set_location(-3, Minus8)),
+ tst({-3,9}, erl_anno:set_location({3, 9}, Minus8)),
+ tst({-3,9}, erl_anno:set_location({-3, 9}, Plus8)),
+ tst({-3,9}, erl_anno:set_location({-3, 9}, Minus8)),
+ tst(-3, erl_anno:set_location(3, Minus8C)),
+ tst(-3, erl_anno:set_location(-3, Plus8C)),
+ tst(-3, erl_anno:set_location(-3, Minus8C)),
+ tst({-3,9}, erl_anno:set_location({3, 9}, Minus8C)),
+ tst({-3,9}, erl_anno:set_location({-3, 9}, Plus8C)),
+ tst({-3,9}, erl_anno:set_location({-3, 9}, Minus8C)),
+
+ tst(-8, erl_anno:set_generated(true, Plus8)),
+ tst(-8, erl_anno:set_generated(true, Minus8)),
+ tst({-8,17}, erl_anno:set_generated(true, Plus8C)),
+ tst({-8,17}, erl_anno:set_generated(true, Minus8C)),
+ tst(8, erl_anno:set_generated(false, Plus8)),
+ tst(8, erl_anno:set_generated(false, Minus8)),
+ tst({8,17}, erl_anno:set_generated(false, Plus8C)),
+ tst({8,17}, erl_anno:set_generated(false, Minus8C)),
+
+ tst(-3, erl_anno:set_line(3, Minus8)),
+ tst(-3, erl_anno:set_line(-3, Plus8)),
+ tst(-3, erl_anno:set_line(-3, Minus8)),
+ tst({-3,17}, erl_anno:set_line(3, Minus8C)),
+ tst({-3,17}, erl_anno:set_line(-3, Plus8C)),
+ tst({-3,17}, erl_anno:set_line(-3, Minus8C)),
+ ok.
+
+tst(Term, Anno) ->
+ ?format("Term: ~p\n", [Term]),
+ ?format("Anno: ~p\n", [Anno]),
+ case anno_to_term(Anno) of
+ Term ->
+ ok;
+ Else ->
+ case lists:keyfind(location, 1, Else) of
+ {location, Term} ->
+ ok;
+ _Else2 ->
+ ?format("Else2 ~p\n", [_Else2]),
+ io:format("expected ~p\n got ~p\n", [Term, Else]),
+ exit({Term, Else})
+ end
+ end.
+
+parse_abstract(doc) ->
+ ["Test erl_parse:new_anno/1, erl_parse:anno_to_term/1"
+ ", and erl_parse:anno_from_term/1"];
+parse_abstract(_Config) ->
+ T = sample_term(),
+ A = erl_parse:abstract(T, [{line,17}]),
+ T1 = erl_parse:anno_to_term(A),
+ Abstr = erl_parse:new_anno(T1),
+ T = erl_parse:normalise(Abstr),
+ Abstr2 = erl_parse:anno_from_term(T1),
+ T = erl_parse:normalise(Abstr2),
+ ok.
+
+mapfold_anno(doc) ->
+ ["Test erl_parse:{map_anno/2,fold_anno/3, and mapfold_anno/3}"];
+mapfold_anno(_Config) ->
+ T = sample_term(),
+ Abstr = erl_parse:abstract(T),
+ CF = fun(Anno, {L, D}) ->
+ {erl_anno:new(L), {L+1, dict:store(L, Anno, D)}}
+ end,
+ {U, {N, D}} = erl_parse:mapfold_anno(CF, {1, dict:new()}, Abstr),
+ SeqA = erl_parse:fold_anno(fun(Anno, Acc) -> [Anno|Acc] end, [], U),
+ Seq = [erl_anno:location(A) || A <- SeqA],
+ Seq = lists:seq(N-1, 1, -1),
+ NF = fun(Anno) ->
+ L = erl_anno:location(Anno),
+ dict:fetch(L, D)
+ end,
+ Abstr = erl_parse:map_anno(NF, U),
+ ok.
+
+sample_term() ->
+ %% This is just a sample.
+ {3,a,4.0,"foo",<<"bar">>,#{a => <<19:64/unsigned-little>>},
+ [1000,2000]}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+test(StartLocation, Updates) ->
+ S0 = init(StartLocation),
+ A0 = erl_anno:new(StartLocation),
+ chk(S0, A0, []),
+ eval(Updates, S0, A0).
+
+eval([], _S0, _A0) ->
+ ok;
+eval([{Item, Value}|Updates], S0, A0) ->
+ {S, A} = set(Item, Value, A0, S0, []),
+ eval(Updates, S, A);
+eval([{Item, Value, Secondary}|Updates], S0, A0) ->
+ {S, A} = set(Item, Value, A0, S0, Secondary),
+ eval(Updates, S, A).
+
+init({Line, Column}) ->
+ lists:sort([{location, {Line, Column}} | default()]);
+init(Line) when is_integer(Line) ->
+ lists:sort([{location, Line} | default()]).
+
+set(Item, Value, Anno0, State0, Secondary) ->
+ true = lists:member(Item, primary_items()),
+ ?format("Set '~w' to ~p\n", [Item, Value]),
+ State = set_value(Item, Value, State0),
+ Anno = anno_set(Item, Value, Anno0),
+ ?format("State0 ~p\n", [State0]),
+ ?format("State ~p\n", [State]),
+ ?format("Anno0 ~p\n", [anno_to_term(Anno0)]),
+ ?format("Anno ~p\n", [anno_to_term(Anno)]),
+ chk(State, Anno, Secondary),
+ ok = frame(Anno0, Anno, Secondary),
+ {State, Anno}.
+
+frame(OldAnno, NewAnno, Secondary) ->
+ SecItems = [I || {I, _} <- Secondary],
+ Frame = secondary_items() -- (SecItems ++ primary_items()),
+ ?format("Frame items ~p\n", [Frame]),
+ frame1(Frame, OldAnno, NewAnno).
+
+frame1([], _OldAnno, _NewAnno) ->
+ ok;
+frame1([Item|Items], OldAnno, NewAnno) ->
+ V1 = anno_info(OldAnno, Item),
+ V2 = anno_info(NewAnno, Item),
+ ok = check_value(Item, V1, V2),
+ frame1(Items, OldAnno, NewAnno).
+
+chk(State, Anno, Secondary) ->
+ ok = check_simple(Anno),
+ ok = chk_primary(State, Anno),
+ ok = check_secondary(Secondary, State, Anno).
+
+chk_primary(State, Anno) ->
+ chk_primary(primary_items(), State, Anno).
+
+chk_primary([], _State, _Anno) ->
+ ok;
+chk_primary([Item | Items], State, Anno) ->
+ V1 = primary_value(Item, State),
+ V2 = anno_info(Anno, Item),
+ ok = check_value(Item, V1, V2),
+ chk_primary(Items, State, Anno).
+
+check_secondary([], _State, _Anno) ->
+ ok;
+check_secondary([{Item, _}=V1 | Secondary], State, Anno) ->
+ V2 = anno_info(Anno, Item),
+ case {V1, V2} of
+ {{Item, undefined}, undefined} ->
+ ok;
+ _ ->
+ ok = check_value(Item, V1, V2)
+ end,
+ check_secondary(Secondary, State, Anno).
+
+check_value(Item, V1, V2) ->
+ ?format("~w: V1 ~p\n", [Item, V1]),
+ ?format("~w: V2 ~p\n", [Item, V2]),
+ case V1 =:= V2 of
+ true ->
+ ok;
+ false ->
+ io:format("~w: expected ~p\n got ~p\n", [Item, V1, V2]),
+ exit({V1, V2})
+ end.
+
+check_simple(Anno) ->
+ Term = anno_to_term(Anno),
+ case find_defaults(Term) of
+ [] ->
+ ok;
+ Ds ->
+ io:format("found default values ~w in ~p\n", [Ds, Anno]),
+ exit({defaults, Anno})
+ end,
+ case check_simple1(Term) of
+ true ->
+ ok;
+ false ->
+ io:format("not simple ~p\n", [Anno]),
+ exit({not_simple, Anno})
+ end.
+
+check_simple1(L) when is_integer(L) ->
+ true;
+check_simple1({L, C}) when is_integer(L), is_integer(C) ->
+ true;
+check_simple1(List) ->
+ case lists:sort(List) of
+ [{location, _}] ->
+ false;
+ _ ->
+ true
+ end.
+
+find_defaults(L) when is_list(L) ->
+ [I ||
+ I <- default_items(),
+ {I1, Value} <- L,
+ I =:= I1,
+ Value =:= default_value(I)];
+find_defaults(_) ->
+ [].
+
+anno_to_term(Anno) ->
+ T = erl_anno:to_term(Anno),
+ maybe_sort(T).
+
+maybe_sort(L) when is_list(L) ->
+ lists:sort(L);
+maybe_sort(T) ->
+ T.
+
+anno_set(file, Value, Anno) ->
+ erl_anno:set_file(Value, Anno);
+anno_set(generated, Value, Anno) ->
+ erl_anno:set_generated(Value, Anno);
+anno_set(line, Value, Anno) ->
+ erl_anno:set_line(Value, Anno);
+anno_set(location, Value, Anno) ->
+ erl_anno:set_location(Value, Anno);
+anno_set(record, Value, Anno) ->
+ erl_anno:set_record(Value, Anno);
+anno_set(text, Value, Anno) ->
+ erl_anno:set_text(Value, Anno).
+
+anno_info(Anno, Item) ->
+ Value =
+ case Item of
+ column ->
+ erl_anno:column(Anno);
+ generated ->
+ erl_anno:generated(Anno);
+ end_location ->
+ erl_anno:end_location(Anno);
+ file ->
+ erl_anno:file(Anno);
+ length ->
+ case erl_anno:text(Anno) of
+ undefined ->
+ undefined;
+ Text ->
+ length(Text)
+ end;
+ line ->
+ erl_anno:line(Anno);
+ location ->
+ erl_anno:location(Anno);
+ record ->
+ erl_anno:record(Anno);
+ text ->
+ erl_anno:text(Anno);
+ _ ->
+ erlang:error(badarg, [Anno, Item])
+ end,
+ if
+ Value =:= undefined ->
+ undefined;
+ true ->
+ {Item, Value}
+ end.
+
+%%% Originally 'location' was primary while 'line' and 'column' were
+%%% secondary (their values are determined by 'location'). But since
+%%% set_line() is used kind of frequently, 'line' is also primary,
+%%% and 'location' secondary (depends on 'line'). 'line' need to be
+%%% handled separately.
+
+set_value(line, Line, State) ->
+ {location, Location} = primary_value(location, State),
+ NewLocation = case Location of
+ {_, Column} ->
+ {Line, Column};
+ _ ->
+ Line
+ end,
+ set_value(location, NewLocation, State);
+set_value(Item, Value, State) ->
+ lists:ukeymerge(1, [{Item, Value}], State).
+
+primary_value(line, State) ->
+ {location, Location} = primary_value(location, State),
+ {line, case Location of
+ {Line, _} ->
+ Line;
+ Line ->
+ Line
+ end};
+primary_value(Item, State) ->
+ case lists:keyfind(Item, 1, State) of
+ false ->
+ undefined;
+ Tuple ->
+ Tuple
+ end.
+
+default() ->
+ [{Tag, default_value(Tag)} || Tag <- default_items()].
+
+primary_items() ->
+ [file, generated, line, location, record, text].
+
+secondary_items() ->
+ %% 'length' has not been implemented
+ [column, end_location, length, line, location].
+
+default_items() ->
+ [generated, record].
+
+default_value(generated) -> false;
+default_value(record) -> false.
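The suite above exercises the erl_anno API introduced in OTP 18: an annotation is created from a location, either a line or a {Line, Column} pair, and optional items such as file, text, generated and record are added with setters and read back with accessors. A hedged sketch of the round trip (the helper name and literal values are illustrative):

%% Build an annotation from a line/column location and read a few items back.
anno_demo() ->
    A0 = erl_anno:new({7, 4}),
    A1 = erl_anno:set_file("demo.erl", A0),
    A2 = erl_anno:set_text("foo", A1),
    true = erl_anno:is_anno(A2),
    {7, 4} = erl_anno:location(A2),
    7 = erl_anno:line(A2),
    4 = erl_anno:column(A2),
    "demo.erl" = erl_anno:file(A2),
    "foo" = erl_anno:text(A2),
    ok.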
diff --git a/lib/stdlib/test/erl_eval_SUITE.erl b/lib/stdlib/test/erl_eval_SUITE.erl
index 3427f431c5..a750c5cace 100644
--- a/lib/stdlib/test/erl_eval_SUITE.erl
+++ b/lib/stdlib/test/erl_eval_SUITE.erl
@@ -1482,8 +1482,11 @@ eep43(Config) when is_list(Config) ->
" #{ K1 := 1, K2 := 2, K3 := 3, {2,2} := 4} = Map "
"end.",
#{ 1 => 1, <<42:301>> => 2, {3,<<42:301>>} => 3, {2,2} => 4}),
- error_check("[camembert]#{}.", {badarg,[camembert]}),
+ error_check("[camembert]#{}.", {badmap,[camembert]}),
+ error_check("[camembert]#{nonexisting:=v}.", {badmap,[camembert]}),
error_check("#{} = 1.", {badmatch,1}),
+ error_check("[]#{a=>error(bad)}.", bad),
+ error_check("(#{})#{nonexisting:=value}.", {badkey,nonexisting}),
ok.
%% Check the string in different contexts: as is; in fun; from compiled code.
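The added error_check cases pin down the OTP 18 exception terms for map updates: updating something that is not a map raises {badmap, Term}, and using := on a key that is absent from an existing map raises {badkey, Key}. A small compiled-code sketch of the same behaviour (map_errors/0 is an illustrative helper):

%% Demonstrate the badmap and badkey error reasons for failing map updates.
map_errors() ->
    NotAMap = [camembert],
    {badmap, [camembert]} =
        try NotAMap#{} catch error:Reason1 -> Reason1 end,
    Empty = #{},
    {badkey, nonexisting} =
        try Empty#{nonexisting := value} catch error:Reason2 -> Reason2 end,
    ok.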
diff --git a/lib/stdlib/test/erl_lint_SUITE.erl b/lib/stdlib/test/erl_lint_SUITE.erl
index f8a99f653a..c0d9b7c466 100644
--- a/lib/stdlib/test/erl_lint_SUITE.erl
+++ b/lib/stdlib/test/erl_lint_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -64,7 +64,7 @@
too_many_arguments/1,
basic_errors/1,bin_syntax_errors/1,
predef/1,
- maps/1,maps_type/1,otp_11851/1
+ maps/1,maps_type/1,otp_11851/1,otp_12195/1
]).
% Default timetrap timeout (set in init_per_testcase).
@@ -93,7 +93,7 @@ all() ->
bif_clash, behaviour_basic, behaviour_multiple, otp_11861,
otp_7550, otp_8051, format_warn, {group, on_load},
too_many_arguments, basic_errors, bin_syntax_errors, predef,
- maps, maps_type, otp_11851].
+ maps, maps_type, otp_11851, otp_12195].
groups() ->
[{unused_vars_warn, [],
@@ -3708,7 +3708,13 @@ maps(Config) ->
">>,
[],
{errors,[{4,erl_lint,illegal_map_construction},
- {6,erl_lint,illegal_map_key}],[]}}],
+ {6,erl_lint,illegal_map_key}],[]}},
+ {unused_vars_with_empty_maps,
+ <<"t(Foo, Bar, Baz) -> {#{},#{}}.">>,
+ [warn_unused_variables],
+ {warnings,[{1,erl_lint,{unused_var,'Bar'}},
+ {1,erl_lint,{unused_var,'Baz'}},
+ {1,erl_lint,{unused_var,'Foo'}}]}}],
[] = run(Config, Ts),
ok.
@@ -3828,6 +3834,40 @@ otp_11851(Config) when is_list(Config) ->
[] = run(Config, Ts),
ok.
+otp_12195(doc) ->
+ "OTP-12195: Check obsolete types (tailor made for OTP 18).";
+otp_12195(Config) when is_list(Config) ->
+ Ts = [{otp_12195_1,
+ <<"-export_type([r1/0]).
+ -type r1() :: erl_scan:line()
+ | erl_scan:column()
+ | erl_scan:location()
+ | erl_anno:line().">>,
+ [],
+ {warnings,[{2,erl_lint,
+ {deprecated_type,{erl_scan,line,0},
+ "deprecated (will be removed in OTP 19); "
+ "use erl_anno:line() instead"}},
+ {3,erl_lint,
+ {deprecated_type,{erl_scan,column,0},
+ "deprecated (will be removed in OTP 19); use "
+ "erl_anno:column() instead"}},
+ {4,erl_lint,
+ {deprecated_type,{erl_scan,location,0},
+ "deprecated (will be removed in OTP 19); "
+ "use erl_anno:location() instead"}}]}},
+ {otp_12195_2,
+ <<"-export_type([r1/0]).
+ -compile(nowarn_deprecated_type).
+ -type r1() :: erl_scan:line()
+ | erl_scan:column()
+ | erl_scan:location()
+ | erl_anno:line().">>,
+ [],
+ []}],
+ [] = run(Config, Ts),
+ ok.
+
run(Config, Tests) ->
F = fun({N,P,Ws,E}, BadL) ->
case catch run_test(Config, P, Ws) of
diff --git a/lib/stdlib/test/erl_pp_SUITE.erl b/lib/stdlib/test/erl_pp_SUITE.erl
index f71446dd64..1d63c8e17e 100644
--- a/lib/stdlib/test/erl_pp_SUITE.erl
+++ b/lib/stdlib/test/erl_pp_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -490,7 +490,7 @@ cond1(Config) when is_list(Config) ->
[{cons,3,{atom,3,a},{cons,3,{atom,3,b},{nil,3}}}]},
{clause,4,[],[[{atom,4,true}]],
[{tuple,5,[{atom,5,x},{atom,5,y}]}]}]},
- ?line CChars = lists:flatten(erl_pp:expr(C)),
+ CChars = flat_expr1(C),
% ?line "cond {foo,bar} -> [a,b]; true -> {x,y} end" = CChars,
?line "cond\n"
" {foo,bar} ->\n"
@@ -557,7 +557,7 @@ messages(Config) when is_list(Config) ->
lists:flatten(erl_pp:form({error,{some,"error"}})),
?line true = "{warning,{some,\"warning\"}}\n" =:=
lists:flatten(erl_pp:form({warning,{some,"warning"}})),
- ?line true = "\n" =:= lists:flatten(erl_pp:form({eof,0})),
+ "\n" = flat_form({eof,0}),
ok.
import_export(suite) ->
@@ -616,27 +616,29 @@ hook(Config) when is_list(Config) ->
do_hook(HookFun) ->
Lc = parse_expr(binary_to_list(<<"[X || X <- [1,2,3]].">>)),
H = HookFun(fun hook/4),
- Expr = {call,0,{atom,0,fff},[{foo,Lc},{foo,Lc},{foo,Lc}]},
+ A0 = erl_anno:new(0),
+ Expr = {call,A0,{atom,A0,fff},[{foo,Lc},{foo,Lc},{foo,Lc}]},
EChars = lists:flatten(erl_pp:expr(Expr, 0, H)),
- Call = {call,0,{atom,0,foo},[Lc]},
- Expr2 = {call,0,{atom,0,fff},[Call,Call,Call]},
+ Call = {call,A0,{atom,A0,foo},[Lc]},
+ Expr2 = {call,A0,{atom,A0,fff},[Call,Call,Call]},
EChars2 = erl_pp:exprs([Expr2]),
?line true = EChars =:= lists:flatten(EChars2),
EsChars = erl_pp:exprs([Expr], H),
?line true = EChars =:= lists:flatten(EsChars),
- F = {function,1,ffff,0,[{clause,1,[],[],[Expr]}]},
+ A1 = erl_anno:new(1),
+ F = {function,A1,ffff,0,[{clause,A1,[],[],[Expr]}]},
FuncChars = lists:flatten(erl_pp:function(F, H)),
- F2 = {function,1,ffff,0,[{clause,1,[],[],[Expr2]}]},
+ F2 = {function,A1,ffff,0,[{clause,A1,[],[],[Expr2]}]},
FuncChars2 = erl_pp:function(F2),
?line true = FuncChars =:= lists:flatten(FuncChars2),
FFormChars = erl_pp:form(F, H),
?line true = FuncChars =:= lists:flatten(FFormChars),
- A = {attribute,1,record,{r,[{record_field,1,{atom,1,a},Expr}]}},
+ A = {attribute,A1,record,{r,[{record_field,A1,{atom,A1,a},Expr}]}},
AChars = lists:flatten(erl_pp:attribute(A, H)),
- A2 = {attribute,1,record,{r,[{record_field,1,{atom,1,a},Expr2}]}},
+ A2 = {attribute,A1,record,{r,[{record_field,A1,{atom,A1,a},Expr2}]}},
AChars2 = erl_pp:attribute(A2),
?line true = AChars =:= lists:flatten(AChars2),
AFormChars = erl_pp:form(A, H),
@@ -645,10 +647,10 @@ do_hook(HookFun) ->
?line "INVALID-FORM:{foo,bar}:" = lists:flatten(erl_pp:expr({foo,bar})),
%% A list (as before R6), not a list of lists.
- G = [{op,1,'>',{atom,1,a},{foo,{atom,1,b}}}], % not a proper guard
+ G = [{op,A1,'>',{atom,A1,a},{foo,{atom,A1,b}}}], % not a proper guard
GChars = lists:flatten(erl_pp:guard(G, H)),
- G2 = [{op,1,'>',{atom,1,a},
- {call,0,{atom,0,foo},[{atom,1,b}]}}], % not a proper guard
+ G2 = [{op,A1,'>',{atom,A1,a},
+ {call,A0,{atom,A0,foo},[{atom,A1,b}]}}], % not a proper guard
GChars2 = erl_pp:guard(G2),
?line true = GChars =:= lists:flatten(GChars2),
@@ -659,14 +661,14 @@ do_hook(HookFun) ->
?line true = EChars =:= lists:flatten(XEChars2),
%% Note: no leading spaces before "begin".
- Block = {block,0,[{match,0,{var,0,'A'},{integer,0,3}},
- {atom,0,true}]},
+ Block = {block,A0,[{match,A0,{var,A0,'A'},{integer,A0,3}},
+ {atom,A0,true}]},
?line "begin\n A =" ++ _ =
lists:flatten(erl_pp:expr(Block, 17, none)),
%% Special...
?line true =
- "{some,value}" =:= lists:flatten(erl_pp:expr({value,0,{some,value}})),
+ "{some,value}" =:= lists:flatten(erl_pp:expr({value,A0,{some,value}})),
%% Silly...
?line true =
@@ -674,8 +676,8 @@ do_hook(HookFun) ->
flat_expr({'if',0,[{clause,0,[],[],[{atom,0,0}]}]}),
%% More compatibility: before R6
- OldIf = {'if',0,[{clause,0,[],[{atom,0,true}],[{atom,0,b}]}]},
- NewIf = {'if',0,[{clause,0,[],[[{atom,0,true}]],[{atom,0,b}]}]},
+ OldIf = {'if',A0,[{clause,A0,[],[{atom,A0,true}],[{atom,A0,b}]}]},
+ NewIf = {'if',A0,[{clause,A0,[],[[{atom,A0,true}]],[{atom,A0,b}]}]},
OldIfChars = lists:flatten(erl_pp:expr(OldIf)),
NewIfChars = lists:flatten(erl_pp:expr(NewIf)),
?line true = OldIfChars =:= NewIfChars,
@@ -691,7 +693,8 @@ ehook(HE, I, P, H, foo, bar) ->
hook(HE, I, P, H).
hook({foo,E}, I, P, H) ->
- erl_pp:expr({call,0,{atom,0,foo},[E]}, I, P, H).
+ A = erl_anno:new(0),
+ erl_pp:expr({call,A,{atom,A,foo},[E]}, I, P, H).
neg_indent(suite) ->
[];
@@ -774,7 +777,7 @@ otp_6911(Config) when is_list(Config) ->
{var,6,'X'},
[{clause,7,[{atom,7,true}],[],[{integer,7,12}]},
{clause,8,[{atom,8,false}],[],[{integer,8,14}]}]}]}]},
- ?line Chars = lists:flatten(erl_pp:form(F)),
+ Chars = flat_form(F),
?line "thomas(X) ->\n"
" case X of\n"
" true ->\n"
@@ -1084,10 +1087,11 @@ otp_10302(Config) when is_list(Config) ->
Opts = [{hook, fun unicode_hook/4},{encoding,unicode}],
Lc = parse_expr("[X || X <- [\"\x{400}\",\"\xFF\"]]."),
- Expr = {call,0,{atom,0,fff},[{foo,{foo,Lc}},{foo,{foo,Lc}}]},
+ A0 = erl_anno:new(0),
+ Expr = {call,A0,{atom,A0,fff},[{foo,{foo,Lc}},{foo,{foo,Lc}}]},
EChars = lists:flatten(erl_pp:expr(Expr, 0, Opts)),
- Call = {call,0,{atom,0,foo},[{call,0,{atom,0,foo},[Lc]}]},
- Expr2 = {call,0,{atom,0,fff},[Call,Call]},
+ Call = {call,A0,{atom,A0,foo},[{call,A0,{atom,A0,foo},[Lc]}]},
+ Expr2 = {call,A0,{atom,A0,fff},[Call,Call]},
EChars2 = erl_pp:exprs([Expr2], U),
EChars = lists:flatten(EChars2),
[$\x{400},$\x{400}] = [C || C <- EChars, C > 255],
@@ -1097,7 +1101,8 @@ otp_10302(Config) when is_list(Config) ->
ok.
unicode_hook({foo,E}, I, P, H) ->
- erl_pp:expr({call,0,{atom,0,foo},[E]}, I, P, H).
+ A = erl_anno:new(0),
+ erl_pp:expr({call,A,{atom,A,foo},[E]}, I, P, H).
otp_10820(doc) ->
"OTP-10820. Unicode filenames.";
@@ -1137,29 +1142,30 @@ otp_11100(Config) when is_list(Config) ->
%% Cannot trigger the use of the hook function with export/import.
"-export([{fy,a}/b]).\n" =
pf({attribute,1,export,[{{fy,a},b}]}),
+ A1 = erl_anno:new(1),
"-type foo() :: integer(INVALID-FORM:{foo,bar}:).\n" =
- pf({attribute,1,type,{foo,{type,1,integer,[{foo,bar}]},[]}}),
- pf({attribute,1,type,
- {a,{type,1,range,[{integer,1,1},{foo,bar}]},[]}}),
+ pf({attribute,A1,type,{foo,{type,A1,integer,[{foo,bar}]},[]}}),
+ pf({attribute,A1,type,
+ {a,{type,A1,range,[{integer,A1,1},{foo,bar}]},[]}}),
"-type foo(INVALID-FORM:{foo,bar}:) :: A.\n" =
- pf({attribute,1,type,{foo,{var,1,'A'},[{foo,bar}]}}),
+ pf({attribute,A1,type,{foo,{var,A1,'A'},[{foo,bar}]}}),
"-type foo() :: (INVALID-FORM:{foo,bar}: :: []).\n" =
- pf({attribute,1,type,
- {foo,{paren_type,1,
- [{ann_type,1,[{foo,bar},{type,1,nil,[]}]}]},
+ pf({attribute,A1,type,
+ {foo,{paren_type,A1,
+ [{ann_type,A1,[{foo,bar},{type,A1,nil,[]}]}]},
[]}}),
"-type foo() :: <<_:INVALID-FORM:{foo,bar}:>>.\n" =
- pf({attribute,1,type,
- {foo,{type,1,binary,[{foo,bar},{integer,1,0}]},[]}}),
+ pf({attribute,A1,type,
+ {foo,{type,A1,binary,[{foo,bar},{integer,A1,0}]},[]}}),
"-type foo() :: <<_:10, _:_*INVALID-FORM:{foo,bar}:>>.\n" =
- pf({attribute,1,type,
- {foo,{type,1,binary,[{integer,1,10},{foo,bar}]},[]}}),
+ pf({attribute,A1,type,
+ {foo,{type,A1,binary,[{integer,A1,10},{foo,bar}]},[]}}),
"-type foo() :: #r{INVALID-FORM:{foo,bar}: :: integer()}.\n" =
- pf({attribute,1,type,
- {foo,{type,1,record,
- [{atom,1,r},
- {type,1,field_type,
- [{foo,bar},{type,1,integer,[]}]}]},
+ pf({attribute,A1,type,
+ {foo,{type,A1,record,
+ [{atom,A1,r},
+ {type,A1,field_type,
+ [{foo,bar},{type,A1,integer,[]}]}]},
[]}}),
ok.
@@ -1239,9 +1245,18 @@ strip_module_info(Bin) ->
<<R:Start/binary,_/binary>> = Bin,
R.
-flat_expr(Expr) ->
+flat_expr1(Expr0) ->
+ Expr = erl_parse:new_anno(Expr0),
+ lists:flatten(erl_pp:expr(Expr)).
+
+flat_expr(Expr0) ->
+ Expr = erl_parse:new_anno(Expr0),
lists:flatten(erl_pp:expr(Expr, -1, none)).
+flat_form(Form0) ->
+ Form = erl_parse:new_anno(Form0),
+ lists:flatten(erl_pp:form(Form)).
+
pp_forms(Bin) ->
pp_forms(Bin, none).
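The new flat_expr1/flat_expr/flat_form helpers push the hand-written abstract terms through erl_parse:new_anno/1 first, turning the bare integers in position two of each tuple into proper annotations before erl_pp sees them. A small sketch of that direction of the conversion (the expression and function name are illustrative):

%% Pretty-print an abstract expression written with plain line numbers.
pp_plain_expr() ->
    Expr0 = {call, 0, {atom, 0, foo}, [{integer, 0, 42}]},
    Expr = erl_parse:new_anno(Expr0),
    lists:flatten(erl_pp:expr(Expr)).

This returns the string "foo(42)".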
diff --git a/lib/stdlib/test/erl_scan_SUITE.erl b/lib/stdlib/test/erl_scan_SUITE.erl
index 6ef947f0e3..fb85055b6c 100644
--- a/lib/stdlib/test/erl_scan_SUITE.erl
+++ b/lib/stdlib/test/erl_scan_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1998-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1998-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -138,7 +138,7 @@ iso88591(Config) when is_list(Config) ->
A1s = [$h,$ä,$r],
A2s = [$ö,$r,$e],
%% Test parsing atom and variable characters.
- {ok,Ts1,_} = erl_scan:string(V1s ++ " " ++ V2s ++
+ {ok,Ts1,_} = erl_scan_string(V1s ++ " " ++ V2s ++
"\327" ++
A1s ++ " " ++ A2s),
V1s = atom_to_list(element(3, nth(1, Ts1))),
@@ -151,7 +151,7 @@ iso88591(Config) when is_list(Config) ->
%% Test parsing and printing strings.
S1 = V1s ++ "\327" ++ A1s ++ "\250" ++ A2s,
S1s = "\"" ++ S1 ++ "\"",
- {ok,Ts2,_} = erl_scan:string(S1s),
+ {ok,Ts2,_} = erl_scan_string(S1s),
S1 = element(3, nth(1, Ts2)),
S1s = flatten(print(element(3, nth(1, Ts2)))),
ok %It all worked
@@ -219,7 +219,7 @@ atoms() ->
test_string([39,65,200,39], [{atom,{1,1},'AÈ'}]),
test_string("ärlig östen", [{atom,{1,1},ärlig},{atom,{1,7},östen}]),
?line {ok,[{atom,_,'$a'}],{1,6}} =
- erl_scan:string("'$\\a'", {1,1}),
+ erl_scan_string("'$\\a'", {1,1}),
?line test("'$\\a'"),
ok.
@@ -268,24 +268,24 @@ punctuations() ->
comments() ->
?line test("a %%\n b"),
- ?line {ok,[],1} = erl_scan:string("%"),
+ {ok,[],1} = erl_scan_string("%"),
?line test("a %%\n b"),
{ok,[{atom,{1,1},a},{atom,{2,2},b}],{2,3}} =
- erl_scan:string("a %%\n b",{1,1}),
+ erl_scan_string("a %%\n b", {1,1}),
{ok,[{atom,{1,1},a},{comment,{1,3},"%%"},{atom,{2,2},b}],{2,3}} =
- erl_scan:string("a %%\n b",{1,1}, [return_comments]),
+ erl_scan_string("a %%\n b",{1,1}, [return_comments]),
{ok,[{atom,{1,1},a},
{white_space,{1,2}," "},
{white_space,{1,5},"\n "},
{atom,{2,2},b}],
{2,3}} =
- erl_scan:string("a %%\n b",{1,1},[return_white_spaces]),
+ erl_scan_string("a %%\n b",{1,1},[return_white_spaces]),
{ok,[{atom,{1,1},a},
{white_space,{1,2}," "},
{comment,{1,3},"%%"},
{white_space,{1,5},"\n "},
{atom,{2,2},b}],
- {2,3}} = erl_scan:string("a %%\n b",{1,1},[return]),
+ {2,3}} = erl_scan_string("a %%\n b",{1,1},[return]),
ok.
errors() ->
@@ -337,11 +337,11 @@ base_integers() ->
erl_scan:string(Str)
end || {BS,S} <- [{"3","3"},{"15","f"}, {"12","c"}] ],
- ?line {ok,[{integer,1,239},{'@',1}],1} = erl_scan:string("16#ef@"),
+ {ok,[{integer,1,239},{'@',1}],1} = erl_scan_string("16#ef@"),
{ok,[{integer,{1,1},239},{'@',{1,6}}],{1,7}} =
- erl_scan:string("16#ef@", {1,1}, []),
+ erl_scan_string("16#ef@", {1,1}, []),
{ok,[{integer,{1,1},14},{atom,{1,5},g@}],{1,7}} =
- erl_scan:string("16#eg@", {1,1}, []),
+ erl_scan_string("16#eg@", {1,1}, []),
ok.
@@ -382,8 +382,8 @@ dots() ->
{ok,[{'.',{1,1}},{atom,{1,2},a}],{1,3}}}
],
[begin
- R = erl_scan:string(S),
- R2 = erl_scan:string(S, {1,1}, [])
+ R = erl_scan_string(S),
+ R2 = erl_scan_string(S, {1,1}, [])
end || {S, R, R2} <- Dot],
?line {ok,[{dot,_}=T1],{1,2}} = erl_scan:string(".", {1,1}, text),
@@ -417,7 +417,7 @@ dots() ->
{white_space,{1,4},"\n"},
{dot,{2,1}}],
{2,3}}, ""} =
- erl_scan:tokens(C, "\n. ", {1,1}, return), % any loc, any options
+ erl_scan_tokens(C, "\n. ", {1,1}, return), % any loc, any options
?line [test_string(S, R) ||
{S, R} <- [{".$\n", [{'.',{1,1}},{char,{1,2},$\n}]},
@@ -511,7 +511,7 @@ eof() ->
%% An error before R13A.
%% ?line {done,Err={error,{1,erl_scan,scan},1},eof} =
?line {done,{ok,[{atom,1,abra}],1},eof} =
- erl_scan:tokens(C2, eof, 1),
+ erl_scan_tokens(C2, eof, 1),
%% With column.
?line {more, C3} = erl_scan:tokens([]," \n",{1,1}),
@@ -520,7 +520,7 @@ eof() ->
%% An error before R13A.
%% ?line {done,{error,{{1,1},erl_scan,scan},{1,5}},eof} =
?line {done,{ok,[{atom,_,abra}],{1,5}},eof} =
- erl_scan:tokens(C4, eof, 1),
+ erl_scan_tokens(C4, eof, 1),
%% Robert's scanner returns "" as LeftoverChars;
%% the R12B scanner returns eof as LeftoverChars: (eof is correct)
@@ -528,26 +528,26 @@ eof() ->
%% An error before R13A.
%% ?line {done,{error,{1,erl_scan,scan},1},eof} =
?line {done,{ok,[{atom,1,a}],1},eof} =
- erl_scan:tokens(C5,eof,1),
+ erl_scan_tokens(C5,eof,1),
%% With column.
{more, C6} = erl_scan:tokens([], "a", {1,1}),
%% An error before R13A.
%% {done,{error,{1,erl_scan,scan},1},eof} =
{done,{ok,[{atom,{1,1},a}],{1,2}},eof} =
- erl_scan:tokens(C6,eof,1),
+ erl_scan_tokens(C6,eof,1),
%% A dot followed by eof is special:
?line {more, C} = erl_scan:tokens([], "a.", 1),
- ?line {done,{ok,[{atom,1,a},{dot,1}],1},eof} = erl_scan:tokens(C,eof,1),
- ?line {ok,[{atom,1,foo},{dot,1}],1} = erl_scan:string("foo."),
+ {done,{ok,[{atom,1,a},{dot,1}],1},eof} = erl_scan_tokens(C,eof,1),
+ {ok,[{atom,1,foo},{dot,1}],1} = erl_scan_string("foo."),
%% With column.
{more, CCol} = erl_scan:tokens([], "a.", {1,1}),
{done,{ok,[{atom,{1,1},a},{dot,{1,2}}],{1,3}},eof} =
- erl_scan:tokens(CCol,eof,1),
+ erl_scan_tokens(CCol,eof,1),
{ok,[{atom,{1,1},foo},{dot,{1,4}}],{1,5}} =
- erl_scan:string("foo.", {1,1}, []),
+ erl_scan_string("foo.", {1,1}, []),
ok.
@@ -628,23 +628,23 @@ crashes() ->
options() ->
%% line and column are not options, but tested here
?line {ok,[{atom,1,foo},{white_space,1," "},{comment,1,"% bar"}], 1} =
- erl_scan:string("foo % bar", 1, return),
+ erl_scan_string("foo % bar", 1, return),
?line {ok,[{atom,1,foo},{white_space,1," "}],1} =
- erl_scan:string("foo % bar", 1, return_white_spaces),
+ erl_scan_string("foo % bar", 1, return_white_spaces),
?line {ok,[{atom,1,foo},{comment,1,"% bar"}],1} =
- erl_scan:string("foo % bar", 1, return_comments),
+ erl_scan_string("foo % bar", 1, return_comments),
?line {ok,[{atom,17,foo}],17} =
- erl_scan:string("foo % bar", 17),
+ erl_scan_string("foo % bar", 17),
?line {'EXIT',{function_clause,_}} =
(catch {foo,
erl_scan:string("foo % bar", {a,1}, [])}), % type error
?line {ok,[{atom,_,foo}],{17,18}} =
- erl_scan:string("foo % bar", {17,9}, []),
+ erl_scan_string("foo % bar", {17,9}, []),
?line {'EXIT',{function_clause,_}} =
(catch {foo,
erl_scan:string("foo % bar", {1,0}, [])}), % type error
?line {ok,[{foo,1}],1} =
- erl_scan:string("foo % bar",1, [{reserved_word_fun,
+ erl_scan_string("foo % bar",1, [{reserved_word_fun,
fun(W) -> W =:= foo end}]),
?line {'EXIT',{badarg,_}} =
(catch {foo,
@@ -706,8 +706,9 @@ token_info() ->
attributes_info() ->
?line {'EXIT',_} =
(catch {foo,erl_scan:attributes_info(foo)}), % type error
- ?line [{line,18}] = erl_scan:attributes_info(18),
- ?line {location,19} = erl_scan:attributes_info(19, location),
+ [{line,18}] = erl_scan:attributes_info(erl_anno:new(18)),
+ {location,19} =
+ erl_scan:attributes_info(erl_anno:new(19), location),
?line {ok,[{atom,A0,foo}],_} = erl_scan:string("foo", 19, [text]),
?line {location,19} = erl_scan:attributes_info(A0, location),
@@ -735,7 +736,9 @@ attributes_info() ->
set_attribute() ->
F = fun(Line) -> -Line end,
- ?line -2 = erl_scan:set_attribute(line, 2, F),
+ Anno2 = erl_anno:new(2),
+ A0 = erl_scan:set_attribute(line, Anno2, F),
+ {line, -2} = erl_scan:attributes_info(A0, line),
?line {ok,[{atom,A1,foo}],_} = erl_scan:string("foo", {9,17}),
?line A2 = erl_scan:set_attribute(line, A1, F),
?line {line,-9} = erl_scan:attributes_info(A2, line),
@@ -765,10 +768,15 @@ set_attribute() ->
?line {ok,[{atom,A6,foo}],_} = erl_scan:string("foo", 11, [text]),
?line A7 = erl_scan:set_attribute(line, A6, F2),
- ?line {line,{17,11}} = erl_scan:attributes_info(A7, line),
+ %% Incompatible with pre 18:
+ %% {line,{17,11}} = erl_scan:attributes_info(A7, line),
+ {line,17} = erl_scan:attributes_info(A7, line),
?line {location,{17,11}} = % mixed up
erl_scan:attributes_info(A7, location),
- ?line [{line,{17,11}},{text,"foo"}] =
+ %% Incompatible with pre 18:
+ %% [{line,{17,11}},{text,"foo"}] =
+ %% erl_scan:attributes_info(A7, [line,column,text]),
+ [{line,17},{column,11},{text,"foo"}] =
erl_scan:attributes_info(A7, [line,column,text]),
?line {'EXIT',_} =
@@ -776,9 +784,13 @@ set_attribute() ->
?line {'EXIT',{badarg,_}} =
(catch {foo, erl_scan:set_attribute(column, [], F2)}), % type error
+ Attr10 = erl_anno:new(8),
+ Attr20 = erl_scan:set_attribute(line, Attr10,
+ fun(L) -> {nos,'X',L} end),
%% OTP-9412
- ?line 8 = erl_scan:set_attribute(line, [{line,{nos,'X',8}}],
- fun({nos,_V,VL}) -> VL end),
+ Attr30 = erl_scan:set_attribute(line, Attr20,
+ fun({nos,_V,VL}) -> VL end),
+ 8 = erl_anno:to_term(Attr30),
ok.
column_errors() ->
@@ -812,7 +824,7 @@ white_spaces() ->
{white_space,_," "},
{atom,_,a},
{white_space,_,"\n"}],
- _} = erl_scan:string("\r a\n", {1,1}, return),
+ _} = erl_scan_string("\r a\n", {1,1}, return),
?line test("\r a\n"),
L = "{\"a\nb\", \"a\\nb\",\nabc\r,def}.\n\n",
?line {ok,[{'{',_},
@@ -829,7 +841,7 @@ white_spaces() ->
{'}',_},
{dot,_},
{white_space,_,"\n"}],
- _} = erl_scan:string(L, {1,1}, return),
+ _} = erl_scan_string(L, {1,1}, return),
?line test(L),
?line test("\"\n\"\n"),
?line test("\n\r\n"),
@@ -846,7 +858,7 @@ white_spaces() ->
unicode() ->
?line {ok,[{char,1,83},{integer,1,45}],1} =
- erl_scan:string("$\\12345"), % not unicode
+ erl_scan_string("$\\12345"), % not unicode
?line {error,{1,erl_scan,{illegal,character}},1} =
erl_scan:string([1089]),
@@ -858,7 +870,7 @@ unicode() ->
erl_scan:string("'a"++[1089]++"b'", {1,1}),
?line test("\"a"++[1089]++"b\""),
{ok,[{char,1,1}],1} =
- erl_scan:string([$$,$\\,$^,1089], 1),
+ erl_scan_string([$$,$\\,$^,1089], 1),
{error,{1,erl_scan,Error},1} =
erl_scan:string("\"qa\x{aaa}", 1),
@@ -870,13 +882,13 @@ unicode() ->
erl_scan:string("'qa\\x{aaa}'",{1,1}),
{ok,[{char,1,1089}],1} =
- erl_scan:string([$$,1089], 1),
+ erl_scan_string([$$,1089], 1),
{ok,[{char,1,1089}],1} =
- erl_scan:string([$$,$\\,1089], 1),
+ erl_scan_string([$$,$\\,1089], 1),
Qs = "$\\x{aaa}",
{ok,[{char,1,$\x{aaa}}],1} =
- erl_scan:string(Qs, 1),
+ erl_scan_string(Qs, 1),
{ok,[Q2],{1,9}} =
erl_scan:string("$\\x{aaa}", {1,1}, [text]),
[{category,char},{column,1},{length,8},
@@ -884,19 +896,19 @@ unicode() ->
erl_scan:token_info(Q2),
U1 = "\"\\x{aaa}\"",
- {ok,
- [{string,[{line,1},{column,1},{text,"\"\\x{aaa}\""}],[2730]}],
- {1,10}} = erl_scan:string(U1, {1,1}, [text]),
- {ok,[{string,1,[2730]}],1} = erl_scan:string(U1, 1),
+ {ok,[{string,A1,[2730]}],{1,10}} = erl_scan:string(U1, {1,1}, [text]),
+ [{line,1},{column,1},{text,"\"\\x{aaa}\""}] =
+ erl_scan:attributes_info(A1, [line, column, text]),
+ {ok,[{string,1,[2730]}],1} = erl_scan_string(U1, 1),
U2 = "\"\\x41\\x{fff}\\x42\"",
- {ok,[{string,1,[$\x41,$\x{fff},$\x42]}],1} = erl_scan:string(U2, 1),
+ {ok,[{string,1,[$\x41,$\x{fff},$\x42]}],1} = erl_scan_string(U2, 1),
U3 = "\"a\n\\x{fff}\n\"",
- {ok,[{string,1,[$a,$\n,$\x{fff},$\n]}],3} = erl_scan:string(U3, 1),
+ {ok,[{string,1,[$a,$\n,$\x{fff},$\n]}],3} = erl_scan_string(U3, 1),
U4 = "\"\\^\n\\x{aaa}\\^\n\"",
- {ok,[{string,1,[$\n,$\x{aaa},$\n]}],3} = erl_scan:string(U4, 1),
+ {ok,[{string,1,[$\n,$\x{aaa},$\n]}],3} = erl_scan_string(U4, 1),
%% Keep these tests:
?line test(Qs),
@@ -906,15 +918,15 @@ unicode() ->
?line test(U4),
Str1 = "\"ab" ++ [1089] ++ "cd\"",
- {ok,[{string,1,[$a,$b,1089,$c,$d]}],1} = erl_scan:string(Str1, 1),
+ {ok,[{string,1,[$a,$b,1089,$c,$d]}],1} = erl_scan_string(Str1, 1),
{ok,[{string,{1,1},[$a,$b,1089,$c,$d]}],{1,8}} =
- erl_scan:string(Str1, {1,1}),
+ erl_scan_string(Str1, {1,1}),
?line test(Str1),
Comment = "%% "++[1089],
{ok,[{comment,1,[$%,$%,$\s,1089]}],1} =
- erl_scan:string(Comment, 1, [return]),
+ erl_scan_string(Comment, 1, [return]),
{ok,[{comment,{1,1},[$%,$%,$\s,1089]}],{1,5}} =
- erl_scan:string(Comment, {1,1}, [return]),
+ erl_scan_string(Comment, {1,1}, [return]),
ok.
more_chars() ->
@@ -923,12 +935,12 @@ more_chars() ->
%% All kinds of tests...
?line {ok,[{char,_,123}],{1,4}} =
- erl_scan:string("$\\{",{1,1}),
+ erl_scan_string("$\\{",{1,1}),
?line {more, C1} = erl_scan:tokens([], "$\\{", {1,1}),
?line {done,{ok,[{char,_,123}],{1,4}},eof} =
- erl_scan:tokens(C1, eof, 1),
+ erl_scan_tokens(C1, eof, 1),
?line {ok,[{char,1,123},{atom,1,a},{'}',1}],1} =
- erl_scan:string("$\\{a}"),
+ erl_scan_string("$\\{a}"),
?line {error,{{1,1},erl_scan,char},{1,4}} =
erl_scan:string("$\\x", {1,1}),
@@ -993,11 +1005,11 @@ otp_10302(Config) when is_list(Config) ->
{error,{{1,1},erl_scan,{illegal,atom}},{1,12}} =
erl_scan:string("'qa\\x{aaa}'",{1,1}),
- {ok,[{char,1,1089}],1} = erl_scan:string([$$,1089], 1),
- {ok,[{char,1,1089}],1} = erl_scan:string([$$,$\\,1089],1),
+ {ok,[{char,1,1089}],1} = erl_scan_string([$$,1089], 1),
+ {ok,[{char,1,1089}],1} = erl_scan_string([$$,$\\,1089],1),
Qs = "$\\x{aaa}",
- {ok,[{char,1,2730}],1} = erl_scan:string(Qs,1),
+ {ok,[{char,1,2730}],1} = erl_scan_string(Qs, 1),
{ok,[Q2],{1,9}} = erl_scan:string(Qs,{1,1},[text]),
[{category,char},{column,1},{length,8},
{line,1},{symbol,16#aaa},{text,Qs}] =
@@ -1011,19 +1023,19 @@ otp_10302(Config) when is_list(Config) ->
{symbol,[16#aaa]},{text,U1}] = erl_scan:token_info(T1, Tags),
U2 = "\"\\x41\\x{fff}\\x42\"",
- {ok,[{string,1,[65,4095,66]}],1} = erl_scan:string(U2, 1),
+ {ok,[{string,1,[65,4095,66]}],1} = erl_scan_string(U2, 1),
U3 = "\"a\n\\x{fff}\n\"",
- {ok,[{string,1,[97,10,4095,10]}],3} = erl_scan:string(U3, 1),
+ {ok,[{string,1,[97,10,4095,10]}],3} = erl_scan_string(U3, 1),
U4 = "\"\\^\n\\x{aaa}\\^\n\"",
- {ok,[{string,1,[10,2730,10]}],3} = erl_scan:string(U4, 1,[]),
+ {ok,[{string,1,[10,2730,10]}],3} = erl_scan_string(U4, 1,[]),
Str1 = "\"ab" ++ [1089] ++ "cd\"",
{ok,[{string,1,[97,98,1089,99,100]}],1} =
- erl_scan:string(Str1,1),
+ erl_scan_string(Str1,1),
{ok,[{string,{1,1},[97,98,1089,99,100]}],{1,8}} =
- erl_scan:string(Str1, {1,1}),
+ erl_scan_string(Str1, {1,1}),
OK1 = 16#D800-1,
OK2 = 16#DFFF+1,
@@ -1038,19 +1050,19 @@ otp_10302(Config) when is_list(Config) ->
IllegalL = [Illegal1,Illegal2,Illegal3,Illegal4],
[{ok,[{comment,1,[$%,$%,$\s,OK]}],1} =
- erl_scan:string("%% "++[OK], 1, [return]) ||
+ erl_scan_string("%% "++[OK], 1, [return]) ||
OK <- OKL],
{ok,[{comment,_,[$%,$%,$\s,OK1]}],{1,5}} =
- erl_scan:string("%% "++[OK1], {1,1}, [return]),
+ erl_scan_string("%% "++[OK1], {1,1}, [return]),
[{error,{1,erl_scan,{illegal,character}},1} =
erl_scan:string("%% "++[Illegal], 1, [return]) ||
Illegal <- IllegalL],
{error,{{1,1},erl_scan,{illegal,character}},{1,5}} =
erl_scan:string("%% "++[Illegal1], {1,1}, [return]),
- [{ok,[],1} = erl_scan:string("%% "++[OK], 1, []) ||
+ [{ok,[],1} = erl_scan_string("%% "++[OK], 1, []) ||
OK <- OKL],
- {ok,[],{1,5}} = erl_scan:string("%% "++[OK1], {1,1}, []),
+ {ok,[],{1,5}} = erl_scan_string("%% "++[OK1], {1,1}, []),
[{error,{1,erl_scan,{illegal,character}},1} =
erl_scan:string("%% "++[Illegal], 1, []) ||
Illegal <- IllegalL],
@@ -1058,7 +1070,7 @@ otp_10302(Config) when is_list(Config) ->
erl_scan:string("%% "++[Illegal1], {1,1}, []),
[{ok,[{string,{1,1},[OK]}],{1,4}} =
- erl_scan:string("\""++[OK]++"\"",{1,1}) ||
+ erl_scan_string("\""++[OK]++"\"",{1,1}) ||
OK <- OKL],
[{error,{{1,2},erl_scan,{illegal,character}},{1,3}} =
erl_scan:string("\""++[OK]++"\"",{1,1}) ||
@@ -1069,93 +1081,93 @@ otp_10302(Config) when is_list(Config) ->
Illegal <- IllegalL],
{ok,[{char,{1,1},OK1}],{1,3}} =
- erl_scan:string([$$,OK1],{1,1}),
+ erl_scan_string([$$,OK1],{1,1}),
{error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
erl_scan:string([$$,Illegal1],{1,1}),
{ok,[{char,{1,1},OK1}],{1,4}} =
- erl_scan:string([$$,$\\,OK1],{1,1}),
+ erl_scan_string([$$,$\\,OK1],{1,1}),
{error,{{1,1},erl_scan,{illegal,character}},{1,4}} =
erl_scan:string([$$,$\\,Illegal1],{1,1}),
{ok,[{string,{1,1},[55295]}],{1,5}} =
- erl_scan:string("\"\\"++[OK1]++"\"",{1,1}),
+ erl_scan_string("\"\\"++[OK1]++"\"",{1,1}),
{error,{{1,2},erl_scan,{illegal,character}},{1,4}} =
erl_scan:string("\"\\"++[Illegal1]++"\"",{1,1}),
{ok,[{char,{1,1},OK1}],{1,10}} =
- erl_scan:string("$\\x{D7FF}",{1,1}),
+ erl_scan_string("$\\x{D7FF}",{1,1}),
{error,{{1,1},erl_scan,{illegal,character}},{1,10}} =
erl_scan:string("$\\x{D800}",{1,1}),
%% Not erl_scan, but erl_parse.
- {integer,0,1} = erl_parse:abstract(1),
- Float = 3.14, {float,0,Float} = erl_parse:abstract(Float),
- {nil,0} = erl_parse:abstract([]),
+ {integer,0,1} = erl_parse_abstract(1),
+ Float = 3.14, {float,0,Float} = erl_parse_abstract(Float),
+ {nil,0} = erl_parse_abstract([]),
{bin,0,
[{bin_element,0,{integer,0,1},default,default},
{bin_element,0,{integer,0,2},default,default}]} =
- erl_parse:abstract(<<1,2>>),
+ erl_parse_abstract(<<1,2>>),
{cons,0,{tuple,0,[{atom,0,a}]},{atom,0,b}} =
- erl_parse:abstract([{a} | b]),
- {string,0,"str"} = erl_parse:abstract("str"),
+ erl_parse_abstract([{a} | b]),
+ {string,0,"str"} = erl_parse_abstract("str"),
{cons,0,
{integer,0,$a},
{cons,0,{integer,0,55296},{string,0,"c"}}} =
- erl_parse:abstract("a"++[55296]++"c"),
+ erl_parse_abstract("a"++[55296]++"c"),
Line = 17,
- {integer,Line,1} = erl_parse:abstract(1, Line),
- Float = 3.14, {float,Line,Float} = erl_parse:abstract(Float, Line),
- {nil,Line} = erl_parse:abstract([], Line),
+ {integer,Line,1} = erl_parse_abstract(1, Line),
+ Float = 3.14, {float,Line,Float} = erl_parse_abstract(Float, Line),
+ {nil,Line} = erl_parse_abstract([], Line),
{bin,Line,
[{bin_element,Line,{integer,Line,1},default,default},
{bin_element,Line,{integer,Line,2},default,default}]} =
- erl_parse:abstract(<<1,2>>, Line),
+ erl_parse_abstract(<<1,2>>, Line),
{cons,Line,{tuple,Line,[{atom,Line,a}]},{atom,Line,b}} =
- erl_parse:abstract([{a} | b], Line),
- {string,Line,"str"} = erl_parse:abstract("str", Line),
+ erl_parse_abstract([{a} | b], Line),
+ {string,Line,"str"} = erl_parse_abstract("str", Line),
{cons,Line,
{integer,Line,$a},
{cons,Line,{integer,Line,55296},{string,Line,"c"}}} =
- erl_parse:abstract("a"++[55296]++"c", Line),
+ erl_parse_abstract("a"++[55296]++"c", Line),
Opts1 = [{line,17}],
- {integer,Line,1} = erl_parse:abstract(1, Opts1),
- Float = 3.14, {float,Line,Float} = erl_parse:abstract(Float, Opts1),
- {nil,Line} = erl_parse:abstract([], Opts1),
+ {integer,Line,1} = erl_parse_abstract(1, Opts1),
+ Float = 3.14, {float,Line,Float} = erl_parse_abstract(Float, Opts1),
+ {nil,Line} = erl_parse_abstract([], Opts1),
{bin,Line,
[{bin_element,Line,{integer,Line,1},default,default},
{bin_element,Line,{integer,Line,2},default,default}]} =
- erl_parse:abstract(<<1,2>>, Opts1),
+ erl_parse_abstract(<<1,2>>, Opts1),
{cons,Line,{tuple,Line,[{atom,Line,a}]},{atom,Line,b}} =
- erl_parse:abstract([{a} | b], Opts1),
- {string,Line,"str"} = erl_parse:abstract("str", Opts1),
+ erl_parse_abstract([{a} | b], Opts1),
+ {string,Line,"str"} = erl_parse_abstract("str", Opts1),
{cons,Line,
{integer,Line,$a},
{cons,Line,{integer,Line,55296},{string,Line,"c"}}} =
- erl_parse:abstract("a"++[55296]++"c", Opts1),
+ erl_parse_abstract("a"++[55296]++"c", Opts1),
[begin
- {integer,Line,1} = erl_parse:abstract(1, Opts2),
- Float = 3.14, {float,Line,Float} = erl_parse:abstract(Float, Opts2),
- {nil,Line} = erl_parse:abstract([], Opts2),
+ {integer,Line,1} = erl_parse_abstract(1, Opts2),
+ Float = 3.14, {float,Line,Float} = erl_parse_abstract(Float, Opts2),
+ {nil,Line} = erl_parse_abstract([], Opts2),
{bin,Line,
[{bin_element,Line,{integer,Line,1},default,default},
{bin_element,Line,{integer,Line,2},default,default}]} =
- erl_parse:abstract(<<1,2>>, Opts2),
+ erl_parse_abstract(<<1,2>>, Opts2),
{cons,Line,{tuple,Line,[{atom,Line,a}]},{atom,Line,b}} =
- erl_parse:abstract([{a} | b], Opts2),
- {string,Line,"str"} = erl_parse:abstract("str", Opts2),
+ erl_parse_abstract([{a} | b], Opts2),
+ {string,Line,"str"} = erl_parse_abstract("str", Opts2),
{string,Line,[97,1024,99]} =
- erl_parse:abstract("a"++[1024]++"c", Opts2)
+ erl_parse_abstract("a"++[1024]++"c", Opts2)
end || Opts2 <- [[{encoding,unicode},{line,Line}],
[{encoding,utf8},{line,Line}]]],
{cons,0,
{integer,0,97},
{cons,0,{integer,0,1024},{string,0,"c"}}} =
- erl_parse:abstract("a"++[1024]++"c", [{encoding,latin1}]),
+ erl_parse_abstract("a"++[1024]++"c", [{encoding,latin1}]),
ok.
otp_10990(doc) ->
@@ -1172,13 +1184,13 @@ otp_10992(suite) ->
[];
otp_10992(Config) when is_list(Config) ->
{cons,0,{float,0,42.0},{nil,0}} =
- erl_parse:abstract([42.0], [{encoding,unicode}]),
+ erl_parse_abstract([42.0], [{encoding,unicode}]),
{cons,0,{float,0,42.0},{nil,0}} =
- erl_parse:abstract([42.0], [{encoding,utf8}]),
+ erl_parse_abstract([42.0], [{encoding,utf8}]),
{cons,0,{integer,0,65},{cons,0,{float,0,42.0},{nil,0}}} =
- erl_parse:abstract([$A,42.0], [{encoding,unicode}]),
+ erl_parse_abstract([$A,42.0], [{encoding,unicode}]),
{cons,0,{integer,0,65},{cons,0,{float,0,42.0},{nil,0}}} =
- erl_parse:abstract([$A,42.0], [{encoding,utf8}]),
+ erl_parse_abstract([$A,42.0], [{encoding,utf8}]),
ok.
otp_11807(doc) ->
@@ -1187,29 +1199,72 @@ otp_11807(suite) ->
[];
otp_11807(Config) when is_list(Config) ->
{cons,0,{integer,0,97},{cons,0,{integer,0,98},{nil,0}}} =
- erl_parse:abstract("ab", [{encoding,none}]),
+ erl_parse_abstract("ab", [{encoding,none}]),
{cons,0,{integer,0,-1},{nil,0}} =
- erl_parse:abstract([-1], [{encoding,latin1}]),
+ erl_parse_abstract([-1], [{encoding,latin1}]),
ASCII = fun(I) -> I >= 0 andalso I < 128 end,
- {string,0,"xyz"} = erl_parse:abstract("xyz", [{encoding,ASCII}]),
+ {string,0,"xyz"} = erl_parse_abstract("xyz", [{encoding,ASCII}]),
{cons,0,{integer,0,228},{nil,0}} =
- erl_parse:abstract([228], [{encoding,ASCII}]),
+ erl_parse_abstract([228], [{encoding,ASCII}]),
{cons,0,{integer,0,97},{atom,0,a}} =
- erl_parse:abstract("a"++a, [{encoding,latin1}]),
+ erl_parse_abstract("a"++a, [{encoding,latin1}]),
{'EXIT', {{badarg,bad},_}} = % minor backward incompatibility
(catch erl_parse:abstract("string", [{encoding,bad}])),
ok.
test_string(String, ExpectedWithCol) ->
- {ok, ExpectedWithCol, _EndWithCol} = erl_scan:string(String, {1, 1}, []),
+ {ok, ExpectedWithCol, _EndWithCol} = erl_scan_string(String, {1, 1}, []),
Expected = [ begin
{L,_C} = element(2, T),
setelement(2, T, L)
end
|| T <- ExpectedWithCol ],
- {ok, Expected, _End} = erl_scan:string(String),
+ {ok, Expected, _End} = erl_scan_string(String),
test(String).
+erl_scan_string(String) ->
+ erl_scan_string(String, 1, []).
+
+erl_scan_string(String, StartLocation) ->
+ erl_scan_string(String, StartLocation, []).
+
+erl_scan_string(String, StartLocation, Options) ->
+ case erl_scan:string(String, StartLocation, Options) of
+ {ok, Tokens, EndLocation} ->
+ {ok, unopaque_tokens(Tokens), EndLocation};
+ Else ->
+ Else
+ end.
+
+erl_scan_tokens(C, S, L) ->
+ erl_scan_tokens(C, S, L, []).
+
+erl_scan_tokens(C, S, L, O) ->
+ case erl_scan:tokens(C, S, L, O) of
+ {done, {ok, Ts, End}, R} ->
+ {done, {ok, unopaque_tokens(Ts), End}, R};
+ Else ->
+ Else
+ end.
+
+unopaque_tokens([]) ->
+ [];
+unopaque_tokens([Token|Tokens]) ->
+ Attrs = element(2, Token),
+ Term = erl_anno:to_term(Attrs),
+ T = setelement(2, Token, Term),
+ [T | unopaque_tokens(Tokens)].
+
+erl_parse_abstract(Term) ->
+ erl_parse_abstract(Term, []).
+
+erl_parse_abstract(Term, Options) ->
+ Abstr = erl_parse:abstract(Term, Options),
+ unopaque_abstract(Abstr).
+
+unopaque_abstract(Abstr) ->
+ erl_parse:anno_to_term(Abstr).
+
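%% A minimal sketch (editorial illustration, not part of the suite) of what
%% the wrappers above rely on: erl_anno:to_term/1 turns an opaque annotation
%% back into the old plain representation, so the existing pattern matches on
%% line numbers and {Line,Column} pairs keep working.
anno_to_term_sketch() ->
    {ok,[{atom,Anno,foo}],_End} = erl_scan:string("foo", 1),
    1 = erl_anno:to_term(Anno),
    [{atom,1,foo}] = unopaque_tokens([{atom,Anno,foo}]),
    ok.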
%% test_string(String, Expected, StartLocation, Options) ->
%% {ok, Expected, _End} = erl_scan:string(String, StartLocation, Options),
%% test(String).
@@ -1359,7 +1414,7 @@ select_tokens(Tokens, Tags) ->
simplify([Token|Tokens]) ->
{line,Line} = erl_scan:token_info(Token, line),
- [setelement(2, Token, Line) | simplify(Tokens)];
+ [setelement(2, Token, erl_anno:new(Line)) | simplify(Tokens)];
simplify([]) ->
[].
diff --git a/lib/stdlib/test/ets_SUITE.erl b/lib/stdlib/test/ets_SUITE.erl
index 8dc8b2c291..fff6b11a38 100644
--- a/lib/stdlib/test/ets_SUITE.erl
+++ b/lib/stdlib/test/ets_SUITE.erl
@@ -47,6 +47,7 @@
-export([ordered/1, ordered_match/1, interface_equality/1,
fixtable_next/1, fixtable_insert/1, rename/1, rename_unnamed/1, evil_rename/1,
update_element/1, update_counter/1, evil_update_counter/1, partly_bound/1, match_heavy/1]).
+-export([update_counter_with_default/1]).
-export([member/1]).
-export([memory/1]).
-export([select_fail/1]).
@@ -77,6 +78,7 @@
-export([otp_10182/1]).
-export([ets_all/1]).
-export([memory_check_summary/1]).
+-export([take/1]).
-export([init_per_testcase/2, end_per_testcase/2]).
%% Convenience for manual testing
@@ -98,7 +100,7 @@
misc1_do/1, safe_fixtable_do/1, info_do/1, dups_do/1, heavy_lookup_do/1,
heavy_lookup_element_do/1, member_do/1, otp_5340_do/1, otp_7665_do/1, meta_wb_do/1,
do_heavy_concurrent/1, tab2file2_do/2, exit_large_table_owner_do/2,
- types_do/1, sleeper/0, memory_do/1,
+ types_do/1, sleeper/0, memory_do/1, update_counter_with_default_do/1,
ms_tracee_dummy/1, ms_tracee_dummy/2, ms_tracee_dummy/3, ms_tracee_dummy/4
]).
@@ -135,7 +137,8 @@ all() ->
{group, heavy}, ordered, ordered_match,
interface_equality, fixtable_next, fixtable_insert,
rename, rename_unnamed, evil_rename, update_element,
- update_counter, evil_update_counter, partly_bound,
+ update_counter, evil_update_counter,
+ update_counter_with_default, partly_bound,
match_heavy, {group, fold}, member, t_delete_object,
t_init_table, t_whitebox, t_delete_all_objects,
t_insert_list, t_test_ms, t_select_delete, t_ets_dets,
@@ -153,6 +156,7 @@ all() ->
otp_9932,
otp_9423,
ets_all,
+ take,
memory_check_summary]. % MUST BE LAST
@@ -1381,7 +1385,7 @@ random_test() ->
{ok,[X]} ->
X;
_ ->
- {A,B,C} = erlang:now(),
+ {A,B,C} = erlang:timestamp(),
random:seed(A,B,C),
get(random_seed)
end,
@@ -1759,6 +1763,14 @@ update_counter_do(Opts) ->
OrdSet = ets_new(ordered_set,[ordered_set | Opts]),
update_counter_for(Set),
update_counter_for(OrdSet),
+ ets:delete_all_objects(Set),
+ ets:delete_all_objects(OrdSet),
+ ets:safe_fixtable(Set, true),
+ ets:safe_fixtable(OrdSet, true),
+ update_counter_for(Set),
+ update_counter_for(OrdSet),
+ ets:safe_fixtable(Set, false),
+ ets:safe_fixtable(OrdSet, false),
ets:delete(Set),
ets:delete(OrdSet),
update_counter_neg(Opts).
@@ -1778,10 +1790,14 @@ update_counter_for(T) ->
?line {NewObj, Ret} = uc_mimic(Obj,Arg3),
ArgHash = erlang:phash2({T,a,Arg3}),
%%io:format("update_counter(~p, ~p, ~p) expecting ~p\n",[T,a,Arg3,Ret]),
+ [DefaultObj] = ets:lookup(T, a),
?line Ret = ets:update_counter(T,a,Arg3),
+ Ret = ets:update_counter(T, b, Arg3, DefaultObj), % Use other key
?line ArgHash = erlang:phash2({T,a,Arg3}),
%%io:format("NewObj=~p~n ",[NewObj]),
?line [NewObj] = ets:lookup(T,a),
+ true = ets:lookup(T, b) =:= [setelement(1, NewObj, b)],
+ ets:delete(T, b),
Myself(NewObj,Times-1,Arg3,Myself)
end,
@@ -2006,6 +2022,44 @@ evil_counter_1(Iter, T) ->
ets:update_counter(T, dracula, 1),
evil_counter_1(Iter-1, T).
+update_counter_with_default(Config) when is_list(Config) ->
+ repeat_for_opts(update_counter_with_default_do).
+
+update_counter_with_default_do(Opts) ->
+ T1 = ets_new(a, [set | Opts]),
+ %% Insert default object.
+ 3 = ets:update_counter(T1, foo, 2, {beaufort,1}),
+ %% Increment.
+ 5 = ets:update_counter(T1, foo, 2, {cabecou,1}),
+ %% Increment with list.
+ [9] = ets:update_counter(T1, foo, [{2,4}], {camembert,1}),
+ %% Same with non-immediate key.
+ 3 = ets:update_counter(T1, {foo,bar}, 2, {{chaource,chevrotin},1}),
+ 5 = ets:update_counter(T1, {foo,bar}, 2, {{cantal,comté},1}),
+ [9] = ets:update_counter(T1, {foo,bar}, [{2,4}], {{emmental,de,savoie},1}),
+ %% Same with ordered set.
+ T2 = ets_new(b, [ordered_set | Opts]),
+ 3 = ets:update_counter(T2, foo, 2, {maroilles,1}),
+ 5 = ets:update_counter(T2, foo, 2, {mimolette,1}),
+ [9] = ets:update_counter(T2, foo, [{2,4}], {morbier,1}),
+ 3 = ets:update_counter(T2, {foo,bar}, 2, {{laguiole},1}),
+ 5 = ets:update_counter(T2, {foo,bar}, 2, {{saint,nectaire},1}),
+ [9] = ets:update_counter(T2, {foo,bar}, [{2,4}], {{rocamadour},1}),
+ %% Arithmetically-equal keys.
+ 3 = ets:update_counter(T2, 1.0, 2, {1,1}),
+ 5 = ets:update_counter(T2, 1, 2, {1,1}),
+ 7 = ets:update_counter(T2, 1, 2, {1.0,1}),
+ %% Same with reversed type difference.
+ 3 = ets:update_counter(T2, 2, 2, {2.0,1}),
+ 5 = ets:update_counter(T2, 2.0, 2, {2.0,1}),
+ 7 = ets:update_counter(T2, 2.0, 2, {2,1}),
+ %% The counter element of the default object is not an integer.
+ {'EXIT',{badarg,_}} = (catch ets:update_counter(T1, qux, 3, {saint,félicien})),
+ %% No third element in default value.
+ {'EXIT',{badarg,_}} = (catch ets:update_counter(T1, qux, [{3,1}], {roquefort,1})),
+
+ ok.
+
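%% A minimal sketch (editorial illustration, not part of the suite) of the
%% semantics exercised above: when the key is missing, ets:update_counter/4
%% inserts the default object, with its key element replaced by the given
%% key, and then applies the update, so the first call returns 1+2 = 3.
update_counter_default_sketch() ->
    T = ets:new(uc_default_sketch, [set]),
    3 = ets:update_counter(T, k, 2, {some_key,1}), % inserts {k,1}, then adds 2
    5 = ets:update_counter(T, k, 2, {some_key,1}), % key exists; default unused
    ets:delete(T),
    ok.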
fixtable_next(doc) ->
["Check that a first-next sequence always works on a fixed table"];
fixtable_next(suite) ->
@@ -3007,13 +3061,13 @@ time_lookup(Config) when is_list(Config) ->
"~p ets lookups/s",[Values]))}.
time_lookup_do(Opts) ->
- ?line Tab = ets_new(foo,Opts),
- ?line fill_tab(Tab,foo),
- ?line ets:insert(Tab,{{a,key},foo}),
- ?line {Time,_} = ?t:timecall(test_server,do_times,
- [10000,ets,lookup,[Tab,{a,key}]]),
- ?line true = ets:delete(Tab),
- round(10000 / Time). % lookups/s
+ Tab = ets_new(foo,Opts),
+ fill_tab(Tab,foo),
+ ets:insert(Tab,{{a,key},foo}),
+ {Time,_} = ?t:timecall(test_server,do_times,
+ [100000,ets,lookup,[Tab,{a,key}]]),
+ true = ets:delete(Tab),
+ round(100000 / Time). % lookups/s
badlookup(doc) ->
["Check proper return values from bad lookups in existing/non existing "
@@ -3487,12 +3541,9 @@ verify_rescheduling_exit(Config, ForEachData, Flags, Fix, NOTabs, NOProcs) ->
fun () ->
repeat(
fun () ->
- {A, B, C} = now(),
- ?line Name = list_to_atom(
- TestCase
- ++ "-" ++ integer_to_list(A)
- ++ "-" ++ integer_to_list(B)
- ++ "-" ++ integer_to_list(C)),
+ Uniq = erlang:unique_integer([positive]),
+ Name = list_to_atom(TestCase ++ "-" ++
+ integer_to_list(Uniq)),
Tab = ets_new(Name, Flags),
ForEachData(fun(Data) -> ets:insert(Tab, Data) end),
case Fix of
@@ -3770,41 +3821,99 @@ match_object(Config) when is_list(Config) ->
repeat_for_opts(match_object_do).
match_object_do(Opts) ->
- ?line EtsMem = etsmem(),
- ?line Tab = ets_new(foobar, Opts),
- ?line fill_tab(Tab, foo),
- ?line ets:insert(Tab, {{one, 4}, 4}),
- ?line ets:insert(Tab,{{one,5},5}),
- ?line ets:insert(Tab,{{two,4},4}),
- ?line ets:insert(Tab,{{two,5},6}),
- ?line case ets:match_object(Tab, {{one, '_'}, '$0'}) of
+ EtsMem = etsmem(),
+ Tab = ets_new(foobar, Opts),
+ fill_tab(Tab, foo),
+ ets:insert(Tab,{{one,4},4}),
+ ets:insert(Tab,{{one,5},5}),
+ ets:insert(Tab,{{two,4},4}),
+ ets:insert(Tab,{{two,5},6}),
+ ets:insert(Tab, {#{camembert=>cabécou},7}),
+ ets:insert(Tab, {#{"hi"=>"hello","wazzup"=>"awesome","1337"=>"42"},8}),
+ ets:insert(Tab, {#{"hi"=>"hello",#{"wazzup"=>3}=>"awesome","1337"=>"42"},9}),
+ ets:insert(Tab, {#{"hi"=>"hello","wazzup"=>#{"awesome"=>3},"1337"=>"42"},10}),
+ Is = lists:seq(1,100),
+ M1 = maps:from_list([{I,I}||I <- Is]),
+ M2 = maps:from_list([{I,"hi"}||I <- Is]),
+ ets:insert(Tab, {M1,11}),
+ ets:insert(Tab, {M2,12}),
+
+ case ets:match_object(Tab, {{one, '_'}, '$0'}) of
[{{one,5},5},{{one,4},4}] -> ok;
[{{one,4},4},{{one,5},5}] -> ok;
_ -> ?t:fail("ets:match_object() returned something funny.")
end,
- ?line case ets:match_object(Tab, {{two, '$1'}, '$0'}) of
+ case ets:match_object(Tab, {{two, '$1'}, '$0'}) of
[{{two,5},6},{{two,4},4}] -> ok;
[{{two,4},4},{{two,5},6}] -> ok;
_ -> ?t:fail("ets:match_object() returned something funny.")
end,
- ?line case ets:match_object(Tab, {{two, '$9'}, '$4'}) of
+ case ets:match_object(Tab, {{two, '$9'}, '$4'}) of
[{{two,5},6},{{two,4},4}] -> ok;
[{{two,4},4},{{two,5},6}] -> ok;
_ -> ?t:fail("ets:match_object() returned something funny.")
end,
- ?line case ets:match_object(Tab, {{two, '$9'}, '$22'}) of
+ case ets:match_object(Tab, {{two, '$9'}, '$22'}) of
[{{two,5},6},{{two,4},4}] -> ok;
[{{two,4},4},{{two,5},6}] -> ok;
_ -> ?t:fail("ets:match_object() returned something funny.")
end,
- % Check that unsucessful match returns an empty list.
- ?line [] = ets:match_object(Tab, {{three,'$0'}, '$92'}),
+
+ % Check that maps are inspected for variables.
+ [{#{camembert:=cabécou},7}] = ets:match_object(Tab, {#{camembert=>'_'},7}),
+
+ [{#{"hi":="hello",#{"wazzup"=>3}:="awesome","1337":="42"},9}] =
+ ets:match_object(Tab, {#{#{"wazzup"=>3}=>"awesome","hi"=>"hello","1337"=>"42"},9}),
+ [{#{"hi":="hello",#{"wazzup"=>3}:="awesome","1337":="42"},9}] =
+ ets:match_object(Tab, {#{#{"wazzup"=>3}=>"awesome","hi"=>"hello","1337"=>'_'},'_'}),
+ [{#{"hi":="hello","wazzup":=#{"awesome":=3},"1337":="42"},10}] =
+ ets:match_object(Tab, {#{"wazzup"=>'_',"hi"=>'_',"1337"=>'_'},10}),
+
+ %% multiple patterns
+ Pat = {{#{#{"wazzup"=>3}=>"awesome","hi"=>"hello","1337"=>'_'},'$1'},[{is_integer,'$1'}],['$_']},
+ [{#{"hi":="hello",#{"wazzup"=>3}:="awesome","1337":="42"},9}] =
+ ets:select(Tab, [Pat,Pat,Pat,Pat]),
+ case ets:match_object(Tab, {#{"hi"=>"hello","wazzup"=>'_',"1337"=>"42"},'_'}) of
+ [{#{"1337" := "42","hi" := "hello","wazzup" := "awesome"},8},
+ {#{"1337" := "42","hi" := "hello","wazzup" := #{"awesome" := 3}},10}] -> ok;
+ [{#{"1337" := "42","hi" := "hello","wazzup" := #{"awesome" := 3}},10},
+ {#{"1337" := "42","hi" := "hello","wazzup" := "awesome"},8}] -> ok;
+ _ -> ?t:fail("ets:match_object() returned something funny.")
+ end,
+ case ets:match_object(Tab, {#{"hi"=>'_'},'_'}) of
+ [{#{"1337":="42", "hi":="hello"},_},
+ {#{"1337":="42", "hi":="hello"},_},
+ {#{"1337":="42", "hi":="hello"},_}] -> ok;
+ _ -> ?t:fail("ets:match_object() returned something funny.")
+ end,
+
+ %% match large maps
+ [{#{1:=1,2:=2,99:=99,100:=100},11}] = ets:match_object(Tab, {M1,11}),
+ [{#{1:="hi",2:="hi",99:="hi",100:="hi"},12}] = ets:match_object(Tab, {M2,12}),
+ case ets:match_object(Tab, {#{1=>'_',2=>'_'},'_'}) of
+ %% only match a part of the map
+ [{#{1:=1,5:=5,99:=99,100:=100},11},{#{1:="hi",6:="hi",99:="hi"},12}] -> ok;
+ [{#{1:="hi",2:="hi",59:="hi"},12},{#{1:=1,2:=2,39:=39,100:=100},11}] -> ok;
+ _ -> ?t:fail("ets:match_object() returned something funny.")
+ end,
+ case ets:match_object(Tab, {maps:from_list([{I,'_'}||I<-Is]),'_'}) of
+ %% only match a part of the map
+ [{#{1:=1,5:=5,99:=99,100:=100},11},{#{1:="hi",6:="hi",99:="hi"},12}] -> ok;
+ [{#{1:="hi",2:="hi",59:="hi"},12},{#{1:=1,2:=2,39:=39,100:=100},11}] -> ok;
+ _ -> ?t:fail("ets:match_object() returned something funny.")
+ end,
+ {'EXIT',{badarg,_}} = (catch ets:match_object(Tab, {#{'$1'=>'_'},7})),
+ Mve = maps:from_list([{list_to_atom([$$|integer_to_list(I)]),'_'}||I<-Is]),
+ {'EXIT',{badarg,_}} = (catch ets:match_object(Tab, {Mve,11})),
+
+ % Check that unsuccessful match returns an empty list.
+ [] = ets:match_object(Tab, {{three,'$0'}, '$92'}),
% Check that '$0' equals '_'.
Len = length(ets:match_object(Tab, '$0')),
Len = length(ets:match_object(Tab, '_')),
- ?line if Len > 4 -> ok end,
- ?line true = ets:delete(Tab),
- ?line verify_etsmem(EtsMem).
+ if Len > 4 -> ok end,
+ true = ets:delete(Tab),
+ verify_etsmem(EtsMem).
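%% A minimal sketch (editorial illustration, not part of the suite) of the
%% map-pattern rules exercised above: map values in a match pattern may be
%% match variables, but map keys must be fully bound terms.
map_pattern_sketch() ->
    T = ets:new(map_pattern_sketch, [set]),
    ets:insert(T, {#{a => 1}, x}),
    [{#{a := 1},x}] = ets:match_object(T, {#{a => '_'}, x}),
    {'EXIT',{badarg,_}} = (catch ets:match_object(T, {#{'$1' => 1}, x})),
    ets:delete(T),
    ok.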
match_object2(suite) -> [];
match_object2(doc) -> ["Tests that db_match_object does not generate "
@@ -3969,12 +4078,22 @@ tab2file(doc) -> ["Check the ets:tab2file function on an empty "
"ets table."];
tab2file(suite) -> [];
tab2file(Config) when is_list(Config) ->
+ ?line FName = filename:join([?config(priv_dir, Config),"tab2file_case"]),
+ tab2file_do(FName, []),
+ tab2file_do(FName, [{sync,true}]),
+ tab2file_do(FName, [{sync,false}]),
+ {'EXIT',{{badmatch,{error,_}},_}} = (catch tab2file_do(FName, [{sync,yes}])),
+ {'EXIT',{{badmatch,{error,_}},_}} = (catch tab2file_do(FName, [sync])),
+ ok.
+
+tab2file_do(FName, Opts) ->
%% Write an empty ets table to a file, read back and check properties.
?line Tab = ets_new(ets_SUITE_foo_tab, [named_table, set, private,
{keypos, 2}]),
- ?line FName = filename:join([?config(priv_dir, Config),"tab2file_case"]),
- ?line ok = ets:tab2file(Tab, FName),
- ?line true = ets:delete(Tab),
+ catch file:delete(FName),
+ Res = ets:tab2file(Tab, FName, Opts),
+ true = ets:delete(Tab),
+ ok = Res,
%
?line EtsMem = etsmem(),
?line {ok, Tab2} = ets:file2tab(FName),
@@ -3984,6 +4103,7 @@ tab2file(Config) when is_list(Config) ->
?line set = ets:info(Tab2, type),
?line true = ets:delete(Tab2),
?line verify_etsmem(EtsMem).
+
tab2file2(doc) -> ["Check the ets:tab2file function on a ",
"filled set/bag type ets table."];
@@ -4493,16 +4613,16 @@ build_table2(L1,L2,Num) ->
T.
time_match_object(Tab,Match, Res) ->
- T1 = erlang:now(),
+ T1 = erlang:monotonic_time(micro_seconds),
Res = ets:match_object(Tab,Match),
- T2 = erlang:now(),
- nowdiff(T1,T2).
+ T2 = erlang:monotonic_time(micro_seconds),
+ T2 - T1.
time_match(Tab,Match) ->
- T1 = erlang:now(),
+ T1 = erlang:monotonic_time(micro_seconds),
ets:match(Tab,Match),
- T2 = erlang:now(),
- nowdiff(T1,T2).
+ T2 = erlang:monotonic_time(micro_seconds),
+ T2 - T1.
seventyfive_percent_success(_,S,Fa,0) ->
true = (S > ((S + Fa) * 0.75));
@@ -4527,11 +4647,6 @@ fifty_percent_success({M,F,A},S,Fa,N) ->
end.
-nowtonumber({Mega, Secs, Milli}) ->
- Milli + Secs * 1000000 + Mega * 1000000000000.
-nowdiff(T1,T2) ->
- nowtonumber(T2) - nowtonumber(T1).
-
create_random_string(0) ->
[];
@@ -5000,36 +5115,40 @@ colliding_names(Name) ->
grow_shrink(Config) when is_list(Config) ->
?line EtsMem = etsmem(),
- ?line grow_shrink_0(lists:seq(3071, 5000), EtsMem),
- ?line verify_etsmem(EtsMem).
-grow_shrink_0([N|Ns], EtsMem) ->
- ?line grow_shrink_1(N, [set]),
- ?line grow_shrink_1(N, [ordered_set]),
- %% Verifying ets-memory here takes too long time, since
- %% lock-free allocators were introduced...
- %% ?line verify_etsmem(EtsMem),
- grow_shrink_0(Ns, EtsMem);
-grow_shrink_0([], _) -> ok.
-
-grow_shrink_1(N, Flags) ->
- ?line T = ets_new(a, Flags),
- ?line grow_shrink_2(N, N, T),
- ?line ets:delete(T).
+ Set = ets_new(a, [set]),
+ grow_shrink_0(0, 3071, 3000, 5000, Set),
+ ets:delete(Set),
-grow_shrink_2(0, Orig, T) ->
- List = [{I,a} || I <- lists:seq(1, Orig)],
- List = lists:sort(ets:tab2list(T)),
- grow_shrink_3(Orig, T);
-grow_shrink_2(N, Orig, T) ->
+ %OrdSet = ets_new(a, [ordered_set]),
+ %grow_shrink_0(0, 3071, 3000, 5000, OrdSet),
+ %ets:delete(OrdSet),
+
+ ?line verify_etsmem(EtsMem).
+
+grow_shrink_0(N, _, _, Max, _) when N >= Max ->
+ ok;
+grow_shrink_0(N0, GrowN, ShrinkN, Max, T) ->
+ N1 = grow_shrink_1(N0, GrowN, ShrinkN, T),
+ grow_shrink_0(N1, GrowN, ShrinkN, Max, T).
+
+grow_shrink_1(N0, GrowN, ShrinkN, T) ->
+ N1 = grow_shrink_2(N0+1, N0 + GrowN, T),
+ grow_shrink_3(N1, N1 - ShrinkN, T).
+
+grow_shrink_2(N, GrowTo, _) when N > GrowTo ->
+ %io:format("Grown to ~p\n", [GrowTo]),
+ GrowTo;
+grow_shrink_2(N, GrowTo, T) ->
true = ets:insert(T, {N,a}),
- grow_shrink_2(N-1, Orig, T).
+ grow_shrink_2(N+1, GrowTo, T).
-grow_shrink_3(0, T) ->
- [] = ets:tab2list(T);
-grow_shrink_3(N, T) ->
+grow_shrink_3(N, ShrinkTo, _) when N =< ShrinkTo ->
+ %io:format("Shrunk to ~p\n", [ShrinkTo]),
+ ShrinkTo;
+grow_shrink_3(N, ShrinkTo, T) ->
true = ets:delete(T, N),
- grow_shrink_3(N-1, T).
+ grow_shrink_3(N-1, ShrinkTo, T).
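%% The rewritten loop above keeps one table and repeatedly grows it by GrowN
%% (3071) objects and shrinks it by ShrinkN (3000), drifting upwards until it
%% passes Max (5000) objects, instead of rebuilding a table for every size.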
grow_pseudo_deleted(doc) -> ["Grow a table that still contains pseudo-deleted objects"];
grow_pseudo_deleted(suite) -> [];
@@ -5055,17 +5174,29 @@ grow_pseudo_deleted_do(Type) ->
?line Left = ets:info(T,size),
?line Mult = get_kept_objects(T),
filltabstr(T,Mult),
- my_spawn_opt(fun()-> ?line true = ets:info(T,fixed),
- Self ! start,
- io:format("Starting to filltabstr... ~p\n",[now()]),
- filltabstr(T,Mult,Mult+10000),
- io:format("Done with filltabstr. ~p\n",[now()]),
- Self ! done
- end, [link, {scheduler,2}]),
+ my_spawn_opt(
+ fun() ->
+ true = ets:info(T,fixed),
+ Self ! start,
+ io:put_chars("Starting to filltabstr...\n"),
+ do_tc(fun() ->
+ filltabstr(T, Mult, Mult+10000)
+ end,
+ fun(Elapsed) ->
+ io:format("Done with filltabstr in ~p ms\n",
+ [Elapsed])
+ end),
+ Self ! done
+ end, [link, {scheduler,2}]),
?line start = receive_any(),
- io:format("Unfixing table...~p nitems=~p\n",[now(),ets:info(T,size)]),
- ?line true = ets:safe_fixtable(T,false),
- io:format("Unfix table done. ~p nitems=~p\n",[now(),ets:info(T,size)]),
+ io:format("Unfixing table... nitems=~p\n", [ets:info(T, size)]),
+ do_tc(fun() ->
+ true = ets:safe_fixtable(T, false)
+ end,
+ fun(Elapsed) ->
+ io:format("Unfix table done in ~p ms. nitems=~p\n",
+ [Elapsed,ets:info(T, size)])
+ end),
?line false = ets:info(T,fixed),
?line 0 = get_kept_objects(T),
?line done = receive_any(),
@@ -5095,17 +5226,28 @@ shrink_pseudo_deleted_do(Type) ->
[true]}]),
?line Half = ets:info(T,size),
?line Half = get_kept_objects(T),
- my_spawn_opt(fun()-> ?line true = ets:info(T,fixed),
- Self ! start,
- io:format("Starting to delete... ~p\n",[now()]),
- del_one_by_one_set(T,1,Half+1),
- io:format("Done with delete. ~p\n",[now()]),
- Self ! done
- end, [link, {scheduler,2}]),
+ my_spawn_opt(
+ fun()-> true = ets:info(T,fixed),
+ Self ! start,
+ io:put_chars("Starting to delete... ~p\n"),
+ do_tc(fun() ->
+ del_one_by_one_set(T, 1, Half+1)
+ end,
+ fun(Elapsed) ->
+ io:format("Done with delete in ~p ms.\n",
+ [Elapsed])
+ end),
+ Self ! done
+ end, [link, {scheduler,2}]),
?line start = receive_any(),
- io:format("Unfixing table...~p nitems=~p\n",[now(),ets:info(T,size)]),
- ?line true = ets:safe_fixtable(T,false),
- io:format("Unfix table done. ~p nitems=~p\n",[now(),ets:info(T,size)]),
+ io:format("Unfixing table... nitems=~p\n", [ets:info(T, size)]),
+ do_tc(fun() ->
+ true = ets:safe_fixtable(T, false)
+ end,
+ fun(Elapsed) ->
+ io:format("Unfix table done in ~p ms. nitems=~p\n",
+ [Elapsed,ets:info(T, size)])
+ end),
?line false = ets:info(T,fixed),
?line 0 = get_kept_objects(T),
?line done = receive_any(),
@@ -5258,30 +5400,42 @@ smp_unfix_fix_do() ->
?line Deleted = get_kept_objects(T),
{Child, Mref} =
- my_spawn_opt(fun()-> ?line true = ets:info(T,fixed),
- Parent ! start,
- io:format("Child waiting for table to be unfixed... now=~p mem=~p\n",
- [now(),ets:info(T,memory)]),
- repeat_while(fun()-> ets:info(T,fixed) end),
- io:format("Table unfixed. Child Fixating! now=~p mem=~p\n",
- [now(),ets:info(T,memory)]),
- ?line true = ets:safe_fixtable(T,true),
- repeat_while(fun(Key) when Key =< NumOfObjs ->
- ets:delete(T,Key), {true,Key+1};
- (Key) -> {false,Key}
- end,
- Deleted),
- ?line 0 = ets:info(T,size),
- ?line true = get_kept_objects(T) >= Left,
- ?line done = receive_any()
- end,
- [link, monitor, {scheduler,2}]),
+ my_spawn_opt(
+ fun()->
+ true = ets:info(T,fixed),
+ Parent ! start,
+ io:format("Child waiting for table to be unfixed... mem=~p\n",
+ [ets:info(T, memory)]),
+ do_tc(fun() ->
+ repeat_while(fun()-> ets:info(T, fixed) end)
+ end,
+ fun(Elapsed) ->
+ io:format("Table unfixed in ~p ms."
+ " Child Fixating! mem=~p\n",
+ [Elapsed,ets:info(T,memory)])
+ end),
+ true = ets:safe_fixtable(T,true),
+ repeat_while(fun(Key) when Key =< NumOfObjs ->
+ ets:delete(T,Key), {true,Key+1};
+ (Key) -> {false,Key}
+ end,
+ Deleted),
+ 0 = ets:info(T,size),
+ true = get_kept_objects(T) >= Left,
+ done = receive_any()
+ end,
+ [link, monitor, {scheduler,2}]),
?line start = receive_any(),
?line true = ets:info(T,fixed),
- io:format("Parent starting to unfix... ~p\n",[now()]),
- ets:safe_fixtable(T,false),
- io:format("Parent done with unfix. ~p\n",[now()]),
+ io:put_chars("Parent starting to unfix... ~p\n"),
+ do_tc(fun() ->
+ ets:safe_fixtable(T, false)
+ end,
+ fun(Elapsed) ->
+ io:format("Parent done with unfix in ~p ms.\n",
+ [Elapsed])
+ end),
Child ! done,
{'DOWN', Mref, process, Child, normal} = receive_any(),
?line false = ets:info(T,fixed),
@@ -5582,6 +5736,43 @@ ets_all_run() ->
ets_all_run().
+take(Config) when is_list(Config) ->
+ %% Simple test for set tables.
+ T1 = ets_new(a, [set]),
+ [] = ets:take(T1, foo),
+ ets:insert(T1, {foo,bar}),
+ [] = ets:take(T1, bar),
+ [{foo,bar}] = ets:take(T1, foo),
+ [] = ets:tab2list(T1),
+ %% Non-immediate key.
+ ets:insert(T1, {{'not',<<"immediate">>},ok}),
+ [{{'not',<<"immediate">>},ok}] = ets:take(T1, {'not',<<"immediate">>}),
+ %% Same with ordered tables.
+ T2 = ets_new(b, [ordered_set]),
+ [] = ets:take(T2, foo),
+ ets:insert(T2, {foo,bar}),
+ [] = ets:take(T2, bar),
+ [{foo,bar}] = ets:take(T2, foo),
+ [] = ets:tab2list(T2),
+ ets:insert(T2, {{'not',<<"immediate">>},ok}),
+ [{{'not',<<"immediate">>},ok}] = ets:take(T2, {'not',<<"immediate">>}),
+ %% Arithmetically-equal keys.
+ ets:insert(T2, [{1.0,float},{2,integer}]),
+ [{1.0,float}] = ets:take(T2, 1),
+ [{2,integer}] = ets:take(T2, 2.0),
+ [] = ets:tab2list(T2),
+ %% Same with bag.
+ T3 = ets_new(c, [bag]),
+ ets:insert(T3, [{1,1},{1,2},{3,3}]),
+ [{1,1},{1,2}] = ets:take(T3, 1),
+ [{3,3}] = ets:take(T3, 3),
+ [] = ets:tab2list(T3),
+ ets:delete(T1),
+ ets:delete(T2),
+ ets:delete(T3),
+ ok.
+
+
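%% A minimal sketch (editorial illustration, not part of the suite) of the
%% operation tested above: ets:take/2 returns the same objects as
%% ets:lookup/2 for the key and removes them in the same operation.
take_sketch() ->
    T = ets:new(take_sketch, [bag]),
    ets:insert(T, [{k,1},{k,2}]),
    [{k,1},{k,2}] = ets:take(T, k),   % returns the objects...
    [] = ets:lookup(T, k),            % ...and they are gone
    ets:delete(T),
    ok.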
%
% Utility functions:
%
@@ -6246,3 +6437,10 @@ repeat_for_opts_atom2list(compressed) -> [compressed,void].
ets_new(Name, Opts) ->
%%ets:new(Name, [compressed | Opts]).
ets:new(Name, Opts).
+
+do_tc(Do, Report) ->
+ T1 = erlang:monotonic_time(),
+ Do(),
+ T2 = erlang:monotonic_time(),
+ Elapsed = erlang:convert_time_unit(T2 - T1, native, milli_seconds),
+ Report(Elapsed).
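%% A minimal usage sketch (editorial illustration) of the helper above: do_tc/2
%% runs the first fun, measures the elapsed monotonic time in milliseconds and
%% hands it to the report fun, replacing the removed erlang:now() timestamps.
do_tc_sketch() ->
    do_tc(fun() -> timer:sleep(100) end,
          fun(Ms) -> io:format("slept for ~p ms\n", [Ms]) end).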
diff --git a/lib/stdlib/test/filename_SUITE.erl b/lib/stdlib/test/filename_SUITE.erl
index 6f1d1a891d..70e7ad9788 100644
--- a/lib/stdlib/test/filename_SUITE.erl
+++ b/lib/stdlib/test/filename_SUITE.erl
@@ -395,6 +395,8 @@ split(Config) when is_list(Config) ->
?line ["foo", "bar", "hello"]= filename:split("foo////bar//hello"),
?line ["foo", "bar", "hello"]= filename:split(["foo//",'//bar//h',"ello"]),
?line ["foo", "bar", "hello"]= filename:split(["foo//",'//bar//h'|ello]),
+ ["/"] = filename:split("/"),
+ [] = filename:split(""),
case os:type() of
{win32,_} ->
?line ["a:/","msdev","include"] =
@@ -767,6 +769,8 @@ split_bin(Config) when is_list(Config) ->
[<<"/">>,<<"usr">>,<<"local">>,<<"bin">>] = filename:split(<<"/usr/local/bin">>),
[<<"foo">>,<<"bar">>]= filename:split(<<"foo/bar">>),
[<<"foo">>, <<"bar">>, <<"hello">>]= filename:split(<<"foo////bar//hello">>),
+ [<<"/">>] = filename:split(<<"/">>),
+ [] = filename:split(<<"">>),
case os:type() of
{win32,_} ->
[<<"a:/">>,<<"msdev">>,<<"include">>] =
diff --git a/lib/stdlib/test/gen_event_SUITE.erl b/lib/stdlib/test/gen_event_SUITE.erl
index 576a5adfce..6c28eb00c3 100644
--- a/lib/stdlib/test/gen_event_SUITE.erl
+++ b/lib/stdlib/test/gen_event_SUITE.erl
@@ -131,90 +131,105 @@ start(Config) when is_list(Config) ->
ok.
-hibernate(suite) -> [];
hibernate(Config) when is_list(Config) ->
- ?line {ok,Pid} = gen_event:start({local, my_dummy_handler}),
- ?line ok = gen_event:add_handler(my_dummy_handler, dummy_h, [self()]),
- ?line [dummy_h] = gen_event:which_handlers(my_dummy_handler),
- ?line true = gen_event:call(my_dummy_handler, dummy_h, hibernate),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line Pid ! wake,
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/=
- erlang:process_info(Pid,current_function)),
- ?line later = gen_event:call(my_dummy_handler, dummy_h, hibernate_later),
- ?line true = ({current_function,{erlang,hibernate,3}} =/=
- erlang:process_info(Pid,current_function)),
- ?line receive after 2000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line Pid ! wake,
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/=
- erlang:process_info(Pid,current_function)),
- ?line gen_event:notify(my_dummy_handler,hibernate),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line gen_event:notify(my_dummy_handler,wakeup),
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/=
- erlang:process_info(Pid,current_function)),
- ?line gen_event:notify(my_dummy_handler,hibernate),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line gen_event:sync_notify(my_dummy_handler,wakeup),
- ?line true = ({current_function,{erlang,hibernate,3}} =/=
- erlang:process_info(Pid,current_function)),
- ?line ok = gen_event:sync_notify(my_dummy_handler,hibernate),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line Pid ! wake,
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/=
- erlang:process_info(Pid,current_function)),
- ?line ok = gen_event:add_handler(my_dummy_handler, dummy1_h, [self()]),
- ?line [_,_] = gen_event:which_handlers(my_dummy_handler),
- ?line gen_event:notify(my_dummy_handler,hibernate),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line gen_event:notify(my_dummy_handler,wakeup),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line Pid ! wake,
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/=
- erlang:process_info(Pid,current_function)),
- ?line Pid ! gnurf,
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line Pid ! sleep,
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line Pid ! wake,
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/=
- erlang:process_info(Pid,current_function)),
- ?line ok = gen_event:stop(my_dummy_handler),
- ?line {ok,Pid2} = gen_event:start({local, my_dummy_handler}),
- ?line ok = gen_event:add_handler(my_dummy_handler, dummy_h, [self(),hibernate]),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid2,current_function),
- ?line sys:suspend(my_dummy_handler),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid2,current_function),
- ?line sys:resume(my_dummy_handler),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid2,current_function),
- ?line Pid2 ! wake,
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/=
- erlang:process_info(Pid2,current_function)),
+ {ok,Pid} = gen_event:start({local, my_dummy_handler}),
+ ok = gen_event:add_handler(my_dummy_handler, dummy_h, [self()]),
+ [dummy_h] = gen_event:which_handlers(my_dummy_handler),
+ true = gen_event:call(my_dummy_handler, dummy_h, hibernate),
+ is_in_erlang_hibernate(Pid),
+
+ Pid ! wake,
+ is_not_in_erlang_hibernate(Pid),
+ later = gen_event:call(my_dummy_handler, dummy_h, hibernate_later),
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid, current_function)),
+ is_in_erlang_hibernate(Pid),
+
+ Pid ! wake,
+ is_not_in_erlang_hibernate(Pid),
+ gen_event:notify(my_dummy_handler, hibernate),
+ is_in_erlang_hibernate(Pid),
+ gen_event:notify(my_dummy_handler, wakeup),
+ is_not_in_erlang_hibernate(Pid),
+ gen_event:notify(my_dummy_handler, hibernate),
+ is_in_erlang_hibernate(Pid),
+ gen_event:sync_notify(my_dummy_handler, wakeup),
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid, current_function)),
+ ok = gen_event:sync_notify(my_dummy_handler, hibernate),
+ is_in_erlang_hibernate(Pid),
+
+ Pid ! wake,
+ is_not_in_erlang_hibernate(Pid),
+ ok = gen_event:add_handler(my_dummy_handler, dummy1_h, [self()]),
+ [_,_] = gen_event:which_handlers(my_dummy_handler),
+ gen_event:notify(my_dummy_handler, hibernate),
+ is_in_erlang_hibernate(Pid),
+ gen_event:notify(my_dummy_handler, wakeup),
+ is_in_erlang_hibernate(Pid),
+
+ Pid ! wake,
+ is_not_in_erlang_hibernate(Pid),
+
+ Pid ! gnurf,
+ is_in_erlang_hibernate(Pid),
+
+ Pid ! sleep,
+ is_in_erlang_hibernate(Pid),
+
+ Pid ! wake,
+ is_not_in_erlang_hibernate(Pid),
+ ok = gen_event:stop(my_dummy_handler),
+
+ {ok,Pid2} = gen_event:start({local, my_dummy_handler}),
+ ok = gen_event:add_handler(my_dummy_handler, dummy_h,
+ [self(),hibernate]),
+ is_in_erlang_hibernate(Pid2),
+ sys:suspend(my_dummy_handler),
+ is_in_erlang_hibernate(Pid2),
+ sys:resume(my_dummy_handler),
+ is_in_erlang_hibernate(Pid2),
+
+ Pid2 ! wake,
+ is_not_in_erlang_hibernate(Pid2),
-
- ?line ok = gen_event:stop(my_dummy_handler),
+ ok = gen_event:stop(my_dummy_handler),
ok.
+is_in_erlang_hibernate(Pid) ->
+ receive after 1 -> ok end,
+ is_in_erlang_hibernate_1(200, Pid).
+
+is_in_erlang_hibernate_1(0, Pid) ->
+ io:format("~p\n", [erlang:process_info(Pid, current_function)]),
+ ?t:fail(not_in_erlang_hibernate_3);
+is_in_erlang_hibernate_1(N, Pid) ->
+ {current_function,MFA} = erlang:process_info(Pid, current_function),
+ case MFA of
+ {erlang,hibernate,3} ->
+ ok;
+ _ ->
+ receive after 10 -> ok end,
+ is_in_erlang_hibernate_1(N-1, Pid)
+ end.
+
+is_not_in_erlang_hibernate(Pid) ->
+ receive after 1 -> ok end,
+ is_not_in_erlang_hibernate_1(200, Pid).
+
+is_not_in_erlang_hibernate_1(0, Pid) ->
+ io:format("~p\n", [erlang:process_info(Pid, current_function)]),
+ ?t:fail(still_in_erlang_hibernate);
+is_not_in_erlang_hibernate_1(N, Pid) ->
+ {current_function,MFA} = erlang:process_info(Pid, current_function),
+ case MFA of
+ {erlang,hibernate,3} ->
+ receive after 10 -> ok end,
+ is_not_in_erlang_hibernate_1(N-1, Pid);
+ _ ->
+ ok
+ end.
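%% The two helpers above replace the fixed one-second sleeps of the old test:
%% they poll erlang:process_info(Pid, current_function) up to 200 times with a
%% 10 ms pause, so a check waits at most about two seconds but normally returns
%% as soon as the process has (or has not) reached erlang:hibernate/3.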
add_handler(doc) -> [];
diff --git a/lib/stdlib/test/gen_fsm_SUITE.erl b/lib/stdlib/test/gen_fsm_SUITE.erl
index dabc10aec4..f003630535 100644
--- a/lib/stdlib/test/gen_fsm_SUITE.erl
+++ b/lib/stdlib/test/gen_fsm_SUITE.erl
@@ -596,129 +596,123 @@ replace_state(Config) when is_list(Config) ->
ok.
%% Hibernation
-hibernate(suite) -> [];
hibernate(Config) when is_list(Config) ->
OldFl = process_flag(trap_exit, true),
- ?line {ok, Pid0} = gen_fsm:start_link(?MODULE, hiber_now, []),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid0,current_function),
- ?line stop_it(Pid0),
+ {ok, Pid0} = gen_fsm:start_link(?MODULE, hiber_now, []),
+ is_in_erlang_hibernate(Pid0),
+ stop_it(Pid0),
test_server:messages_get(),
-
- ?line {ok, Pid} = gen_fsm:start_link(?MODULE, hiber, []),
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line hibernating = gen_fsm:sync_send_event(Pid,hibernate_sync),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line good_morning = gen_fsm:sync_send_event(Pid,wakeup_sync),
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line hibernating = gen_fsm:sync_send_event(Pid,hibernate_sync),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line five_more = gen_fsm:sync_send_event(Pid,snooze_sync),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line good_morning = gen_fsm:sync_send_event(Pid,wakeup_sync),
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line ok = gen_fsm:send_event(Pid,hibernate_async),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line ok = gen_fsm:send_event(Pid,wakeup_async),
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line ok = gen_fsm:send_event(Pid,hibernate_async),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line ok = gen_fsm:send_event(Pid,snooze_async),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line ok = gen_fsm:send_event(Pid,wakeup_async),
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line Pid ! hibernate_later,
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line receive after 2000 -> ok end,
- ?line ({current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function)),
- ?line 'alive!' = gen_fsm:sync_send_event(Pid,'alive?'),
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line Pid ! hibernate_now,
- ?line receive after 1000 -> ok end,
- ?line ({current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function)),
- ?line 'alive!' = gen_fsm:sync_send_event(Pid,'alive?'),
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
-
-
- ?line hibernating = gen_fsm:sync_send_all_state_event(Pid,hibernate_sync),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line good_morning = gen_fsm:sync_send_all_state_event(Pid,wakeup_sync),
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line hibernating = gen_fsm:sync_send_all_state_event(Pid,hibernate_sync),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line five_more = gen_fsm:sync_send_all_state_event(Pid,snooze_sync),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line good_morning = gen_fsm:sync_send_all_state_event(Pid,wakeup_sync),
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line ok = gen_fsm:send_all_state_event(Pid,hibernate_async),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line ok = gen_fsm:send_all_state_event(Pid,wakeup_async),
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line ok = gen_fsm:send_all_state_event(Pid,hibernate_async),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line ok = gen_fsm:send_all_state_event(Pid,snooze_async),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line ok = gen_fsm:send_all_state_event(Pid,wakeup_async),
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
-
- ?line hibernating = gen_fsm:sync_send_all_state_event(Pid,hibernate_sync),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line sys:suspend(Pid),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line sys:resume(Pid),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
-
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} =
- erlang:process_info(Pid,current_function),
- ?line good_morning = gen_fsm:sync_send_all_state_event(Pid,wakeup_sync),
- ?line receive after 1000 -> ok end,
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line stop_it(Pid),
+ {ok, Pid} = gen_fsm:start_link(?MODULE, hiber, []),
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid,current_function)),
+ hibernating = gen_fsm:sync_send_event(Pid, hibernate_sync),
+ is_in_erlang_hibernate(Pid),
+ good_morning = gen_fsm:sync_send_event(Pid, wakeup_sync),
+ is_not_in_erlang_hibernate(Pid),
+ hibernating = gen_fsm:sync_send_event(Pid, hibernate_sync),
+ is_in_erlang_hibernate(Pid),
+ five_more = gen_fsm:sync_send_event(Pid, snooze_sync),
+ is_in_erlang_hibernate(Pid),
+ good_morning = gen_fsm:sync_send_event(Pid, wakeup_sync),
+ is_not_in_erlang_hibernate(Pid),
+ ok = gen_fsm:send_event(Pid, hibernate_async),
+ is_in_erlang_hibernate(Pid),
+ ok = gen_fsm:send_event(Pid, wakeup_async),
+ is_not_in_erlang_hibernate(Pid),
+ ok = gen_fsm:send_event(Pid, hibernate_async),
+ is_in_erlang_hibernate(Pid),
+ ok = gen_fsm:send_event(Pid, snooze_async),
+ is_in_erlang_hibernate(Pid),
+ ok = gen_fsm:send_event(Pid, wakeup_async),
+ is_not_in_erlang_hibernate(Pid),
+
+ Pid ! hibernate_later,
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid, current_function)),
+ is_in_erlang_hibernate(Pid),
+
+ 'alive!' = gen_fsm:sync_send_event(Pid,'alive?'),
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid, current_function)),
+ Pid ! hibernate_now,
+ is_in_erlang_hibernate(Pid),
+
+ 'alive!' = gen_fsm:sync_send_event(Pid,'alive?'),
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid, current_function)),
+
+ hibernating = gen_fsm:sync_send_all_state_event(Pid, hibernate_sync),
+ is_in_erlang_hibernate(Pid),
+ good_morning = gen_fsm:sync_send_all_state_event(Pid, wakeup_sync),
+ is_not_in_erlang_hibernate(Pid),
+ hibernating = gen_fsm:sync_send_all_state_event(Pid, hibernate_sync),
+ is_in_erlang_hibernate(Pid),
+ five_more = gen_fsm:sync_send_all_state_event(Pid, snooze_sync),
+ is_in_erlang_hibernate(Pid),
+ good_morning = gen_fsm:sync_send_all_state_event(Pid, wakeup_sync),
+ is_not_in_erlang_hibernate(Pid),
+ ok = gen_fsm:send_all_state_event(Pid, hibernate_async),
+ is_in_erlang_hibernate(Pid),
+ ok = gen_fsm:send_all_state_event(Pid, wakeup_async),
+ is_not_in_erlang_hibernate(Pid),
+ ok = gen_fsm:send_all_state_event(Pid, hibernate_async),
+ is_in_erlang_hibernate(Pid),
+ ok = gen_fsm:send_all_state_event(Pid, snooze_async),
+ is_in_erlang_hibernate(Pid),
+ ok = gen_fsm:send_all_state_event(Pid, wakeup_async),
+ is_not_in_erlang_hibernate(Pid),
+
+ hibernating = gen_fsm:sync_send_all_state_event(Pid, hibernate_sync),
+ is_in_erlang_hibernate(Pid),
+ sys:suspend(Pid),
+ is_in_erlang_hibernate(Pid),
+ sys:resume(Pid),
+ is_in_erlang_hibernate(Pid),
+ receive after 1000 -> ok end,
+ is_in_erlang_hibernate(Pid),
+
+ good_morning = gen_fsm:sync_send_all_state_event(Pid, wakeup_sync),
+ is_not_in_erlang_hibernate(Pid),
+ stop_it(Pid),
test_server:messages_get(),
process_flag(trap_exit, OldFl),
ok.
+is_in_erlang_hibernate(Pid) ->
+ receive after 1 -> ok end,
+ is_in_erlang_hibernate_1(200, Pid).
+
+is_in_erlang_hibernate_1(0, Pid) ->
+ io:format("~p\n", [erlang:process_info(Pid, current_function)]),
+ ?t:fail(not_in_erlang_hibernate_3);
+is_in_erlang_hibernate_1(N, Pid) ->
+ {current_function,MFA} = erlang:process_info(Pid, current_function),
+ case MFA of
+ {erlang,hibernate,3} ->
+ ok;
+ _ ->
+ receive after 10 -> ok end,
+ is_in_erlang_hibernate_1(N-1, Pid)
+ end.
+is_not_in_erlang_hibernate(Pid) ->
+ receive after 1 -> ok end,
+ is_not_in_erlang_hibernate_1(200, Pid).
+
+is_not_in_erlang_hibernate_1(0, Pid) ->
+ io:format("~p\n", [erlang:process_info(Pid, current_function)]),
+ ?t:fail(still_in_erlang_hibernate);
+is_not_in_erlang_hibernate_1(N, Pid) ->
+ {current_function,MFA} = erlang:process_info(Pid, current_function),
+ case MFA of
+ {erlang,hibernate,3} ->
+ receive after 10 -> ok end,
+ is_not_in_erlang_hibernate_1(N-1, Pid);
+ _ ->
+ ok
+ end.
%%sys1(suite) -> [];
%%sys1(_) ->
diff --git a/lib/stdlib/test/gen_server_SUITE.erl b/lib/stdlib/test/gen_server_SUITE.erl
index 30dabf63c5..66341f495f 100644
--- a/lib/stdlib/test/gen_server_SUITE.erl
+++ b/lib/stdlib/test/gen_server_SUITE.erl
@@ -641,15 +641,13 @@ info(Config) when is_list(Config) ->
end,
ok.
-hibernate(suite) -> [];
hibernate(Config) when is_list(Config) ->
OldFl = process_flag(trap_exit, true),
- ?line {ok, Pid0} =
+ {ok, Pid0} =
gen_server:start_link({local, my_test_name_hibernate0},
- gen_server_SUITE, hibernate, []),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid0,current_function),
- ?line ok = gen_server:call(my_test_name_hibernate0, stop),
+ gen_server_SUITE, hibernate, []),
+ is_in_erlang_hibernate(Pid0),
+ ok = gen_server:call(my_test_name_hibernate0, stop),
receive
{'EXIT', Pid0, stopped} ->
ok
@@ -657,70 +655,66 @@ hibernate(Config) when is_list(Config) ->
test_server:fail(gen_server_did_not_die)
end,
- ?line {ok, Pid} =
+ {ok, Pid} =
gen_server:start_link({local, my_test_name_hibernate},
- gen_server_SUITE, [], []),
+ gen_server_SUITE, [], []),
- ?line ok = gen_server:call(my_test_name_hibernate, started_p),
- ?line true = gen_server:call(my_test_name_hibernate, hibernate),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line Parent = self(),
+ ok = gen_server:call(my_test_name_hibernate, started_p),
+ true = gen_server:call(my_test_name_hibernate, hibernate),
+ is_in_erlang_hibernate(Pid),
+ Parent = self(),
Fun = fun() ->
- receive
- go ->
- ok
- end,
- receive
- after 1000 ->
- ok
- end,
- X = erlang:process_info(Pid,current_function),
+ receive go -> ok end,
+ receive after 1000 -> ok end,
+ X = erlang:process_info(Pid, current_function),
Pid ! continue,
Parent ! {result,X}
end,
- ?line Pid2 = spawn_link(Fun),
- ?line true = gen_server:call(my_test_name_hibernate, {hibernate_noreply,Pid2}),
-
- ?line gen_server:cast(my_test_name_hibernate, hibernate_later),
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line receive after 2000 -> ok end,
- ?line ({current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function)),
- ?line ok = gen_server:call(my_test_name_hibernate, started_p),
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line gen_server:cast(my_test_name_hibernate, hibernate_now),
- ?line receive after 1000 -> ok end,
- ?line ({current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function)),
- ?line ok = gen_server:call(my_test_name_hibernate, started_p),
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line Pid ! hibernate_later,
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line receive after 2000 -> ok end,
- ?line ({current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function)),
- ?line ok = gen_server:call(my_test_name_hibernate, started_p),
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line Pid ! hibernate_now,
- ?line receive after 1000 -> ok end,
- ?line ({current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function)),
- ?line ok = gen_server:call(my_test_name_hibernate, started_p),
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
- ?line receive
- {result,R} ->
- ?line {current_function,{erlang,hibernate,3}} = R
- end,
- ?line true = gen_server:call(my_test_name_hibernate, hibernate),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line sys:suspend(my_test_name_hibernate),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line sys:resume(my_test_name_hibernate),
- ?line receive after 1000 -> ok end,
- ?line {current_function,{erlang,hibernate,3}} = erlang:process_info(Pid,current_function),
- ?line ok = gen_server:call(my_test_name_hibernate, started_p),
- ?line true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
-
- ?line ok = gen_server:call(my_test_name_hibernate, stop),
+ Pid2 = spawn_link(Fun),
+ true = gen_server:call(my_test_name_hibernate, {hibernate_noreply,Pid2}),
+
+ gen_server:cast(my_test_name_hibernate, hibernate_later),
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid, current_function)),
+ is_in_erlang_hibernate(Pid),
+ ok = gen_server:call(my_test_name_hibernate, started_p),
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid, current_function)),
+
+ gen_server:cast(my_test_name_hibernate, hibernate_now),
+ is_in_erlang_hibernate(Pid),
+ ok = gen_server:call(my_test_name_hibernate, started_p),
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid, current_function)),
+
+ Pid ! hibernate_later,
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid, current_function)),
+ is_in_erlang_hibernate(Pid),
+ ok = gen_server:call(my_test_name_hibernate, started_p),
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid, current_function)),
+
+ Pid ! hibernate_now,
+ is_in_erlang_hibernate(Pid),
+ ok = gen_server:call(my_test_name_hibernate, started_p),
+ true = ({current_function,{erlang,hibernate,3}} =/=
+ erlang:process_info(Pid, current_function)),
+ receive
+ {result,R} ->
+ {current_function,{erlang,hibernate,3}} = R
+ end,
+
+ true = gen_server:call(my_test_name_hibernate, hibernate),
+ is_in_erlang_hibernate(Pid),
+ sys:suspend(my_test_name_hibernate),
+ is_in_erlang_hibernate(Pid),
+ sys:resume(my_test_name_hibernate),
+ is_in_erlang_hibernate(Pid),
+ ok = gen_server:call(my_test_name_hibernate, started_p),
+ true = ({current_function,{erlang,hibernate,3}} =/= erlang:process_info(Pid,current_function)),
+
+ ok = gen_server:call(my_test_name_hibernate, stop),
receive
{'EXIT', Pid, stopped} ->
ok
@@ -730,6 +724,23 @@ hibernate(Config) when is_list(Config) ->
process_flag(trap_exit, OldFl),
ok.
+is_in_erlang_hibernate(Pid) ->
+ receive after 1 -> ok end,
+ is_in_erlang_hibernate_1(200, Pid).
+
+is_in_erlang_hibernate_1(0, Pid) ->
+ io:format("~p\n", [erlang:process_info(Pid, current_function)]),
+ ?t:fail(not_in_erlang_hibernate_3);
+is_in_erlang_hibernate_1(N, Pid) ->
+ {current_function,MFA} = erlang:process_info(Pid, current_function),
+ case MFA of
+ {erlang,hibernate,3} ->
+ ok;
+ _ ->
+ receive after 10 -> ok end,
+ is_in_erlang_hibernate_1(N-1, Pid)
+ end.
+
%% --------------------------------------
%% Test gen_server:abcast and handle_cast.
%% Test all different return values from
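The hunks above replace the fixed one-second sleeps with the polling helper is_in_erlang_hibernate/1, which retries erlang:process_info(Pid, current_function) until the process is parked in erlang:hibernate/3. A minimal sketch of the same polling idiom outside the suite (the module name and retry limit are illustrative, not part of the patch):

    -module(hibernate_check).
    -export([wait_until_hibernating/1]).

    %% Poll for up to ~2 seconds until Pid is suspended in erlang:hibernate/3.
    wait_until_hibernating(Pid) ->
        wait_until_hibernating(Pid, 200).

    wait_until_hibernating(_Pid, 0) ->
        error(not_hibernating);
    wait_until_hibernating(Pid, N) ->
        case erlang:process_info(Pid, current_function) of
            {current_function, {erlang,hibernate,3}} ->
                ok;
            _Other ->
                timer:sleep(10),
                wait_until_hibernating(Pid, N - 1)
        end.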
diff --git a/lib/stdlib/test/io_SUITE.erl b/lib/stdlib/test/io_SUITE.erl
index 2203dd8f51..8d53949c40 100644
--- a/lib/stdlib/test/io_SUITE.erl
+++ b/lib/stdlib/test/io_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2014. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -31,7 +31,7 @@
printable_range/1,
io_lib_print_binary_depth_one/1, otp_10302/1, otp_10755/1,
otp_10836/1, io_lib_width_too_small/1,
- io_with_huge_message_queue/1]).
+ io_with_huge_message_queue/1, format_string/1]).
-export([pretty/2]).
@@ -71,7 +71,8 @@ all() ->
io_fread_newlines, otp_8989, io_lib_fread_literal,
printable_range,
io_lib_print_binary_depth_one, otp_10302, otp_10755, otp_10836,
- io_lib_width_too_small, io_with_huge_message_queue].
+ io_lib_width_too_small, io_with_huge_message_queue,
+ format_string].
groups() ->
[].
@@ -1035,7 +1036,14 @@ rp(Term, Col, Ll, D, M, RF) ->
lists:flatten(io_lib:format("~s", [R])).
fmt(Fmt, Args) ->
- lists:flatten(io_lib:format(Fmt, Args)).
+ FormatList = io_lib:scan_format(Fmt, Args),
+ {Fmt2, Args2} = io_lib:unscan_format(FormatList),
+ Chars1 = lists:flatten(io_lib:build_text(FormatList)),
+ Chars2 = lists:flatten(io_lib:format(Fmt2, Args2)),
+ Chars3 = lists:flatten(io_lib:format(Fmt, Args)),
+ Chars1 = Chars2,
+ Chars2 = Chars3,
+ Chars3.
rfd(a, 0) ->
[];
@@ -2261,3 +2269,9 @@ writes(0, _) -> ok;
writes(N, F1) ->
file:write(F1, "hello\n"),
writes(N - 1, F1).
+
+format_string(Config) ->
+ %% All but padding is tested by fmt/2.
+ "xxxxxxsssx" = fmt("~10.4.xs", ["sss"]),
+ "xxxxxxsssx" = fmt("~10.4.*s", [$x, "sss"]),
+ ok.
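The new fmt/2 cross-checks io_lib:scan_format/2, io_lib:unscan_format/1 and io_lib:build_text/1 against plain io_lib:format/2, so every existing case also exercises the scan/unscan round trip, while format_string/1 adds the padding cases. A short sketch of that round trip, assuming the io_lib API used above (round_trip/2 is an illustrative name):

    %% Build the output twice: once from the scanned format list and once
    %% from the unscanned {Format, Args}; both must equal io_lib:format/2.
    round_trip(Fmt, Args) ->
        FormatList = io_lib:scan_format(Fmt, Args),
        {Fmt2, Args2} = io_lib:unscan_format(FormatList),
        Built = lists:flatten(io_lib:build_text(FormatList)),
        Built = lists:flatten(io_lib:format(Fmt2, Args2)),
        Built = lists:flatten(io_lib:format(Fmt, Args)),
        Built.

    %% round_trip("~10.4.xs", ["sss"]) gives "xxxxxxsssx": precision 4 pads
    %% "sss" to "sssx", then field width 10 pads on the left with $x.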
diff --git a/lib/stdlib/test/io_proto_SUITE.erl b/lib/stdlib/test/io_proto_SUITE.erl
index 76a8109a8d..78432789cd 100644
--- a/lib/stdlib/test/io_proto_SUITE.erl
+++ b/lib/stdlib/test/io_proto_SUITE.erl
@@ -69,12 +69,7 @@
init_per_testcase(_Case, Config) ->
?line Dog = ?t:timetrap(?default_timeout),
- Term = case os:getenv("TERM") of
- List when is_list(List) ->
- List;
- _ ->
- "dumb"
- end,
+ Term = os:getenv("TERM", "dumb"),
os:putenv("TERM","vt100"),
[{watchdog, Dog}, {term, Term} | Config].
end_per_testcase(_Case, Config) ->
@@ -481,149 +476,182 @@ unicode_options(Config) when is_list(Config) ->
ok.
-unicode_options_gen(suite) ->
- [];
-unicode_options_gen(doc) ->
- ["Tests various unicode options on random generated files"];
+%% Tests various unicode options on random generated files.
unicode_options_gen(Config) when is_list(Config) ->
- ?line random:seed(1240,900586,553728),
- ?line PrivDir = ?config(priv_dir,Config),
- ?line AllModes = [utf8,utf16,{utf16,big},{utf16,little},utf32,{utf32,big},{utf32,little}],
- ?line FSize = 17*1024,
- ?line NumItersRead = 2,
- ?line NumItersWrite = 2,
- ?line Dir = filename:join([PrivDir,"GENDATA1"]),
- ?line file:make_dir(Dir),
-
- %dbg:tracer(process,{fun(A,_) -> erlang:display(A) end,true}),
- %dbg:tpl(file_io_server,x),
- %dbg:ctpl(file_io_server,cafu),
- %dbg:tp(unicode,x),
-
- DoOneFile1 = fun(Encoding,N,M) ->
- ?dbg({Encoding,M,N}),
- io:format("Read test: Encoding ~p, Chunk size ~p, Iteration ~p~n",[Encoding,M,N]),
- io:format(standard_error,"Read test: Encoding ~p, Chunk size ~p, Iteration ~p\r\n",[Encoding,M,N]),
- ?line Fname = filename:join([Dir,"genfile_"++enc2str(Encoding)++"_"++integer_to_list(N)]),
- ?dbg(?LINE),
- ?line Ulist = random_unicode(FSize),
- ?dbg(?LINE),
- ?line my_write_file(Fname,Ulist,Encoding),
- ?dbg(?LINE),
- ?line {ok,F1} = file:open(Fname,[read,{encoding,Encoding}]),
-
- ?dbg(?LINE),
- ?line Res1 = read_whole_file(fun(FD) -> io:get_line(FD,'') end,F1),
- ?dbg(?LINE),
- ?line Ulist = unicode:characters_to_list(Res1,unicode),
- ?dbg(?LINE),
- ?line file:close(F1),
- ?line {ok,F2} = file:open(Fname, [read,binary,{encoding,Encoding}]),
- ?line Res2 = read_whole_file(fun(FD) -> io:get_chars(FD,'',M) end,F2),
- ?line Ulist = unicode:characters_to_list(Res2,unicode),
- ?dbg(?LINE),
- ?line file:close(F2),
- ?line {ok,F3} = file:open(Fname, [read,binary,{encoding,Encoding}]),
- ?dbg(?LINE),
-%% case {Encoding,M,N} of
-%% {{utf16,little},10,2} ->
-%% dbg:p(F3,call);
-%% _ ->
-%% ok
-%% end,
-
- ?line Res3 = read_whole_file(fun(FD) -> case io:fread(FD,'',"~ts") of {ok,D} -> D; O -> O end end, F3),
- ?dbg(?LINE),
- ?line Ulist2 = [ X || X <- Ulist,
- X =/= $\n, X =/= $ ],
- ?dbg(?LINE),
- ?line Ulist2 = unicode:characters_to_list(Res3,unicode),
- ?dbg(?LINE),
- ?line file:close(F3),
- ?line {ok,F4} = file:open(Fname, [read,{encoding,Encoding}]),
- ?line Res4 = read_whole_file(fun(FD) -> case io:fread(FD,'',"~tc") of {ok,D} -> D; O -> O end end,F4),
- ?line Ulist3 = [ X || X <- Ulist,
- X =/= $\n ],
- ?line Ulist3 = unicode:characters_to_list(Res4,unicode),
- ?dbg(?LINE),
- ?line file:close(F4),
- ?line file:delete(Fname)
- end,
-
- [ [ [ DoOneFile1(E,N,M) || E <- AllModes ] || M <- [10,1000,128,1024,8192,8193] ] || N <- lists:seq(1,NumItersRead)],
- DoOneFile2 = fun(Encoding,N,M) ->
- ?dbg({Encoding,M,N}),
- io:format("Write test: Encoding ~p, Chunk size ~p, Iteration ~p~n",[Encoding,M,N]),
- io:format(standard_error,"Write test: Encoding ~p, Chunk size ~p, Iteration ~p\r\n",[Encoding,M,N]),
- ?line Fname = filename:join([Dir,"genfile_"++enc2str(Encoding)++"_"++integer_to_list(N)]),
- ?dbg(?LINE),
- ?line Ulist = random_unicode(FSize),
- ?dbg(?LINE),
- ?line {ok,F1} = file:open(Fname,[write,{encoding,Encoding}]),
- ?line io:put_chars(F1,Ulist),
- ?line file:close(F1),
- ?line Ulist = my_read_file(Fname,Encoding),
- ?line file:delete(Fname),
- ?line {ok,F2} = file:open(Fname,[write,binary,{encoding,Encoding}]),
- ?line io:put_chars(F2,Ulist),
- ?line file:close(F2),
- ?line Ulist = my_read_file(Fname,Encoding),
- ?line file:delete(Fname),
- ?line {ok,F3} = file:open(Fname,[write,{encoding,Encoding}]),
- ?line LL = string:tokens(Ulist,"\n"),
- ?line Ulist2 = lists:flatten(LL),
- ?line [ io:format(F3,"~ts",[L]) || L <- LL ],
- ?line file:close(F3),
- ?line Ulist2 = my_read_file(Fname,Encoding),
- ?line file:delete(Fname),
- ?line {ok,F4} = file:open(Fname,[write,{encoding,Encoding}]),
- ?line [ io:format(F4,"~tc",[C]) || C <- Ulist ],
- ?line file:close(F4),
- ?line Ulist = my_read_file(Fname,Encoding),
- ?line file:delete(Fname),
- ?line {ok,F5} = file:open(Fname,[write,{encoding,Encoding}]),
- ?line io:put_chars(F5,unicode:characters_to_binary(Ulist)),
- ?line file:close(F5),
- ?line Ulist = my_read_file(Fname,Encoding),
- ?line file:delete(Fname),
- ok
- end,
- [ [ [ DoOneFile2(E,N,M) || E <- AllModes ] || M <- [10,1000,128,1024,8192,8193] ] || N <- lists:seq(1,NumItersWrite)],
+ random:seed(1240, 900586, 553728),
+ PrivDir = ?config(priv_dir, Config),
+ AllModes = [utf8,utf16,{utf16,big},{utf16,little},
+ utf32,{utf32,big},{utf32,little}],
+ FSize = 9*1024,
+ NumItersRead = 2,
+ NumItersWrite = 2,
+ Dir = filename:join(PrivDir, "GENDATA1"),
+ file:make_dir(Dir),
+
+ DoOneFile1 =
+ fun(Encoding, N, M) ->
+ ?dbg({Encoding,M,N}),
+ io:format("Read test: Encoding ~p, Chunk size ~p, Iteration ~p~n",[Encoding,M,N]),
+ io:format(standard_error,
+ "Read test: Encoding ~p, Chunk size ~p, Iteration ~p\r\n",[Encoding,M,N]),
+ Fname = filename:join(Dir,
+ "genfile_"++enc2str(Encoding)++
+ "_"++integer_to_list(N)),
+ Ulist = random_unicode(FSize),
+ Bin = unicode:characters_to_binary(Ulist, utf8, Encoding),
+ ok = file:write_file(Fname, Bin),
+
+ Read1 = fun(FD) -> io:get_line(FD, '') end,
+ Res1 = read_whole_file(Fname,
+ [read,read_ahead,{encoding,Encoding}],
+ Read1),
+
+ Read2 = fun(FD) -> io:get_chars(FD, '', M) end,
+ Res2 = read_whole_file(Fname,
+ [read,binary,
+ read_ahead,{encoding,Encoding}],
+ Read2),
+
+ Read3 = fun(FD) ->
+ case io:fread(FD, '', "~ts") of
+ {ok,D} -> D;
+ Other -> Other end
+ end,
+ Res3 = read_whole_file(Fname,
+ [read,binary,
+ read_ahead,{encoding,Encoding}],
+ Read3),
+
+ Read4 = fun(FD) ->
+ case io:fread(FD, '', "~ts") of
+ {ok,D} -> D;
+ Other -> Other end
+ end,
+ Res4 = read_whole_file(Fname,
+ [read,read_ahead,{encoding,Encoding}],
+ Read4),
+
+ Ulist2 = [X || X <- Ulist, X =/= $\n, X =/= $\s],
+ Ulist3 = [X || X <- Ulist, X =/= $\n],
+ Ulist = done(Res1),
+ Ulist = done(Res2),
+ Ulist2 = done(Res3),
+ Ulist3 = done(Res4),
+
+ file:delete(Fname)
+ end,
+ [ [ [ DoOneFile1(E, N, M) || E <- AllModes ] ||
+ M <- [10,1000,128,1024,8192,8193] ] ||
+ N <- lists:seq(1, NumItersRead) ],
+
+ DoOneFile2 =
+ fun(Encoding,N,M) ->
+ ?dbg({Encoding,M,N}),
+ io:format("Write test: Encoding ~p, Chunk size ~p, Iteration ~p~n",[Encoding,M,N]),
+ io:format(standard_error,
+ "Write test: Encoding ~p, Chunk size ~p, Iteration ~p\r\n",[Encoding,M,N]),
+ Fname = filename:join(Dir,
+ "genfile_"++enc2str(Encoding)++
+ "_"++integer_to_list(N)),
+ Ulist = random_unicode(FSize),
+
+ Res1 = write_read_file(Fname, 1,
+ [write],
+ Encoding,
+ fun(FD) -> io:put_chars(FD, Ulist) end),
+
+ Res2 = write_read_file(Fname, 2,
+ [write,binary],
+ Encoding,
+ fun(FD) -> io:put_chars(FD, Ulist) end),
+
+ Fun3 = fun(FD) ->
+ _ = [io:format(FD, "~tc", [C]) || C <- Ulist],
+ ok
+ end,
+ Res3 = write_read_file(Fname, 3,
+ [write],
+ Encoding,
+ Fun3),
+
+ Fun4 = fun(FD) ->
+ io:put_chars(FD,
+ unicode:characters_to_binary(Ulist))
+ end,
+ Res4 = write_read_file(Fname, 4,
+ [write],
+ Encoding,
+ Fun4),
+
+ LL = string:tokens(Ulist, "\n"),
+ Fun5 = fun(FD) ->
+ _ = [io:format(FD, "~ts", [L]) || L <- LL],
+ ok
+ end,
+ Res5 = write_read_file(Fname, 5,
+ [write],
+ Encoding,
+ Fun5),
+
+ Ulist2 = lists:flatten(LL),
+ ResBin = done(Res1),
+ ResBin = done(Res2),
+ ResBin = done(Res3),
+ ResBin = done(Res4),
+ Ulist = unicode:characters_to_list(ResBin, Encoding),
+
+ ResBin2 = done(Res5),
+ Ulist2 = unicode:characters_to_list(ResBin2, Encoding),
+
+ ok
+ end,
+ [ [ [ DoOneFile2(E, N, M) || E <- AllModes ] ||
+ M <- [10,1000,128,1024,8192,8193] ] ||
+ N <- lists:seq(1, NumItersWrite) ],
ok.
+read_whole_file(Fname, Options, Fun) ->
+ do(fun() ->
+ do_read_whole_file(Fname, Options, Fun)
+ end).
+do_read_whole_file(Fname, Options, Fun) ->
+ {ok,F} = file:open(Fname, Options),
+ Res = do_read_whole_file_1(Fun, F),
+ ok = file:close(F),
+ unicode:characters_to_list(Res, unicode).
-
-read_whole_file(Fun,F) ->
+do_read_whole_file_1(Fun, F) ->
case Fun(F) of
eof ->
[];
{error,Error} ->
- ?dbg(Error),
receive after 10000 -> ok end,
exit(Error);
Other ->
- %?dbg(Other),
- [Other | read_whole_file(Fun,F)]
+ [Other|do_read_whole_file_1(Fun, F)]
end.
-
+write_read_file(Fname0, N, Options, Enc, Writer) ->
+ Fname = Fname0 ++ "_" ++ integer_to_list(N),
+ do(fun() ->
+ do_write_read_file(Fname, Options, Enc, Writer)
+ end).
+
+do_write_read_file(Fname, Options, Encoding, Writer) ->
+ {ok,F} = file:open(Fname, [{encoding,Encoding}|Options]),
+ Writer(F),
+ ok = file:close(F),
+ {ok,Bin} = file:read_file(Fname),
+ ok = file:delete(Fname),
+ Bin.
+
enc2str(Atom) when is_atom(Atom) ->
atom_to_list(Atom);
enc2str({A1,A2}) when is_atom(A1), is_atom(A2) ->
atom_to_list(A1)++"_"++atom_to_list(A2).
-
-
-my_write_file(Filename,UniList,Encoding) ->
- Bin = unicode:characters_to_binary(UniList,utf8,Encoding),
- file:write_file(Filename,Bin).
-
-my_read_file(Filename,Encoding) ->
- {ok,Bin} = file:read_file(Filename),
- unicode:characters_to_list(Bin,Encoding).
-
random_unicode(0) ->
[];
random_unicode(N) ->
@@ -1738,8 +1766,7 @@ toerl_loop(Port,Acc) ->
end.
millistamp() ->
- {Mega, Secs, Micros} = erlang:now(),
- (Micros div 1000) + Secs * 1000 + Mega * 1000000000.
+ erlang:monotonic_time(milli_seconds).
get_data_within(Port, X, Acc) when X =< 0 ->
?dbg({get_data_within, X, Acc, ?LINE}),
@@ -1937,3 +1964,15 @@ chomp(<<Ch,Rest/binary>>) ->
<<Ch,X/binary>>;
chomp(Atom) ->
Atom.
+
+do(Fun) ->
+ {_,Ref} = spawn_monitor(fun() ->
+ exit(Fun())
+ end),
+ Ref.
+
+done(Ref) ->
+ receive
+ {'DOWN',Ref,process,_,Result} ->
+ Result
+ end.
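The rewritten unicode_options_gen/1 runs every reader and writer in its own process through the new do/1 and done/1 helpers: the job's return value becomes the exit reason, and the monitor's 'DOWN' message carries it back. A small sketch of the same fork/join pattern (function names illustrative):

    %% Start a job in its own process; collect its result via the monitor.
    run_async(Fun) ->
        {_Pid, Ref} = spawn_monitor(fun() -> exit(Fun()) end),
        Ref.

    await(Ref) ->
        receive
            {'DOWN', Ref, process, _Pid, Result} -> Result
        end.

    %% Fork two independent computations and join them.
    fork_join_demo() ->
        R1 = run_async(fun() -> lists:sum(lists:seq(1, 100)) end),
        R2 = run_async(fun() -> length("hello") end),
        {5050, 5} = {await(R1), await(R2)},
        ok.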
diff --git a/lib/stdlib/test/lists_SUITE.erl b/lib/stdlib/test/lists_SUITE.erl
index f4589a8e24..01c138d94c 100644
--- a/lib/stdlib/test/lists_SUITE.erl
+++ b/lib/stdlib/test/lists_SUITE.erl
@@ -1704,7 +1704,7 @@ fun_pid(Fun) ->
get_seed() ->
case random:seed() of
undefined ->
- now();
+ erlang:timestamp();
Tuple ->
Tuple
end.
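erlang:now/0 is deprecated as of OTP 18; when only a seed value is needed, erlang:timestamp/0 returns the same {MegaSecs, Secs, MicroSecs} shape without the monotonicity bookkeeping. A one-line sketch of the seeding idiom used here (seed_random/0 is an illustrative name):

    %% Seed the legacy random module from the wall clock instead of now/0.
    seed_random() ->
        {A, B, C} = erlang:timestamp(),
        random:seed(A, B, C).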
diff --git a/lib/stdlib/test/maps_SUITE.erl b/lib/stdlib/test/maps_SUITE.erl
index dda20a615b..f8f241d834 100644
--- a/lib/stdlib/test/maps_SUITE.erl
+++ b/lib/stdlib/test/maps_SUITE.erl
@@ -34,13 +34,23 @@
-export([init_per_testcase/2]).
-export([end_per_testcase/2]).
--export([t_get_3/1,t_with_2/1,t_without_2/1]).
+-export([t_get_3/1, t_filter_2/1,
+ t_fold_3/1,t_map_2/1,t_size_1/1,
+ t_with_2/1,t_without_2/1]).
+
+%-define(badmap(V,F,Args), {'EXIT', {{badmap,V}, [{maps,F,Args,_}|_]}}).
+%-define(badarg(F,Args), {'EXIT', {badarg, [{maps,F,Args,_}|_]}}).
+% silly broken hipe
+-define(badmap(V,F,_Args), {'EXIT', {{badmap,V}, [{maps,F,_,_}|_]}}).
+-define(badarg(F,_Args), {'EXIT', {badarg, [{maps,F,_,_}|_]}}).
suite() ->
[{ct_hooks, [ts_install_cth]}].
all() ->
- [t_get_3,t_with_2,t_without_2].
+ [t_get_3,t_filter_2,
+ t_fold_3,t_map_2,t_size_1,
+ t_with_2,t_without_2].
init_per_suite(Config) ->
Config.
@@ -63,6 +73,9 @@ t_get_3(Config) when is_list(Config) ->
value1 = maps:get(key1, Map, DefaultValue),
value2 = maps:get(key2, Map, DefaultValue),
DefaultValue = maps:get(key3, Map, DefaultValue),
+
+ %% error case
+ ?badmap(a,get,[[a,b],a,def]) = (catch maps:get([a,b],id(a),def)),
ok.
t_without_2(_Config) ->
@@ -70,6 +83,11 @@ t_without_2(_Config) ->
M0 = maps:from_list([{{k,I},{v,I}}||I<-lists:seq(1,100)]),
M1 = maps:from_list([{{k,I},{v,I}}||I<-lists:seq(1,100) -- Ki]),
M1 = maps:without([{k,I}||I <- Ki],M0),
+
+ %% error case
+ ?badmap(a,without,[[a,b],a]) = (catch maps:without([a,b],id(a))),
+ ?badmap(a,without,[{a,b},a]) = (catch maps:without({a,b},id(a))),
+ ?badarg(without,[a,#{}]) = (catch maps:without(a,#{})),
ok.
t_with_2(_Config) ->
@@ -77,4 +95,63 @@ t_with_2(_Config) ->
M0 = maps:from_list([{{k,I},{v,I}}||I<-lists:seq(1,100)]),
M1 = maps:from_list([{{k,I},{v,I}}||I<-Ki]),
M1 = maps:with([{k,I}||I <- Ki],M0),
+
+ %% error case
+ ?badmap(a,with,[[a,b],a]) = (catch maps:with([a,b],id(a))),
+ ?badmap(a,with,[{a,b},a]) = (catch maps:with({a,b},id(a))),
+ ?badarg(with,[a,#{}]) = (catch maps:with(a,#{})),
+ ok.
+
+t_filter_2(Config) when is_list(Config) ->
+ M = #{a => 2, b => 3, c=> 4, "a" => 1, "b" => 2, "c" => 4},
+ Pred1 = fun(K,V) -> is_atom(K) andalso (V rem 2) =:= 0 end,
+ Pred2 = fun(K,V) -> is_list(K) andalso (V rem 2) =:= 0 end,
+ #{a := 2,c := 4} = maps:filter(Pred1,M),
+ #{"b" := 2,"c" := 4} = maps:filter(Pred2,M),
+ %% error case
+ ?badmap(a,filter,[_,a]) = (catch maps:filter(fun(_,_) -> ok end,id(a))),
+ ?badarg(filter,[<<>>,#{}]) = (catch maps:filter(id(<<>>),#{})),
+ ok.
+
+t_fold_3(Config) when is_list(Config) ->
+ Vs = lists:seq(1,200),
+ M0 = maps:from_list([{{k,I},I}||I<-Vs]),
+ #{ {k,1} := 1, {k,200} := 200} = M0,
+ Tot0 = lists:sum(Vs),
+ Tot1 = maps:fold(fun({k,_},V,A) -> A + V end, 0, M0),
+ true = Tot0 =:= Tot1,
+
+ %% error case
+ ?badmap(a,fold,[_,0,a]) = (catch maps:fold(fun(_,_,_) -> ok end,0,id(a))),
+ ?badarg(fold,[<<>>,0,#{}]) = (catch maps:fold(id(<<>>),0,#{})),
ok.
+
+t_map_2(Config) when is_list(Config) ->
+ Vs = lists:seq(1,200),
+ M0 = maps:from_list([{{k,I},I}||I<-Vs]),
+ #{ {k,1} := 1, {k,200} := 200} = M0,
+ M1 = maps:map(fun({k,_},V) -> V + 42 end, M0),
+ #{ {k,1} := 43, {k,200} := 242} = M1,
+
+ %% error case
+ ?badmap(a,map,[_,a]) = (catch maps:map(fun(_,_) -> ok end, id(a))),
+ ?badarg(map,[<<>>,#{}]) = (catch maps:map(id(<<>>),#{})),
+ ok.
+
+
+t_size_1(Config) when is_list(Config) ->
+ 0 = maps:size(#{}),
+ 10 = maps:size(maps:from_list([{{"k",I},I}||I<-lists:seq(1,10)])),
+ 20 = maps:size(maps:from_list([{{"k",I},I}||I<-lists:seq(1,20)])),
+ 30 = maps:size(maps:from_list([{{"k",I},I}||I<-lists:seq(1,30)])),
+ 40 = maps:size(maps:from_list([{{"k",I},I}||I<-lists:seq(1,40)])),
+ 50 = maps:size(maps:from_list([{{"k",I},I}||I<-lists:seq(1,50)])),
+ 60 = maps:size(maps:from_list([{{"k",I},I}||I<-lists:seq(1,60)])),
+ 600 = maps:size(maps:from_list([{{"k",I},I}||I<-lists:seq(1,600)])),
+
+ %% error case
+ ?badmap(a,size,[a]) = (catch maps:size(id(a))),
+ ?badmap(<<>>,size,[<<>>]) = (catch maps:size(id(<<>>))),
+ ok.
+
+id(I) -> I.
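The new cases cover maps:filter/2, maps:fold/3, maps:map/2 and maps:size/1, and pin down the error terms: a non-map argument exits with {badmap,Value} (per the ?badmap macro above), while a bad fun or key list gives badarg. A small sketch of the happy-path calls plus one error case (values illustrative):

    maps_demo() ->
        M = #{a => 1, b => 2, c => 3},
        #{b := 2} = maps:filter(fun(_K, V) -> V rem 2 =:= 0 end, M),
        6 = maps:fold(fun(_K, V, Acc) -> Acc + V end, 0, M),
        #{a := 11, b := 12, c := 13} = maps:map(fun(_K, V) -> V + 10 end, M),
        3 = maps:size(M),
        %% Non-map argument: exits with {badmap, Value}, as asserted above.
        {'EXIT', {{badmap, not_a_map}, _}} = (catch maps:size(not_a_map)),
        ok.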
diff --git a/lib/stdlib/test/qlc_SUITE.erl b/lib/stdlib/test/qlc_SUITE.erl
index 4173a40d14..56829fac5c 100644
--- a/lib/stdlib/test/qlc_SUITE.erl
+++ b/lib/stdlib/test/qlc_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -396,7 +396,8 @@ nomatch(Config) when is_list(Config) ->
qlc:q([3 || {3=4} <- []]).
">>,
[],
- {warnings,[{{2,27},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{2,27},qlc,nomatch_pattern}]}},
+ {warnings,[{2,v3_core,nomatch}]}},
{nomatch2,
<<"nomatch() ->
@@ -407,7 +408,8 @@ nomatch(Config) when is_list(Config) ->
end, [{1},{2}]).
">>,
[],
- {warnings,[{{3,33},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{3,33},qlc,nomatch_pattern}]}},
+ {warnings,[{3,v3_core,nomatch}]}},
{nomatch3,
<<"nomatch() ->
@@ -419,7 +421,8 @@ nomatch(Config) when is_list(Config) ->
end, [{1,2},{2,3}]).
">>,
[],
- {warnings,[{{3,52},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{3,52},qlc,nomatch_pattern}]}},
+ {warnings,[{3,v3_core,nomatch}]}},
{nomatch4,
<<"nomatch() ->
@@ -2487,8 +2490,11 @@ info(Config) when is_list(Config) ->
(catch qlc:info([X || {X} <- []], {n_elements, 0})),
L = lists:seq(1, 1000),
\"[1,2,3,4,5,6,7,8,9,10|'...']\" = qlc:info(L, {n_elements, 10}),
- {cons,1,{integer,1,1},{atom,1,'...'}} =
+ {cons,A1,{integer,A2,1},{atom,A3,'...'}} =
qlc:info(L, [{n_elements, 1},{format,abstract_code}]),
+ 1 = erl_anno:line(A1),
+ 1 = erl_anno:line(A2),
+ 1 = erl_anno:line(A3),
Q = qlc:q([{X} || X <- [a,b,c,d,e,f]]),
{call,_,_,[{cons,_,{atom,_,a},{cons,_,{atom,_,b},{cons,_,{atom,_,c},
{atom,_,'...'}}}},
@@ -2905,7 +2911,8 @@ lookup1(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{1},{a}])">>,
- {warnings,[{{2,37},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{2,37},qlc,nomatch_pattern}]}},
+ []},
<<"etsc(fun(E) ->
Q = qlc:q([X || {X=X,Y=Y}={Y=Y,X=X} <- ets:table(E),
@@ -2933,7 +2940,8 @@ lookup1(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{a},{b}])">>,
- {warnings,[{{2,35},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{2,35},qlc,nomatch_pattern}]}},
+ []},
{cres,
<<"etsc(fun(E) ->
@@ -2941,7 +2949,8 @@ lookup1(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{a},{b}])">>,
- {warnings,[{{2,35},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{2,35},qlc,nomatch_pattern}]}},
+ []},
<<"etsc(fun(E) ->
Q = qlc:q([X || X = <<X>> <- ets:table(E)]),
@@ -2988,7 +2997,8 @@ lookup1(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{a,b,c},{d,e,f}])">>,
- {warnings,[{{2,34},qlc,nomatch_pattern}]}}
+ %% {warnings,[{{2,34},qlc,nomatch_pattern}]}}
+ []}
],
?line run(Config, Ts),
@@ -3021,8 +3031,9 @@ lookup2(Config) when is_list(Config) ->
end, [{3,true},{4,true}])">>,
<<"%% Only guards are inspected. No lookup.
- E1 = create_ets(1, 10),
- E2 = ets:new(join, []),
+ E1 = ets:new(e, [ordered_set]),
+ true = ets:insert(E1, [{1,1}, {2,2}, {3,3}, {4,4}, {5,5}]),
+ E2 = ets:new(join, [ordered_set]),
true = ets:insert(E2, [{true,1},{false,2}]),
Q = qlc:q([{X,Z} || {_,X} <- ets:table(E1),
{Y,Z} <- ets:table(E2),
@@ -3051,7 +3062,8 @@ lookup2(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{1}, {2}])">>,
- {warnings,[{{3,46},qlc,nomatch_filter}]}},
+ %% {warnings,[{{3,46},qlc,nomatch_filter}]}},
+ []},
{cres,
<<"etsc(fun(E) ->
@@ -3060,7 +3072,8 @@ lookup2(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{1}, {2}])">>,
- {warnings,[{{3,43},qlc,nomatch_filter}]}},
+ %% {warnings,[{{3,43},qlc,nomatch_filter}]}},
+ []},
{cres,
<<"etsc(fun(E) ->
@@ -3069,7 +3082,8 @@ lookup2(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{1}, {2}])">>,
- {warnings,[{{3,48},qlc,nomatch_filter}]}},
+ %% {warnings,[{{3,48},qlc,nomatch_filter}]}},
+ []},
<<"etsc(fun(E) ->
Q = qlc:q([{X,Y} || {X,Y} <- ets:table(E),
@@ -3084,7 +3098,8 @@ lookup2(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{[3]},{[3,4]}])">>,
- {warnings,[{{2,61},qlc,nomatch_filter}]}},
+ %% {warnings,[{{2,61},qlc,nomatch_filter}]}},
+ []},
<<"etsc(fun(E) ->
U = 18,
@@ -3116,7 +3131,8 @@ lookup2(Config) when is_list(Config) ->
[] = lists:sort(qlc:e(Q)),
false = lookup_keys(Q)
end, [{2},{3},{4},{8}])">>,
- {warnings,[{{4,44},qlc,nomatch_filter}]}},
+ %% {warnings,[{{4,44},qlc,nomatch_filter}]}},
+ []},
{cres,
<<"etsc(fun(E) ->
@@ -3126,7 +3142,8 @@ lookup2(Config) when is_list(Config) ->
[] = lists:sort(qlc:e(Q)),
false = lookup_keys(Q)
end, [{2},{3},{4},{8}])">>,
- {warnings,[{{4,35},qlc,nomatch_filter}]}},
+ %% {warnings,[{{4,35},qlc,nomatch_filter}]}},
+ []},
<<"F = fun(U) ->
Q = qlc:q([X || {X} <- [a,b,c],
@@ -3142,7 +3159,8 @@ lookup2(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{1,1},{2,1}])">>,
- {warnings,[{{2,61},qlc,nomatch_filter}]}},
+ %% {warnings,[{{2,61},qlc,nomatch_filter}]}},
+ []},
<<"Two = 2.0,
etsc(fun(E) ->
@@ -3203,8 +3221,10 @@ lookup2(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{1,b},{2,3}])">>,
+ %% {warnings,[{2,sys_core_fold,nomatch_guard},
+ %% {3,qlc,nomatch_filter},
+ %% {3,sys_core_fold,{eval_failure,badarg}}]}},
{warnings,[{2,sys_core_fold,nomatch_guard},
- {3,qlc,nomatch_filter},
{3,sys_core_fold,{eval_failure,badarg}}]}},
<<"etsc(fun(E) ->
@@ -3227,7 +3247,8 @@ lookup2(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{{1}},{{2}}])">>,
- {warnings,[{{4,47},qlc,nomatch_filter}]}},
+ %% {warnings,[{{4,47},qlc,nomatch_filter}]}},
+ []},
{cres,
<<"etsc(fun(E) ->
@@ -3237,7 +3258,8 @@ lookup2(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{{1}},{{2}}])">>,
- {warnings,[{{4,47},qlc,nomatch_filter}]}},
+ %% {warnings,[{{4,47},qlc,nomatch_filter}]}},
+ []},
<<"etsc(fun(E) ->
Q = qlc:q([X || {X} <- ets:table(E),
@@ -3297,7 +3319,8 @@ lookup2(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{3}, {4}])">>,
- {warnings,[{{3,44},qlc,nomatch_filter}]}},
+ %% {warnings,[{{3,44},qlc,nomatch_filter}]}},
+ []},
<<"etsc(fun(E) ->
Q = qlc:q([X || {{X,Y}} <- ets:table(E),
@@ -3418,7 +3441,8 @@ lookup2(Config) when is_list(Config) ->
end, [{1},{2}])">>
],
- ?line run(Config, Ts),
+
+ ok = run(Config, Ts),
TsR = [
%% is_record/2,3:
@@ -3456,7 +3480,8 @@ lookup2(Config) when is_list(Config) ->
end, [{keypos,1}], [#r{}])">>
],
- ?line run(Config, <<"-record(r, {a}).\n">>, TsR),
+
+ ok = run(Config, <<"-record(r, {a}).\n">>, TsR),
Ts2 = [
<<"etsc(fun(E) ->
@@ -3566,7 +3591,6 @@ lookup2(Config) when is_list(Config) ->
[{1,2},{2,2}] = qlc:e(Q),
[2] = lookup_keys(Q)
end, [{keypos,1}], [{1},{2},{3}])">>,
-
<<"%% Matchspec only. No cache.
etsc(fun(E) ->
Q = qlc:q([{X,Y} ||
@@ -3578,7 +3602,7 @@ lookup2(Config) when is_list(Config) ->
{generate,_,
{table,{ets,_,[_,[{traverse,_}]]}}}],[]} =
i(Q),
- [{1,2},{1,3},{2,2},{2,3}] = qlc:e(Q),
+ [{1,2},{1,3},{2,2},{2,3}] = lists:sort(qlc:e(Q)),
false = lookup_keys(Q)
end, [{keypos,1}], [{1},{2},{3}])">>,
<<"%% Matchspec only. Cache
@@ -3592,7 +3616,7 @@ lookup2(Config) when is_list(Config) ->
{generate,_,{qlc,_,
[{generate,_,{table,{ets,_,[_,[{traverse,_}]]}}}],
[{cache,ets}]}}],[]} = i(Q),
- [{1,2},{1,3},{2,2},{2,3}] = qlc:e(Q),
+ [{1,2},{1,3},{2,2},{2,3}] = lists:sort(qlc:e(Q)),
false = lookup_keys(Q)
end, [{keypos,1}], [{1},{2},{3}])">>,
<<"%% An empty list. Always unique and cached.
@@ -3645,7 +3669,7 @@ lookup2(Config) when is_list(Config) ->
],
- ?line run(Config, Ts2),
+ ok = run(Config, Ts2),
LTs = [
<<"etsc(fun(E) ->
@@ -3677,7 +3701,8 @@ lookup2(Config) when is_list(Config) ->
end, [{1,a},{2,b}])">>
],
- ?line run(Config, LTs),
+
+ ok = run(Config, LTs),
ok.
@@ -3700,7 +3725,8 @@ lookup_rec(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{keypos,2}], [#r{a = 17}, #r{a = 3}, #r{a = 5}])">>,
- {warnings,[{{4,44},qlc,nomatch_filter}]}},
+ %% {warnings,[{{4,44},qlc,nomatch_filter}]}},
+ []},
<<"%% Compares an integer and a float.
etsc(fun(E) ->
@@ -4004,7 +4030,8 @@ skip_filters(Config) when is_list(Config) ->
[] = qlc:e(Q),
false = lookup_keys(Q)
end, [{1,1},{2,0}])">>,
- {warnings,[{{4,37},qlc,nomatch_filter}]}},
+ %% {warnings,[{{4,37},qlc,nomatch_filter}]}},
+ []},
<<"etsc(fun(E) ->
Q = qlc:q([{A,B,C} ||
@@ -6093,7 +6120,7 @@ otp_6964(Config) when is_list(Config) ->
lists:flatten(qlc:format_error(ErrReply)),
qlc_SUITE:install_error_logger(),
20000 = length(F(warning_msg)),
- {error, joining} = qlc_SUITE:read_error_logger(),
+ {warning, joining} = qlc_SUITE:read_error_logger(),
20000 = length(F(info_msg)),
{info, joining} = qlc_SUITE:read_error_logger(),
20000 = length(F(error_msg)),
@@ -6128,8 +6155,8 @@ otp_6964(Config) when is_list(Config) ->
{error, caching} = qlc_SUITE:read_error_logger(),
{error, caching} = qlc_SUITE:read_error_logger(),
1 = length(F(warning_msg)),
- {error, caching} = qlc_SUITE:read_error_logger(),
- {error, caching} = qlc_SUITE:read_error_logger(),
+ {warning, caching} = qlc_SUITE:read_error_logger(),
+ {warning, caching} = qlc_SUITE:read_error_logger(),
1 = length(F(info_msg)),
{info, caching} = qlc_SUITE:read_error_logger(),
{info, caching} = qlc_SUITE:read_error_logger(),
@@ -6161,7 +6188,7 @@ otp_6964(Config) when is_list(Config) ->
L = F(info_msg),
{info, sorting} = qlc_SUITE:read_error_logger(),
L = F(warning_msg),
- {error, sorting} = qlc_SUITE:read_error_logger(),
+ {warning, sorting} = qlc_SUITE:read_error_logger(),
qlc_SUITE:uninstall_error_logger(),
ets:delete(E1),
ets:delete(E2)">>],
@@ -6188,7 +6215,7 @@ otp_6964(Config) when is_list(Config) ->
R = lists:sort(F(error_msg)),
{error, caching} = qlc_SUITE:read_error_logger(),
R = lists:sort(F(warning_msg)),
- {error, caching} = qlc_SUITE:read_error_logger(),
+ {warning, caching} = qlc_SUITE:read_error_logger(),
qlc_SUITE:uninstall_error_logger(),
ErrReply = F(not_allowed),
{error,qlc,{tmpdir_usage,caching}} = ErrReply,
@@ -6217,8 +6244,9 @@ otp_7238(Config) when is_list(Config) ->
<<"nomatch_1() ->
{qlc:q([X || X={X} <- []]), [t || \"a\"=\"b\" <- []]}.">>,
[],
- {warnings,[{{2,30},qlc,nomatch_pattern},
- {{2,44},v3_core,nomatch}]}},
+ %% {warnings,[{{2,30},qlc,nomatch_pattern},
+ %% {{2,44},v3_core,nomatch}]}},
+ {warnings,[{2,v3_core,nomatch}]}},
%% Not found by qlc...
{nomatch_2,
@@ -6231,7 +6259,8 @@ otp_7238(Config) when is_list(Config) ->
<<"nomatch_3() ->
qlc:q([t || [$a, $b] = \"ba\" <- []]).">>,
[],
- {warnings,[{{2,37},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{2,37},qlc,nomatch_pattern}]}},
+ {warnings,[{2,v3_core,nomatch}]}},
%% Not found by qlc...
{nomatch_4,
@@ -6252,44 +6281,51 @@ otp_7238(Config) when is_list(Config) ->
qlc:q([X || X <- [],
X =:= {X}]).">>,
[],
- {warnings,[{{3,30},qlc,nomatch_filter}]}},
+ %% {warnings,[{{3,30},qlc,nomatch_filter}]}},
+ []},
{nomatch_7,
<<"nomatch_7() ->
qlc:q([X || {X=Y,{Y}=X} <- []]).">>,
[],
- {warnings,[{{2,28},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{2,28},qlc,nomatch_pattern}]}},
+ []},
{nomatch_8,
<<"nomatch_8() ->
qlc:q([X || {X={},X=[]} <- []]).">>,
[],
- {warnings,[{{2,28},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{2,28},qlc,nomatch_pattern}]}},
+ []},
{nomatch_9,
<<"nomatch_9() ->
qlc:q([X || X <- [], X =:= {}, X =:= []]).">>,
[],
- {warnings,[{{2,49},qlc,nomatch_filter}]}},
+ %% {warnings,[{{2,49},qlc,nomatch_filter}]}},
+ []},
{nomatch_10,
<<"nomatch_10() ->
qlc:q([X || X <- [],
((X =:= 1) or (X =:= 2)) and (X =:= 3)]).">>,
[],
- {warnings,[{{3,53},qlc,nomatch_filter}]}},
+ %% {warnings,[{{3,53},qlc,nomatch_filter}]}},
+ []},
{nomatch_11,
<<"nomatch_11() ->
qlc:q([X || X <- [], x =:= []]).">>,
[],
- {warnings,[{{2,39},qlc,nomatch_filter}]}},
+ %% {warnings,[{{2,39},qlc,nomatch_filter}]}},
+ {warnings,[{2,sys_core_fold,nomatch_guard}]}},
{nomatch_12,
<<"nomatch_12() ->
qlc:q([X || X={} <- [], X =:= []]).">>,
[],
- {warnings,[{{2,42},qlc,nomatch_filter}]}},
+ %% {warnings,[{{2,42},qlc,nomatch_filter}]}},
+ []},
{nomatch_13,
<<"nomatch_13() ->
@@ -6297,8 +6333,9 @@ otp_7238(Config) when is_list(Config) ->
X={X} <- [],
Y={Y} <- []]).">>,
[],
- {warnings,[{{3,29},qlc,nomatch_pattern},
- {{4,29},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{3,29},qlc,nomatch_pattern},
+ %% {{4,29},qlc,nomatch_pattern}]}},
+ []},
{nomatch_14,
<<"nomatch_14() ->
@@ -6306,7 +6343,8 @@ otp_7238(Config) when is_list(Config) ->
1 > 0,
1 > X]).">>,
[],
- {warnings,[{{2,29},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{2,29},qlc,nomatch_pattern}]}},
+ []},
{nomatch_15,
<<"nomatch_15() ->
@@ -6315,7 +6353,8 @@ otp_7238(Config) when is_list(Config) ->
1 > 0,
1 > X]).">>,
[],
- {warnings,[{{2,32},qlc,nomatch_pattern}]}},
+ %% {warnings,[{{2,32},qlc,nomatch_pattern}]}},
+ []},
%% Template warning.
{nomatch_template1,
@@ -6553,18 +6592,19 @@ otp_7238(Config) when is_list(Config) ->
?line run(Config, T2),
T3 = [
- {nomatch_6,
- <<"nomatch_6() ->
- qlc:q([X || X <- [],
- X =:= {X}]).">>,
- [],
- {[],["filter evaluates to 'false'"]}},
-
- {nomatch_7,
- <<"nomatch_7() ->
- qlc:q([X || {X=Y,{Y}=X} <- []]).">>,
- [],
- {[],["pattern cannot possibly match"]}}],
+%% {nomatch_6,
+%% <<"nomatch_6() ->
+%% qlc:q([X || X <- [],
+%% X =:= {X}]).">>,
+%% [],
+%% {[],["filter evaluates to 'false'"]}},
+
+%% {nomatch_7,
+%% <<"nomatch_7() ->
+%% qlc:q([X || {X=Y,{Y}=X} <- []]).">>,
+%% [],
+%% {[],["pattern cannot possibly match"]}}
+ ],
?line compile_format(Config, T3),
%% *Very* simple test - just check that it doesn't crash.
@@ -6822,7 +6862,8 @@ otp_6674(Config) when is_list(Config) ->
A == 192, B =:= 192.0,
{Y} <- [{0},{1},{2}],
X == Y]),
- {block,0,
+ A0 = erl_anno:new(0),
+ {block,A0,
[{match,_,_,
{call,_,_,
[{lc,_,_,
@@ -7392,7 +7433,8 @@ try_old_join_info(Config) ->
{ok, M} = compile:file(File, [{outdir, ?datadir}]),
{module, M} = code:load_abs(filename:rootname(File)),
H = M:create_handle(),
- {block,0,
+ A0 = erl_anno:new(0),
+ {block,A0,
[{match,_,_,
{call,_,_,
[{lc,_,_,
@@ -7772,8 +7814,8 @@ table(List, Indices, KeyPos, ParentFun) ->
end,
FormatFun = fun(all) ->
- L = 17,
- {call,L,{remote,L,{atom,1,?MODULE},{atom,L,the_list}},
+ L = erl_anno:new(17),
+ {call,L,{remote,L,{atom,L,?MODULE},{atom,L,the_list}},
[erl_parse:abstract(List, 17)]};
({lookup, Column, Values}) ->
{?MODULE, list_keys, [Values, Column, List]}
@@ -8136,6 +8178,8 @@ read_error_logger() ->
{error, Why};
{info, Why} ->
{info, Why};
+ {warning, Why} ->
+ {warning, Why};
{error, Pid, Tuple} ->
{error, Pid, Tuple}
after 1000 ->
@@ -8150,8 +8194,7 @@ read_error_logger() ->
init(Tester) ->
{ok, Tester}.
-handle_event({error, _GL, {_Pid, _Msg, [Why, _]}}, Tester)
- when is_atom(Why) ->
+handle_event({error, _GL, {_Pid, _Msg, [Why, _]}}, Tester) when is_atom(Why) ->
Tester ! {error, Why},
{ok, Tester};
handle_event({error, _GL, {_Pid, _Msg, [P, T]}}, Tester) when is_pid(P) ->
@@ -8160,6 +8203,9 @@ handle_event({error, _GL, {_Pid, _Msg, [P, T]}}, Tester) when is_pid(P) ->
handle_event({info_msg, _GL, {_Pid, _Msg, [Why, _]}}, Tester) ->
Tester ! {info, Why},
{ok, Tester};
+handle_event({warning_msg, _GL, {_Pid, _Msg, [Why, _]}}, Tester) when is_atom(Why) ->
+ Tester ! {warning, Why},
+ {ok, Tester};
handle_event(_Event, State) ->
{ok, State}.
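Besides downgrading the nomatch diagnostics to compiler warnings and adding a warning_msg handler clause, the suite now treats abstract-code annotations as opaque: expected annotations are built with erl_anno:new/1 and matched-out ones are read with erl_anno:line/1 instead of being compared to bare integers. A minimal sketch, assuming the erl_anno module used above:

    %% erl_parse:abstract(Data, Line) annotates the whole form with Line;
    %% read annotations back with erl_anno:line/1 rather than matching 17.
    anno_demo() ->
        {cons, A, {integer, _, 1}, {nil, _}} = erl_parse:abstract([1], 17),
        17 = erl_anno:line(A),
        0 = erl_anno:line(erl_anno:new(0)),
        ok.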
diff --git a/lib/stdlib/test/rand_SUITE.erl b/lib/stdlib/test/rand_SUITE.erl
new file mode 100644
index 0000000000..39ce1bd89a
--- /dev/null
+++ b/lib/stdlib/test/rand_SUITE.erl
@@ -0,0 +1,527 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2000-2011. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+
+-module(rand_SUITE).
+-export([all/0, suite/0,groups/0,
+ init_per_suite/1, end_per_suite/1,
+ init_per_group/2,end_per_group/2,
+ init_per_testcase/2, end_per_testcase/2
+ ]).
+
+-export([interval_int/1, interval_float/1, seed/1,
+ api_eq/1, reference/1, basic_stats/1,
+ plugin/1, measure/1
+ ]).
+
+-export([test/0, gen/1]).
+
+-include_lib("test_server/include/test_server.hrl").
+
+% Default timetrap timeout (set in init_per_testcase).
+-define(default_timeout, ?t:minutes(3)).
+-define(LOOP, 1000000).
+
+init_per_testcase(_Case, Config) ->
+ Dog = ?t:timetrap(?default_timeout),
+ [{watchdog, Dog} | Config].
+end_per_testcase(_Case, Config) ->
+ Dog = ?config(watchdog, Config),
+ test_server:timetrap_cancel(Dog),
+ ok.
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ [seed, interval_int, interval_float,
+ api_eq,
+ reference,
+ basic_stats,
+ plugin, measure
+ ].
+
+groups() -> [].
+
+init_per_suite(Config) -> Config.
+end_per_suite(_Config) -> ok.
+
+init_per_group(_GroupName, Config) -> Config.
+end_per_group(_GroupName, Config) -> Config.
+
+%% A simple helper to test without test_server during dev
+test() ->
+ Tests = all(),
+ lists:foreach(fun(Test) ->
+ try
+ ok = ?MODULE:Test([]),
+ io:format("~p: ok~n", [Test])
+ catch _:Reason ->
+ io:format("Failed: ~p: ~p ~p~n",
+ [Test, Reason, erlang:get_stacktrace()])
+ end
+ end, Tests).
+
+algs() ->
+ [exs64, exsplus, exs1024].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+seed(doc) ->
+    ["Test that seed, seed_s, and export_seed/0 are working."];
+seed(suite) ->
+ [];
+seed(Config) when is_list(Config) ->
+ Algs = algs(),
+ Test = fun(Alg) ->
+ try seed_1(Alg)
+ catch _:Reason ->
+ test_server:fail({Alg, Reason, erlang:get_stacktrace()})
+ end
+ end,
+ [Test(Alg) || Alg <- Algs],
+ ok.
+
+seed_1(Alg) ->
+ %% Check that uniform seeds automatically,
+ _ = rand:uniform(),
+ S00 = get(rand_seed),
+ erase(),
+ _ = rand:uniform(),
+ false = S00 =:= get(rand_seed), %% hopefully
+
+ %% Choosing algo and seed
+ S0 = rand:seed(Alg, {0, 0, 0}),
+ %% Check that (documented?) process_dict variable is correct
+ S0 = get(rand_seed),
+ S0 = rand:seed_s(Alg, {0, 0, 0}),
+    %% Check that the process dictionary is not updated by seed_s
+ _ = rand:seed_s(Alg, {1, 0, 0}),
+ S0 = get(rand_seed),
+ %% Test export
+ ES0 = rand:export_seed(),
+ ES0 = rand:export_seed_s(S0),
+ S0 = rand:seed(ES0),
+ S0 = rand:seed_s(ES0),
+ %% seed/1 calls should be unique
+ S1 = rand:seed(Alg),
+ false = (S1 =:= rand:seed_s(Alg)),
+    %% Negative integers work
+ _ = rand:seed_s(Alg, {-1,-1,-1}),
+
+    %% Other terms do not work
+ {'EXIT', _} = (catch rand:seed_s(foobar, os:timestamp())),
+ {'EXIT', _} = (catch rand:seed_s(Alg, {asd, 1, 1})),
+ {'EXIT', _} = (catch rand:seed_s(Alg, {0, 234.1234, 1})),
+ {'EXIT', _} = (catch rand:seed_s(Alg, {0, 234, [1, 123, 123]})),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+api_eq(doc) ->
+    ["Check that both APIs are consistent with each other."];
+api_eq(suite) ->
+ [];
+api_eq(_Config) ->
+ Algs = algs(),
+ Small = fun(Alg) ->
+ Seed = rand:seed(Alg),
+ io:format("Seed ~p~n",[rand:export_seed_s(Seed)]),
+ api_eq_1(Seed)
+ end,
+ _ = [Small(Alg) || Alg <- Algs],
+ ok.
+
+api_eq_1(S00) ->
+ Check = fun(_, Seed) ->
+ {V0, S0} = rand:uniform_s(Seed),
+ V0 = rand:uniform(),
+ {V1, S1} = rand:uniform_s(1000000, S0),
+ V1 = rand:uniform(1000000),
+ {V2, S2} = rand:normal_s(S1),
+ V2 = rand:normal(),
+ S2
+ end,
+ S1 = lists:foldl(Check, S00, lists:seq(1, 200)),
+ S1 = get(rand_seed),
+ {V0, S2} = rand:uniform_s(S1),
+ V0 = rand:uniform(),
+ S2 = get(rand_seed),
+
+ Exported = rand:export_seed(),
+ Exported = rand:export_seed_s(S2),
+
+ S3 = lists:foldl(Check, S2, lists:seq(1, 200)),
+ S3 = get(rand_seed),
+
+ S4 = lists:foldl(Check, S3, lists:seq(1, 200)),
+ S4 = get(rand_seed),
+ %% Verify that we do not have loops
+ false = S1 =:= S2,
+ false = S2 =:= S3,
+ false = S3 =:= S4,
+
+ S2 = rand:seed(Exported),
+ S3 = lists:foldl(Check, S2, lists:seq(1, 200)),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+interval_int(doc) ->
+ ["Check that uniform/1 returns values within the proper interval."];
+interval_int(suite) ->
+ [];
+interval_int(Config) when is_list(Config) ->
+ Algs = algs(),
+ Small = fun(Alg) ->
+ Seed = rand:seed(Alg),
+ io:format("Seed ~p~n",[rand:export_seed_s(Seed)]),
+ Max = interval_int_1(100000, 7, 0),
+ Max =:= 7 orelse exit({7, Alg, Max})
+ end,
+ _ = [Small(Alg) || Alg <- Algs],
+ %% Test large integers
+ Large = fun(Alg) ->
+ Seed = rand:seed(Alg),
+ io:format("Seed ~p~n",[rand:export_seed_s(Seed)]),
+ Max = interval_int_1(100000, 1 bsl 128, 0),
+ Max > 1 bsl 64 orelse exit({large, Alg, Max})
+ end,
+ [Large(Alg) || Alg <- Algs],
+ ok.
+
+interval_int_1(0, _, Max) -> Max;
+interval_int_1(N, Top, Max) ->
+ X = rand:uniform(Top),
+ if
+ 0 < X, X =< Top ->
+ ok;
+ true ->
+ io:format("X=~p Top=~p 0<~p<~p~n", [X,Top,X,Top]),
+ exit({X, rand:export_seed()})
+ end,
+ interval_int_1(N-1, Top, max(X, Max)).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+interval_float(doc) ->
+ ["Check that uniform/0 returns values within the proper interval."];
+interval_float(suite) ->
+ [];
+interval_float(Config) when is_list(Config) ->
+ Algs = algs(),
+ Test = fun(Alg) ->
+ _ = rand:seed(Alg),
+ interval_float_1(100000)
+ end,
+ [Test(Alg) || Alg <- Algs],
+ ok.
+
+interval_float_1(0) -> ok;
+interval_float_1(N) ->
+ X = rand:uniform(),
+ if
+ 0.0 < X, X < 1.0 ->
+ ok;
+ true ->
+ io:format("X=~p 0<~p<1.0~n", [X,X]),
+ exit({X, rand:export_seed()})
+ end,
+ interval_float_1(N-1).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+reference(doc) -> ["Check that the algorithms generate their reference sequences."];
+reference(suite) -> [];
+reference(Config) when is_list(Config) ->
+ [reference_1(Alg) || Alg <- algs()],
+ ok.
+
+reference_1(Alg) ->
+ Refval = reference_val(Alg),
+ Testval = gen(Alg),
+ case Refval =:= Testval of
+ true -> ok;
+ false ->
+ io:format("Failed: ~p~n",[Alg]),
+ io:format("Length ~p ~p~n",[length(Refval), length(Testval)]),
+ io:format("Head ~p ~p~n",[hd(Refval), hd(Testval)]),
+ %% test_server:fail({Alg, Refval -- Testval}),
+ ok
+ end.
+
+gen(Algo) ->
+ Seed = case Algo of
+ exsplus -> %% Printed with orig 'C' code and this seed
+ rand:seed_s({exsplus, [12345678|12345678]});
+ exs64 -> %% Printed with orig 'C' code and this seed
+ rand:seed_s({exs64, 12345678});
+ exs1024 -> %% Printed with orig 'C' code and this seed
+ rand:seed_s({exs1024, {lists:duplicate(16, 12345678), []}});
+ _ ->
+ rand:seed(Algo, {100, 200, 300})
+ end,
+ gen(?LOOP, Seed, []).
+
+gen(N, State0 = {#{max:=Max}, _}, Acc) when N > 0 ->
+ {Random, State} = rand:uniform_s(Max, State0),
+ case N rem (?LOOP div 100) of
+ 0 -> gen(N-1, State, [Random|Acc]);
+ _ -> gen(N-1, State, Acc)
+ end;
+gen(_, _, Acc) -> lists:reverse(Acc).
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% This just tests the basics, so that we have not made any serious errors
+%% in the conversion from the original algorithms.
+%% The algorithms must have good properties to begin with.
+%%
+
+basic_stats(doc) -> ["Check that the algorithms generate sound values."];
+basic_stats(suite) -> [];
+basic_stats(Config) when is_list(Config) ->
+ io:format("Testing uniform~n",[]),
+ [basic_uniform_1(?LOOP, rand:seed_s(Alg), 0.0, array:new([{default, 0}]))
+ || Alg <- algs()],
+ [basic_uniform_2(?LOOP, rand:seed_s(Alg), 0, array:new([{default, 0}]))
+ || Alg <- algs()],
+ io:format("Testing normal~n",[]),
+ [basic_normal_1(?LOOP, rand:seed_s(Alg), 0, 0) || Alg <- algs()],
+ ok.
+
+basic_uniform_1(N, S0, Sum, A0) when N > 0 ->
+ {X,S} = rand:uniform_s(S0),
+ I = trunc(X*100),
+ A = array:set(I, 1+array:get(I,A0), A0),
+ basic_uniform_1(N-1, S, Sum+X, A);
+basic_uniform_1(0, {#{type:=Alg}, _}, Sum, A) ->
+ AverN = Sum / ?LOOP,
+ io:format("~.10w: Average: ~.4f~n", [Alg, AverN]),
+ Counters = array:to_list(A),
+ Min = lists:min(Counters),
+ Max = lists:max(Counters),
+ io:format("~.10w: Min: ~p Max: ~p~n", [Alg, Min, Max]),
+
+ %% Verify that the basic statistics are ok
+    %% be gentle, we don't want to see too many failing tests
+ abs(0.5 - AverN) < 0.005 orelse test_server:fail({average, Alg, AverN}),
+ abs(?LOOP div 100 - Min) < 1000 orelse test_server:fail({min, Alg, Min}),
+ abs(?LOOP div 100 - Max) < 1000 orelse test_server:fail({max, Alg, Max}),
+ ok.
+
+basic_uniform_2(N, S0, Sum, A0) when N > 0 ->
+ {X,S} = rand:uniform_s(100, S0),
+ A = array:set(X-1, 1+array:get(X-1,A0), A0),
+ basic_uniform_2(N-1, S, Sum+X, A);
+basic_uniform_2(0, {#{type:=Alg}, _}, Sum, A) ->
+ AverN = Sum / ?LOOP,
+ io:format("~.10w: Average: ~.4f~n", [Alg, AverN]),
+ Counters = tl(array:to_list(A)),
+ Min = lists:min(Counters),
+ Max = lists:max(Counters),
+ io:format("~.10w: Min: ~p Max: ~p~n", [Alg, Min, Max]),
+
+ %% Verify that the basic statistics are ok
+    %% be gentle, we don't want to see too many failing tests
+ abs(50.5 - AverN) < 0.5 orelse test_server:fail({average, Alg, AverN}),
+ abs(?LOOP div 100 - Min) < 1000 orelse test_server:fail({min, Alg, Min}),
+ abs(?LOOP div 100 - Max) < 1000 orelse test_server:fail({max, Alg, Max}),
+ ok.
+
+basic_normal_1(N, S0, Sum, Sq) when N > 0 ->
+ {X,S} = rand:normal_s(S0),
+ basic_normal_1(N-1, S, X+Sum, X*X+Sq);
+basic_normal_1(0, {#{type:=Alg}, _}, Sum, SumSq) ->
+ Mean = Sum / ?LOOP,
+ StdDev = math:sqrt((SumSq - (Sum*Sum/?LOOP))/(?LOOP - 1)),
+ io:format("~.10w: Average: ~7.4f StdDev ~6.4f~n", [Alg, Mean, StdDev]),
+ %% Verify that the basic statistics are ok
+    %% be gentle, we don't want to see too many failing tests
+ abs(Mean) < 0.005 orelse test_server:fail({average, Alg, Mean}),
+ abs(StdDev - 1.0) < 0.005 orelse test_server:fail({stddev, Alg, StdDev}),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+plugin(doc) -> ["Test that the user can write algorithms"];
+plugin(suite) -> [];
+plugin(Config) when is_list(Config) ->
+ _ = lists:foldl(fun(_, S0) ->
+ {V1, S1} = rand:uniform_s(10000, S0),
+ true = is_integer(V1),
+ {V2, S2} = rand:uniform_s(S1),
+ true = is_float(V2),
+ S2
+ end, crypto_seed(), lists:seq(1, 200)),
+ ok.
+
+%% Test implementation
+crypto_seed() ->
+ {#{type=>crypto,
+ max=>(1 bsl 64)-1,
+ next=>fun crypto_next/1,
+ uniform=>fun crypto_uniform/1,
+ uniform_n=>fun crypto_uniform_n/2},
+ <<>>}.
+
+%% Be fair and create bignums, i.e. 64 bits; otherwise use 58 bits
+crypto_next(<<Num:64, Bin/binary>>) ->
+ {Num, Bin};
+crypto_next(_) ->
+ crypto_next(crypto:rand_bytes((64 div 8)*100)).
+
+crypto_uniform({Api, Data0}) ->
+ {Int, Data} = crypto_next(Data0),
+ {Int / (1 bsl 64), {Api, Data}}.
+
+crypto_uniform_n(N, {Api, Data0}) when N < (1 bsl 64) ->
+ {Int, Data} = crypto_next(Data0),
+ {(Int rem N)+1, {Api, Data}};
+crypto_uniform_n(N, State0) ->
+ {F,State} = crypto_uniform(State0),
+ {trunc(F * N) + 1, State}.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%% Not a test but measures the time characteristics of the different algorithms
+measure(Suite) when is_atom(Suite) -> [];
+measure(_Config) ->
+ Algos = [crypto64|algs()],
+ io:format("RNG uniform integer performance~n",[]),
+ _ = measure_1(random, fun(State) -> {int, random:uniform_s(10000, State)} end),
+ _ = [measure_1(Algo, fun(State) -> {int, rand:uniform_s(10000, State)} end) || Algo <- Algos],
+ io:format("RNG uniform float performance~n",[]),
+ _ = measure_1(random, fun(State) -> {uniform, random:uniform_s(State)} end),
+ _ = [measure_1(Algo, fun(State) -> {uniform, rand:uniform_s(State)} end) || Algo <- Algos],
+ io:format("RNG normal float performance~n",[]),
+ io:format("~.10w: not implemented (too few bits)~n", [random]),
+ _ = [measure_1(Algo, fun(State) -> {normal, rand:normal_s(State)} end) || Algo <- Algos],
+ ok.
+
+measure_1(Algo, Gen) ->
+ Parent = self(),
+ Seed = fun(crypto64) -> crypto_seed();
+ (random) -> random:seed(os:timestamp()), get(random_seed);
+ (Alg) -> rand:seed_s(Alg)
+ end,
+
+ Pid = spawn_link(fun() ->
+ Fun = fun() -> measure_2(?LOOP, Seed(Algo), Gen) end,
+ {Time, ok} = timer:tc(Fun),
+ io:format("~.10w: ~pµs~n", [Algo, Time]),
+ Parent ! {self(), ok},
+ normal
+ end),
+ receive
+ {Pid, Msg} -> Msg
+ end.
+
+measure_2(N, State0, Fun) when N > 0 ->
+ case Fun(State0) of
+ {int, {Random, State}}
+ when is_integer(Random), Random >= 1, Random =< 100000 ->
+ measure_2(N-1, State, Fun);
+ {uniform, {Random, State}} when is_float(Random), Random > 0, Random < 1 ->
+ measure_2(N-1, State, Fun);
+ {normal, {Random, State}} when is_float(Random) ->
+ measure_2(N-1, State, Fun);
+ Res ->
+ exit({error, Res, State0})
+ end;
+measure_2(0, _, _) -> ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Data
+reference_val(exs64) ->
+ [16#3737ad0c703ff6c3,16#3868a78fe71adbbd,16#1f01b62b4338b605,16#50876a917437965f,
+ 16#b2edfe32a10e27fc,16#995924551d8ebae1,16#9f1e6b94e94e0b58,16#27ec029eb0e94f8e,
+ 16#bf654e6df7fe5c,16#b7d5ef7b79be65e3,16#4bdba4d1c159126b,16#a9c816fdc701292c,
+ 16#a377b6c89d85ac8b,16#7abb5cd0e5847a6,16#62666f1fc00a0a90,16#1edc3c3d255a8113,
+ 16#dfc764073767f18e,16#381783d577ca4e34,16#49693588c085ddcb,16#da6fcb16dd5163f3,
+ 16#e2357a703475b1b7,16#aaa84c4924b5985a,16#b8fe07bb2bac1e49,16#23973ac0160ff064,
+ 16#1afbc7b023f5d618,16#9f510f7b7caa2a0f,16#d5b0a57f7f5f1084,16#d8c49b66c5f99a29,
+ 16#e920ac3b598b5213,16#1090d7e27e7a7c76,16#81171917168ee74f,16#f08489a3eb6988e,
+ 16#396260c4f0b2ed46,16#4fd0a6a6caefd5b2,16#423dff07a3b888a,16#12718773ebd99987,
+ 16#e50991e540807cb,16#8cfa03bbaa6679d6,16#55bdf86dfbb92dbf,16#eb7145378cce74a8,
+ 16#71856c224c846595,16#20461588dae6e24d,16#c73b3e63ced74bac,16#775b11813dda0c78,
+ 16#91f358e51068ede0,16#399955ef36766bc2,16#4489ee072e8a38b1,16#ba77759d52321ca0,
+ 16#14f519eab5c53db8,16#1f754bd08e4f34c4,16#99e25ca29b2fcfeb,16#da11927c0d9837f8,
+ 16#1eeb0f87009f5a87,16#a7c444d3b0db1089,16#49c7fbf0714849ad,16#4f2b693e7f8265cb,
+ 16#80e1493cbaa8f256,16#186f345bcac2661e,16#330065ae0c698d26,16#5235ed0432c42e93,
+ 16#429792e31ddb10bb,16#8769054bb6533cff,16#1ab382483444201f,16#2216368786fc7b9,
+ 16#1efea1155216da0b,16#782dc868ba595452,16#2b80f6d159617f48,16#407fc35121b2fa1b,
+ 16#90e8be6e618873d1,16#40ad4ec92a8abf8e,16#34e2890f583f435,16#838c0aef0a5d8427,
+ 16#ed4238f4bd6cbcfa,16#7feed11f7a8bb9f0,16#2b0636a93e26c89d,16#481ad4bea5180646,
+ 16#673e5ad861afe1cc,16#298eeb519d69e74d,16#eb1dd06d168c856,16#4770651519ee7ef9,
+ 16#7456ebf1bcf608f1,16#d6200f6fbd61ce05,16#c0695dfab11ab6aa,16#5bff449249983843,
+ 16#7aba88471474c9ac,16#d7e9e4a21c989e91,16#c5e02ee67ccb7ce1,16#4ea8a3a912246153,
+ 16#f2e6db7c9ce4ec43,16#39498a95d46d2470,16#c5294fcb8cce8aa9,16#a918fe444719f3dc,
+ 16#98225f754762c0c0,16#f0721204f2cb43f5,16#b98e77b099d1f2d1,16#691d6f75aee3386,
+ 16#860c7b2354ec24fd,16#33e007bd0fbcb609,16#7170ae9c20fb3d0,16#31d46938fe383a60];
+
+reference_val(exs1024) ->
+ [16#9c61311d0d4a01fd,16#ce963ef5803b703e,16#545dcffb7b644e1a,16#edd56576a8d778d5,
+ 16#16bee799783c6b45,16#336f0b3caeb417fa,16#29291b8be26dedfa,16#1efed996d2e1b1a8,
+ 16#c5c04757bd2dadf9,16#11aa6d194009c616,16#ab2b3e82bdb38a91,16#5011ee46fd2609eb,
+ 16#766db7e5b701a9bb,16#d42cb2632c419f35,16#107c6a2667bf8557,16#3ffbf922cb306967,
+ 16#1e71e3d024ac5131,16#6fdb368ec67a5f06,16#b0d8e72e7aa6d1c1,16#e5705a02dae89e3b,
+ 16#9c24eb68c086a1d3,16#418de330f55f71f0,16#2917ddeb278bc8d2,16#aeba7fba67208f39,
+ 16#10ceaf40f6af1d8d,16#47a6d06811d33132,16#603a661d6caf720a,16#a28bd0c9bcdacb3c,
+ 16#f44754f006909762,16#6e25e8e67ccc43bc,16#174378ce374a549e,16#b5598ae9f57c4e50,
+ 16#ca85807fbcd51dd,16#1816e58d6c3cc32a,16#1b4d630d3c8e96a6,16#c19b1e92b4efc5bd,
+ 16#665597b20ddd721a,16#fdab4eb21b75c0ae,16#86a612dcfea0756c,16#8fc2da192f9a55f0,
+ 16#d7c954eb1af31b5,16#6f5ee45b1b80101b,16#ebe8ea4e5a67cbf5,16#1cb952026b4c1400,
+ 16#44e62caffe7452c0,16#b591d8f3e6d7cbcf,16#250303f8d77b6f81,16#8ef2199aae4c9b8d,
+ 16#a16baa37a14d7b89,16#c006e4d2b2da158b,16#e6ec7abd54c93b31,16#e6b0d79ae2ab6fa7,
+ 16#93e4b30e4ab7d4cd,16#42a01b6a4ef63033,16#9ab1e94fe94976e,16#426644e1de302a1f,
+ 16#8e58569192200139,16#744f014a090107c1,16#15d056801d467c6c,16#51bdad3a8c30225f,
+ 16#abfc61fb3104bd45,16#c610607122272df7,16#905e67c63116ebfc,16#1e4fd5f443bdc18,
+ 16#1945d1745bc55a4c,16#f7cd2b18989595bb,16#f0d273b2c646a038,16#ee9a6fdc6fd5d734,
+ 16#541a518bdb700518,16#6e67ab9a65361d76,16#bcfadc9bfe5b2e06,16#69fa334cf3c11496,
+ 16#9657df3e0395b631,16#fc0d0442160108ec,16#2ee538da7b1f7209,16#8b20c9fae50a5a9e,
+ 16#a971a4b5c2b3b6a,16#ff6241e32489438e,16#8fd6433f45255777,16#6e6c82f10818b0dc,
+ 16#59a8fad3f6af616b,16#7eac34f43f12221c,16#6e429ec2951723ec,16#9a65179767a45c37,
+ 16#a5f8127d1e6fdf35,16#932c50bc633d8d5c,16#f3bbea4e7ebecb8,16#efc3a2bbf6a8674,
+ 16#451644a99971cb6,16#cf70776d652c150d,16#c1fe0dcb87a25403,16#9523417132b2452e,
+ 16#8f98bc30d06b980e,16#bb4b288ecb8daa9a,16#59e54beb32f78045,16#f9ab1562456b9d66,
+ 16#6435f4130304a793,16#b4bb94c2002e1849,16#49a86d1e4bade982,16#457d63d60ed52b95];
+
+reference_val(exsplus) ->
+ [16#bc76c2e638db,16#15ede2ebb16c9fb,16#185ee2c27d6b88d,16#15d5ee9feafc3a5,
+ 16#1862e91dfce3e6b,16#2c9744b0fb69e46,16#78b21bc01cef6b,16#2d16a2fae6c76ba,
+ 16#13dfccb8ff86bce,16#1d9474c59e23f4d,16#d2f67dcd7f0dd6,16#2b6d489d51a0725,
+ 16#1fa52ef484861d8,16#1ae9e2a38f966d4,16#2264ab1e193acca,16#23bbca085039a05,
+ 16#2b6eea06a0af0e1,16#3ad47fa8866ea20,16#1ec2802d612d855,16#36c1982b134d50,
+ 16#296b6a23f5b75e0,16#c5eeb600a9875c,16#2a3fd51d735f9d4,16#56fafa3593a070,
+ 16#13e9d416ec0423e,16#28101a91b23e9dc,16#32e561eb55ce15a,16#94a7dbba66fe4a,
+ 16#2e1845043bcec1f,16#235f7513a1b5146,16#e37af1bf2d63cb,16#2048033824a1639,
+ 16#c255c750995f7,16#2c7542058e89ee3,16#204dfeefbdb62ba,16#f5a936ec63dd66,
+ 16#33b3b7dbbbd8b90,16#c4f0f79026ffe9,16#20ffee2d37aca13,16#2274f931716be2c,
+ 16#29b883902ba9df1,16#1a838cd5312717f,16#2edfc49ff3dc1d6,16#418145cbec84c2,
+ 16#d2d8f1a17d49f,16#d41637bfa4cc6f,16#24437e03a0f5df8,16#3d1d87919b94a90,
+ 16#20d6997b36769b6,16#16f9d7855cd87ca,16#821ef7e2a062a3,16#2c4d11dc4a2da70,
+ 16#24a3b27f56ed26b,16#144b23c8b97387a,16#34a2ced56930d12,16#21cc0544113a017,
+ 16#3e780771f634fb2,16#146c259c02e7e18,16#1d99e4cfad0ef1,16#fdf3dabefc6b3a,
+ 16#7d0806e4d12dfb,16#3e3ae3580532eae,16#2456544200fbd86,16#f83aad4e88db85,
+ 16#37c134779463b4d,16#21a20bf64b6e735,16#1c0585ac88b69f2,16#1b3fcea8dd30e56,
+ 16#334bc301aefd97,16#37066eb7e80a946,16#15a19a6331b570f,16#35e67fa43c3f7d0,
+ 16#152a4020145fb80,16#8d55139491dfbe,16#21d9cba585c059d,16#31475f363654635,
+ 16#2567b17acb7a104,16#39201be3a7681c5,16#6bc675fd26b601,16#334b93232b1b1e3,
+ 16#357c402cb732c6a,16#362e32efe4db46a,16#8edc7ae3da51e5,16#31573376785eac9,
+ 16#6c6145ffa1169d,16#18ec2c393d45359,16#1f1a5f256e7130c,16#131cc2f49b8004f,
+ 16#36f715a249f4ec2,16#1c27629826c50d3,16#914d9a6648726a,16#27f5bf5ce2301e8,
+ 16#3dd493b8012970f,16#be13bed1e00e5c,16#ceef033b74ae10,16#3da38c6a50abe03,
+ 16#15cbd1a421c7a8c,16#22794e3ec6ef3b1,16#26154d26e7ea99f,16#3a66681359a6ab6].
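rand_SUITE drives the new rand module through both API flavours: the stateful one that keeps the seed in the process dictionary (rand:seed/2, rand:uniform/0,1) and the pure one that threads the state explicitly (rand:seed_s/2, rand:uniform_s/1,2), plus the plugin state illustrated by crypto_seed/0 above. A minimal sketch of the two flavours (seed values illustrative):

    rand_demo() ->
        %% Stateful API: the seed lives in the process dictionary.
        _ = rand:seed(exsplus, {1, 2, 3}),
        X = rand:uniform(),       %% float, 0.0 < X < 1.0
        N = rand:uniform(100),    %% integer in 1..100
        true = (X > 0.0) andalso (X < 1.0),
        true = (N >= 1) andalso (N =< 100),

        %% Functional API: the state is passed and returned explicitly.
        S0 = rand:seed_s(exs1024, {1, 2, 3}),
        {Y, S1} = rand:uniform_s(S0),
        {M, _S2} = rand:uniform_s(100, S1),
        true = (Y > 0.0) andalso (Y < 1.0),
        true = (M >= 1) andalso (M =< 100),
        ok.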
diff --git a/lib/stdlib/test/random_SUITE.erl b/lib/stdlib/test/random_SUITE.erl
index ac9d1a6c06..22c0900651 100644
--- a/lib/stdlib/test/random_SUITE.erl
+++ b/lib/stdlib/test/random_SUITE.erl
@@ -82,7 +82,7 @@ seed(suite) ->
[];
seed(Config) when is_list(Config) ->
?line Self = self(),
- ?line Seed = {S1, S2, S3} = now(),
+ Seed = {S1, S2, S3} = erlang:timestamp(),
?line _ = spawn(fun() ->
random:seed(S1,S2,S3),
Rands = lists:foldl(fun
diff --git a/lib/stdlib/test/select_SUITE.erl b/lib/stdlib/test/select_SUITE.erl
index 546c25f954..201c38b25a 100644
--- a/lib/stdlib/test/select_SUITE.erl
+++ b/lib/stdlib/test/select_SUITE.erl
@@ -211,7 +211,7 @@ init_random(Config) ->
{ok,[X]} ->
X;
_ ->
- {A,B,C} = erlang:now(),
+ {A,B,C} = erlang:timestamp(),
random:seed(A,B,C),
get(random_seed)
end,
diff --git a/lib/stdlib/test/sets_SUITE.erl b/lib/stdlib/test/sets_SUITE.erl
index c0cf1fc7e8..24f5d65f82 100644
--- a/lib/stdlib/test/sets_SUITE.erl
+++ b/lib/stdlib/test/sets_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -28,7 +28,7 @@
create/1,add_element/1,del_element/1,
subtract/1,intersection/1,union/1,is_subset/1,
is_set/1,fold/1,filter/1,
- take_smallest/1,take_largest/1]).
+ take_smallest/1,take_largest/1, iterate/1]).
-include_lib("test_server/include/test_server.hrl").
@@ -48,7 +48,7 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
[create, add_element, del_element, subtract,
intersection, union, is_subset, is_set, fold, filter,
- take_smallest, take_largest].
+ take_smallest, take_largest, iterate].
groups() ->
[].
@@ -426,6 +426,44 @@ take_largest_3(S0, List0, M) ->
take_largest_3(S, List, M)
end.
+iterate(Config) when is_list(Config) ->
+ test_all(fun iterate_1/1).
+
+iterate_1(M) ->
+ case M(module, []) of
+ gb_sets -> iterate_2(M);
+ _ -> ok
+ end,
+ M(empty, []).
+
+iterate_2(M) ->
+ random:seed(1, 2, 42),
+ iter_set(M, 1000).
+
+iter_set(_M, 0) ->
+ ok;
+iter_set(M, N) ->
+ L = [I || I <- lists:seq(1, N)],
+ T = M(from_list, L),
+ L = lists:reverse(iterate_set(M, T)),
+ R = random:uniform(N),
+ S = lists:reverse(iterate_set(M, R, T)),
+ S = [E || E <- L, E >= R],
+ iter_set(M, N-1).
+
+iterate_set(M, Set) ->
+ I = M(iterator, Set),
+ iterate_set_1(M, M(next, I), []).
+
+iterate_set(M, Start, Set) ->
+ I = M(iterator_from, {Start, Set}),
+ iterate_set_1(M, M(next, I), []).
+
+iterate_set_1(_, none, R) ->
+ R;
+iterate_set_1(M, {E, I}, R) ->
+ iterate_set_1(M, M(next, I), [E | R]).
+
%%%
%%% Helper functions.
%%%
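The new iterate/1 case only runs for gb_sets, where iterator/1 walks the whole set in ascending order, iterator_from/2 starts at the first element greater than or equal to Start, and next/1 returns {Element, NextIter} or none. A short sketch (values illustrative):

    gb_sets_iter_demo() ->
        S = gb_sets:from_list([1, 2, 3, 4, 5]),
        I0 = gb_sets:iterator_from(3, S),
        {3, I1} = gb_sets:next(I0),
        {4, I2} = gb_sets:next(I1),
        {5, I3} = gb_sets:next(I2),
        none = gb_sets:next(I3),
        ok.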
diff --git a/lib/stdlib/test/sets_test_lib.erl b/lib/stdlib/test/sets_test_lib.erl
index 86f009a8f9..772139406d 100644
--- a/lib/stdlib/test/sets_test_lib.erl
+++ b/lib/stdlib/test/sets_test_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -34,7 +34,10 @@ new(Mod, Eq) ->
(is_empty, S) -> is_empty(Mod, S);
(is_set, S) -> Mod:is_set(S);
(is_subset, {S,Set}) -> is_subset(Mod, Eq, S, Set);
+ (iterator, S) -> Mod:iterator(S);
+ (iterator_from, {Start, S}) -> Mod:iterator_from(Start, S);
(module, []) -> Mod;
+ (next, I) -> Mod:next(I);
(singleton, E) -> singleton(Mod, E);
(size, S) -> Mod:size(S);
(subtract, {S1,S2}) -> subtract(Mod, S1, S2);
diff --git a/lib/stdlib/test/shell_SUITE.erl b/lib/stdlib/test/shell_SUITE.erl
index f841e2c4a6..7c18560498 100644
--- a/lib/stdlib/test/shell_SUITE.erl
+++ b/lib/stdlib/test/shell_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -404,13 +404,14 @@ records(Config) when is_list(Config) ->
?line ok = file:write_file(Test, Contents),
RR5 = "rr(\"" ++ Test ++ "\", '_', {d,test1}), rl([test1,test2]).",
- ?line [{attribute,1,record,{test1,_}},ok] = scan(RR5),
+ A1 = erl_anno:new(1),
+ [{attribute,A1,record,{test1,_}},ok] = scan(RR5),
RR6 = "rr(\"" ++ Test ++ "\", '_', {d,test2}), rl([test1,test2]).",
- ?line [{attribute,1,record,{test2,_}},ok] = scan(RR6),
+ [{attribute,A1,record,{test2,_}},ok] = scan(RR6),
RR7 = "rr(\"" ++ Test ++
"\", '_', [{d,test1},{d,test2,17}]), rl([test1,test2]).",
- ?line [{attribute,1,record,{test1,_}},{attribute,1,record,{test2,_}},
- ok] = scan(RR7),
+ [{attribute,A1,record,{test1,_}},{attribute,A1,record,{test2,_}},ok] =
+ scan(RR7),
?line PreReply = scan(<<"rr(prim_file).">>), % preloaded...
?line true = is_list(PreReply),
?line Dir = filename:join(?config(priv_dir, Config), "*.erl"),
diff --git a/lib/stdlib/test/stdlib_SUITE.erl b/lib/stdlib/test/stdlib_SUITE.erl
index 6669a21b9c..206eb4fd74 100644
--- a/lib/stdlib/test/stdlib_SUITE.erl
+++ b/lib/stdlib/test/stdlib_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -172,9 +172,16 @@ major_upgrade(Config) ->
%% Version numbers are checked by ct_release_test, so there is nothing
%% more to check here...
-upgrade_init(State) ->
+upgrade_init(CtData,State) ->
+ {ok,{FromVsn,ToVsn}} = ct_release_test:get_app_vsns(CtData,stdlib),
+ case ct_release_test:get_appup(CtData,stdlib) of
+ {ok,{FromVsn,ToVsn,[restart_new_emulator],[restart_new_emulator]}} ->
+ io:format("Upgrade/downgrade ~p <--> ~p",[FromVsn,ToVsn]);
+ {error,{vsn_not_found,_}} when FromVsn==ToVsn ->
+ io:format("No upgrade test for stdlib, same version")
+ end,
State.
-upgrade_upgraded(State) ->
+upgrade_upgraded(_CtData,State) ->
State.
-upgrade_downgraded(State) ->
+upgrade_downgraded(_CtData,State) ->
State.
diff --git a/lib/stdlib/test/string_SUITE.erl b/lib/stdlib/test/string_SUITE.erl
index fccd1bef95..e9ea2e3522 100644
--- a/lib/stdlib/test/string_SUITE.erl
+++ b/lib/stdlib/test/string_SUITE.erl
@@ -120,7 +120,7 @@ chr_rchr(suite) ->
chr_rchr(doc) ->
[];
chr_rchr(Config) when is_list(Config) ->
- ?line {_,_,X} = now(),
+ {_,_,X} = erlang:timestamp(),
?line 0 = string:chr("", (X rem (255-32)) + 32),
?line 0 = string:rchr("", (X rem (255-32)) + 32),
?line 1 = string:chr("x", $x),
@@ -144,7 +144,7 @@ str_rstr(suite) ->
str_rstr(doc) ->
[];
str_rstr(Config) when is_list(Config) ->
- ?line {_,_,X} = now(),
+ {_,_,X} = erlang:timestamp(),
?line 0 = string:str("", [(X rem (255-32)) + 32]),
?line 0 = string:rstr("", [(X rem (255-32)) + 32]),
?line 1 = string:str("x", "x"),
@@ -217,21 +217,39 @@ substr(Config) when is_list(Config) ->
?line {'EXIT',_} = (catch string:substr("1234", "1")),
ok.
-tokens(suite) ->
- [];
-tokens(doc) ->
- [];
tokens(Config) when is_list(Config) ->
- ?line [] = string:tokens("",""),
- ?line [] = string:tokens("abc","abc"),
- ?line ["abc"] = string:tokens("abc", ""),
- ?line ["1","2 34","4","5"] = string:tokens("1,2 34,4;5", ";,"),
- %% invalid arg type
- ?line {'EXIT',_} = (catch string:tokens('x,y', ",")),
+ [] = string:tokens("",""),
+ [] = string:tokens("abc","abc"),
+ ["abc"] = string:tokens("abc", ""),
+ ["1","2 34","45","5","6","7"] = do_tokens("1,2 34,45;5,;6;,7", ";,"),
+
%% invalid arg type
- ?line {'EXIT',_} = (catch string:tokens("x,y", ',')),
+ {'EXIT',_} = (catch string:tokens('x,y', ",")),
+ {'EXIT',_} = (catch string:tokens("x,y", ',')),
ok.
+do_tokens(S0, Sep0) ->
+ [H|T] = Sep0,
+ S = [replace_sep(C, T, H) || C <- S0],
+ Sep = [H],
+ io:format("~p ~p\n", [S0,Sep0]),
+ io:format("~p ~p\n", [S,Sep]),
+
+ Res = string:tokens(S0, Sep0),
+ Res = string:tokens(Sep0++S0, Sep0),
+ Res = string:tokens(S0++Sep0, Sep0),
+
+ Res = string:tokens(S, Sep),
+ Res = string:tokens(Sep++S, Sep),
+ Res = string:tokens(S++Sep, Sep),
+
+ Res.
+
+replace_sep(C, Seps, New) ->
+ case lists:member(C, Seps) of
+ true -> New;
+ false -> C
+ end.
chars(suite) ->
[];
diff --git a/lib/stdlib/test/supervisor_SUITE.erl b/lib/stdlib/test/supervisor_SUITE.erl
index c98654aef7..015b09f35e 100644
--- a/lib/stdlib/test/supervisor_SUITE.erl
+++ b/lib/stdlib/test/supervisor_SUITE.erl
@@ -37,6 +37,7 @@
sup_start_ignore_child/1, sup_start_ignore_temporary_child/1,
sup_start_ignore_temporary_child_start_child/1,
sup_start_ignore_temporary_child_start_child_simple/1,
+ sup_start_ignore_permanent_child_start_child_simple/1,
sup_start_error_return/1, sup_start_fail/1,
sup_start_map/1, sup_start_map_faulty_specs/1,
sup_stop_infinity/1, sup_stop_timeout/1, sup_stop_brutal_kill/1,
@@ -53,7 +54,8 @@
temporary_abnormal/1, temporary_bystander/1]).
%% Restart strategy tests
--export([ one_for_one/1,
+-export([ multiple_restarts/1,
+ one_for_one/1,
one_for_one_escalation/1, one_for_all/1,
one_for_all_escalation/1, one_for_all_other_child_fails_restart/1,
simple_one_for_one/1, simple_one_for_one_escalation/1,
@@ -78,6 +80,7 @@ suite() ->
all() ->
[{group, sup_start}, {group, sup_start_map}, {group, sup_stop}, child_adm,
child_adm_simple, extra_return, child_specs, sup_flags,
+ multiple_restarts,
{group, restart_one_for_one},
{group, restart_one_for_all},
{group, restart_simple_one_for_one},
@@ -97,6 +100,7 @@ groups() ->
sup_start_ignore_child, sup_start_ignore_temporary_child,
sup_start_ignore_temporary_child_start_child,
sup_start_ignore_temporary_child_start_child_simple,
+ sup_start_ignore_permanent_child_start_child_simple,
sup_start_error_return, sup_start_fail]},
{sup_start_map, [],
[sup_start_map, sup_start_map_faulty_specs]},
@@ -248,6 +252,27 @@ sup_start_ignore_temporary_child_start_child_simple(Config)
[1,1,0,1] = get_child_counts(sup_test).
%%-------------------------------------------------------------------------
+%% Tests what happens if child's init-callback returns ignore for a
+%% permanent child when child is started with start_child/2, and the
+%% supervisor is simple_one_for_one.
+%% Child spec shall NOT be saved!!!
+sup_start_ignore_permanent_child_start_child_simple(Config)
+ when is_list(Config) ->
+ process_flag(trap_exit, true),
+ Child1 = {child1, {supervisor_1, start_child, [ignore]},
+ permanent, 1000, worker, []},
+ {ok, Pid} = start_link({ok, {{simple_one_for_one, 2, 3600}, [Child1]}}),
+
+ {ok, undefined} = supervisor:start_child(sup_test, []),
+ {ok, CPid2} = supervisor:start_child(sup_test, []),
+
+ [{undefined, CPid2, worker, []}] = supervisor:which_children(sup_test),
+ [1,1,0,1] = get_child_counts(sup_test),
+
+ %% Regression test: check that the supervisor terminates without error.
+ exit(Pid, shutdown),
+ check_exit_reason(Pid, shutdown).
+%%-------------------------------------------------------------------------
%% Tests what happens if init-callback returns a invalid value.
sup_start_error_return(Config) when is_list(Config) ->
process_flag(trap_exit, true),
@@ -873,6 +898,39 @@ temporary_bystander(_Config) ->
[{child1, _, _, _}] = supervisor:which_children(SupPid2).
%%-------------------------------------------------------------------------
+%% Test restarting a process multiple times, being careful not
+%% to exceed the maximum restart frequency.
+multiple_restarts(Config) when is_list(Config) ->
+ process_flag(trap_exit, true),
+ Child1 = #{id => child1,
+ start => {supervisor_1, start_child, []},
+ restart => permanent,
+ shutdown => brutal_kill,
+ type => worker,
+ modules => []},
+ SupFlags = #{strategy => one_for_one,
+ intensity => 1,
+ period => 1},
+ {ok, SupPid} = start_link({ok, {SupFlags, []}}),
+ {ok, CPid1} = supervisor:start_child(sup_test, Child1),
+
+ %% Terminate the process several times, taking care
+ %% not to exceed the maximum restart intensity.
+ terminate(SupPid, CPid1, child1, abnormal),
+ _ = [begin
+ receive after 2100 -> ok end,
+ [{_, Pid, _, _}|_] = supervisor:which_children(sup_test),
+ terminate(SupPid, Pid, child1, abnormal)
+ end || _ <- [1,2,3]],
+
+ %% Verify that the supervisor is still alive and clean up.
+ ok = supervisor:terminate_child(SupPid, child1),
+ ok = supervisor:delete_child(SupPid, child1),
+ exit(SupPid, kill),
+ ok.
+
+
+%%-------------------------------------------------------------------------
%% Test the one_for_one base case.
one_for_one(Config) when is_list(Config) ->
process_flag(trap_exit, true),
diff --git a/lib/stdlib/test/tar_SUITE.erl b/lib/stdlib/test/tar_SUITE.erl
index 9b6d65011e..3b54cd0f34 100644
--- a/lib/stdlib/test/tar_SUITE.erl
+++ b/lib/stdlib/test/tar_SUITE.erl
@@ -89,7 +89,7 @@ borderline_test(Size, TempDir) ->
?line io:format("Testing size ~p", [Size]),
%% Create a file and archive it.
- ?line {_, _, X0} = erlang:now(),
+ X0 = erlang:monotonic_time(),
?line file:write_file(Name, random_byte_list(X0, Size)),
?line ok = erl_tar:create(Archive, [Name]),
?line ok = file:delete(Name),
diff --git a/lib/stdlib/test/timer_SUITE.erl b/lib/stdlib/test/timer_SUITE.erl
index bea2b3fb2a..ae32d98807 100644
--- a/lib/stdlib/test/timer_SUITE.erl
+++ b/lib/stdlib/test/timer_SUITE.erl
@@ -25,14 +25,11 @@
-include_lib("test_server/include/test_server.hrl").
-%% Test suite for timer module. This is a really nasty test it runs a
-%% lot of timeouts and then checks in the end if any of them was
-%% trigggered too early or if any late timeouts was much too
-%% late. What should be added is more testing of the interface
-%% functions I guess. But I don't have time for that now.
+%% Random test of the timer module. This is a really nasty test, as it
+%% runs a lot of timeouts and then checks in the end if any of them
+%% was triggered too early or if any late timeouts were much too late.
%%
-%% Expect it to run for at least 5-10 minutes!
-
+%% Running time on average is about 90 seconds.
%% The main test case in this module is "do_big_test", which
%% orders a large number of timeouts and measures how
@@ -40,15 +37,8 @@
%% also a number of other concurrent processes running "nrev" at the same
%% time. The result is analyzed afterwards by trying to check if the
%% measured values are reasonable. It is hard to determine what is
-%% reasonable on different machines therefore the test can sometimes
-%% fail, even though the timer module is ok. I have checked against
-%% previous versions of the timer module (which contained bugs) and it
-%% seems it fails every time when running the buggy timer modules.
-%%
-%% The solution is to rewrite the test suite. Possible strategies for a
-%% rewrite: smarter math on the measuring data, test cases with varying
-%% amount of load. The test suite should also include tests that test the
-%% interface of the timer module.
+%% reasonable on different machines; therefore the test can sometimes
+%% fail, even though the timer module is ok.
suite() -> [{ct_hooks,[ts_install_cth]}].
@@ -89,10 +79,7 @@ report_result(Error) -> ?line test_server:fail(Error).
big_test(N) ->
C = start_collect(),
system_time(), system_time(), system_time(),
- A1 = element(2, erlang:now()),
- A2 = A1 * 3,
- A3 = element(3, erlang:now()),
- random:seed(A1, A2, A3),
+ random:seed(erlang:timestamp()),
random:uniform(100),random:uniform(100),random:uniform(100),
big_loop(C, N, []),
@@ -146,7 +133,7 @@ big_loop(C, N, Pids) ->
%%Pids2=Pids1,
%% wait a little while
- timer:sleep(random:uniform(200)*10),
+ timer:sleep(random:uniform(200)*3),
%% spawn zero, one or two nrev to get some load ;-/
Pids3 = start_nrev(Pids2, random:uniform(100)),
@@ -166,14 +153,14 @@ start_nrev(Pids, _N) ->
start_after_test(Pids, C, 1) ->
- TO1 = random:uniform(100)*100,
+ TO1 = random:uniform(100)*47,
[s_a_t(C, TO1)|Pids];
start_after_test(Pids, C, 2) ->
- TO1 = random:uniform(100)*100,
- TO2 = TO1 div random:uniform(3) + 200,
+ TO1 = random:uniform(100)*47,
+ TO2 = TO1 div random:uniform(3) + 101,
[s_a_t(C, TO1),s_a_t(C, TO2)|Pids];
start_after_test(Pids, C, N) ->
- TO1 = random:uniform(100)*100,
+ TO1 = random:uniform(100)*47,
start_after_test([s_a_t(C, TO1)|Pids], C, N-1).
s_a_t(C, TimeOut) ->
@@ -199,7 +186,7 @@ a_t(C, TimeOut) ->
maybe_start_i_test(Pids, C, 1) ->
%% ok do it
- TOI = random:uniform(100)*100,
+ TOI = random:uniform(53)*49,
CountI = random:uniform(10) + 3, % at least 4 times
[spawn_link(timer_SUITE, i_t, [C, TOI, CountI])|Pids];
maybe_start_i_test(Pids, _C, _) ->
@@ -374,9 +361,7 @@ res_combine({error,Es}, [{error,E}|T]) ->
system_time() ->
- %%element(1, statistics(wall_clock)).
- {M,S,U} = erlang:now(),
- 1000000000 * M + 1000 * S + (U div 1000).
+ erlang:monotonic_time(milli_seconds).
%% ------------------------------------------------------- %%
diff --git a/lib/stdlib/test/timer_simple_SUITE.erl b/lib/stdlib/test/timer_simple_SUITE.erl
index dc751aad16..3c7e3c5f25 100644
--- a/lib/stdlib/test/timer_simple_SUITE.erl
+++ b/lib/stdlib/test/timer_simple_SUITE.erl
@@ -374,7 +374,6 @@ performance(Mod) ->
big_test(M) ->
Load_Pids = start_nrev(20, M), % Increase if more load wanted :)
- apply(M, sleep, [9000]),
LPids = spawn_timers(5, M, 10000, 5),
apply(M, sleep, [4000]),
@@ -483,8 +482,7 @@ append([],X) ->
X.
system_time() ->
- {M,S,U} = erlang:now(),
- 1000000*(M*1000000 + S) + U.
+ erlang:monotonic_time(micro_seconds).
%% ------------------------------------------------------- %%
diff --git a/lib/stdlib/test/unicode_SUITE.erl b/lib/stdlib/test/unicode_SUITE.erl
index 10b29d0d28..9f5d485df6 100644
--- a/lib/stdlib/test/unicode_SUITE.erl
+++ b/lib/stdlib/test/unicode_SUITE.erl
@@ -29,7 +29,13 @@
random_lists/1,
roundtrips/1,
latin1/1,
- exceptions/1, binaries_errors/1]).
+ exceptions/1,
+ binaries_errors_limit/1,
+ ex_binaries_errors_utf8/1,
+ ex_binaries_errors_utf16_little/1,
+ ex_binaries_errors_utf16_big/1,
+ ex_binaries_errors_utf32_little/1,
+ ex_binaries_errors_utf32_big/1]).
init_per_testcase(Case, Config) when is_atom(Case), is_list(Config) ->
Dog=?t:timetrap(?t:minutes(20)),
@@ -44,10 +50,17 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
[utf8_illegal_sequences_bif,
utf16_illegal_sequences_bif, random_lists, roundtrips,
- latin1, exceptions, binaries_errors].
+ latin1, exceptions,
+ binaries_errors_limit,
+ {group,binaries_errors}].
groups() ->
- [].
+ [{binaries_errors,[parallel],
+ [ex_binaries_errors_utf8,
+ ex_binaries_errors_utf16_little,
+ ex_binaries_errors_utf16_big,
+ ex_binaries_errors_utf32_little,
+ ex_binaries_errors_utf32_big]}].
init_per_suite(Config) ->
Config.
@@ -61,15 +74,11 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
-binaries_errors(Config) when is_list(Config) ->
+binaries_errors_limit(Config) when is_list(Config) ->
setlimit(10),
ex_binaries_errors_utf8(Config),
setlimit(default),
- ex_binaries_errors_utf8(Config),
- ex_binaries_errors_utf16_little(Config),
- ex_binaries_errors_utf16_big(Config),
- ex_binaries_errors_utf32_little(Config),
- ex_binaries_errors_utf32_big(Config).
+ ok.
ex_binaries_errors_utf8(Config) when is_list(Config) ->
%% Original smoke test, we should not forget the original offset...
@@ -78,8 +87,9 @@ ex_binaries_errors_utf8(Config) when is_list(Config) ->
%% Now, try with longer binary (trapping)
BrokenPart = list_to_binary(lists:seq(128,255)),
BrokenSz = byte_size(BrokenPart),
+ Seq255 = lists:seq(1,255),
[ begin
- OKList = lists:flatten(lists:duplicate(N,lists:seq(1,255))),
+ OKList = lists:flatten(lists:duplicate(N,Seq255)),
OKBin = unicode:characters_to_binary(OKList),
OKLen = length(OKList),
%% Copy to avoid that the binary get's writable
@@ -102,109 +112,84 @@ ex_binaries_errors_utf8(Config) when is_list(Config) ->
ok.
ex_binaries_errors_utf16_little(Config) when is_list(Config) ->
- BrokenPart = << <<X:16/little>> || X <- lists:seq(16#DC00,16#DFFF) >>,
- BrokenSz = byte_size(BrokenPart),
- [ begin
- OKList = lists:flatten(lists:duplicate(N,lists:seq(1,255))),
- OKBin = unicode:characters_to_binary(OKList,unicode,{utf16,little}),
- OKLen = length(OKList),
- %% Copy to avoid that the binary get's writable
- PartlyBroken = binary:copy(<<OKBin/binary, BrokenPart/binary>>),
- PBSz = byte_size(PartlyBroken),
- {error,OKList,DeepBrokenPart} =
- unicode:characters_to_list(PartlyBroken,{utf16,little}),
- BrokenPart = iolist_to_binary(DeepBrokenPart),
- [ begin
- NewList = lists:nthtail(X, OKList),
- NewSz = byte_size(unicode:characters_to_binary(NewList,unicode,{utf16,little})) +
- BrokenSz,
- Chomped = binary:part(PartlyBroken,PBSz - NewSz, NewSz),
- true = (binary:referenced_byte_size(Chomped) =:= PBSz),
- {error,NewList,DeepBrokenPart2} =
- unicode:characters_to_list(Chomped,{utf16,little}),
- BrokenPart = iolist_to_binary(DeepBrokenPart2)
- end || X <- lists:seq(1,OKLen) ]
- end || N <- lists:seq(1,16,3) ],
- ok.
+ ex_binaries_errors_utf16(little).
+
ex_binaries_errors_utf16_big(Config) when is_list(Config) ->
- BrokenPart = << <<X:16/big>> || X <- lists:seq(16#DC00,16#DFFF) >>,
+ ex_binaries_errors_utf16(big).
+
+ex_binaries_errors_utf16(Endian) ->
+ BrokenSeq = lists:seq(16#DC00, 16#DFFF),
+ BrokenPart = case Endian of
+ little ->
+ << <<X:16/little>> || X <- BrokenSeq >>;
+ big ->
+ << <<X:16/big>> || X <- BrokenSeq >>
+ end,
BrokenSz = byte_size(BrokenPart),
+ Seq255 = lists:seq(1, 255),
[ begin
- OKList = lists:flatten(lists:duplicate(N,lists:seq(1,255))),
- OKBin = unicode:characters_to_binary(OKList,unicode,{utf16,big}),
- OKLen = length(OKList),
- %% Copy to avoid that the binary get's writable
- PartlyBroken = binary:copy(<<OKBin/binary, BrokenPart/binary>>),
+ OKList = lists:append(lists:duplicate(N, Seq255)),
+ OKBin = unicode:characters_to_binary(OKList, unicode, {utf16,Endian}),
+ PartlyBroken = iolist_to_binary([OKBin,BrokenPart]),
PBSz = byte_size(PartlyBroken),
{error,OKList,DeepBrokenPart} =
- unicode:characters_to_list(PartlyBroken,{utf16,big}),
+ unicode:characters_to_list(PartlyBroken, {utf16,Endian}),
BrokenPart = iolist_to_binary(DeepBrokenPart),
- [ begin
- NewList = lists:nthtail(X, OKList),
- NewSz = byte_size(unicode:characters_to_binary(NewList,unicode,{utf16,big})) +
- BrokenSz,
- Chomped = binary:part(PartlyBroken,PBSz - NewSz, NewSz),
- true = (binary:referenced_byte_size(Chomped) =:= PBSz),
- {error,NewList,DeepBrokenPart2} =
- unicode:characters_to_list(Chomped,{utf16,big}),
- BrokenPart = iolist_to_binary(DeepBrokenPart2)
- end || X <- lists:seq(1,OKLen) ]
- end || N <- lists:seq(1,16,3) ],
+ utf16_inner_loop(OKList, BrokenPart, BrokenSz,
+ PartlyBroken, PBSz, Endian)
+ end || N <- lists:seq(1, 16, 3) ],
+ ok.
+
+utf16_inner_loop([_|List], BrokenPart, BrokenSz, PartlyBroken, PBSz, Endian) ->
+ Sz = length(List)*2 + BrokenSz,
+ Chomped = binary:part(PartlyBroken, PBSz - Sz, Sz),
+ true = binary:referenced_byte_size(Chomped) =:= PBSz,
+ {error,List,DeepBrokenPart} =
+ unicode:characters_to_list(Chomped, {utf16,Endian}),
+ BrokenPart = iolist_to_binary(DeepBrokenPart),
+ utf16_inner_loop(List, BrokenPart, BrokenSz, PartlyBroken, PBSz, Endian);
+utf16_inner_loop([], _, _, _, _, _) ->
ok.
ex_binaries_errors_utf32_big(Config) when is_list(Config) ->
- BrokenPart = << <<X:32/big>> || X <- lists:seq(16#DC00,16#DFFF) >>,
- BrokenSz = byte_size(BrokenPart),
- [ begin
- OKList = lists:flatten(lists:duplicate(N,lists:seq(1,255))),
- OKBin = unicode:characters_to_binary(OKList,unicode,{utf32,big}),
- OKLen = length(OKList),
- %% Copy to avoid that the binary get's writable
- PartlyBroken = binary:copy(<<OKBin/binary, BrokenPart/binary>>),
- PBSz = byte_size(PartlyBroken),
- {error,OKList,DeepBrokenPart} =
- unicode:characters_to_list(PartlyBroken,{utf32,big}),
- BrokenPart = iolist_to_binary(DeepBrokenPart),
- [ begin
- NewList = lists:nthtail(X, OKList),
- NewSz = byte_size(unicode:characters_to_binary(NewList,unicode,{utf32,big})) +
- BrokenSz,
- Chomped = binary:part(PartlyBroken,PBSz - NewSz, NewSz),
- true = (binary:referenced_byte_size(Chomped) =:= PBSz),
- {error,NewList,DeepBrokenPart2} =
- unicode:characters_to_list(Chomped,{utf32,big}),
- BrokenPart = iolist_to_binary(DeepBrokenPart2)
- end || X <- lists:seq(1,OKLen) ]
- end || N <- lists:seq(1,16,3) ],
- ok.
+ ex_binaries_errors_utf32(big).
ex_binaries_errors_utf32_little(Config) when is_list(Config) ->
- BrokenPart = << <<X:32/little>> || X <- lists:seq(16#DC00,16#DFFF) >>,
+ ex_binaries_errors_utf32(little).
+
+ex_binaries_errors_utf32(Endian) ->
+ BrokenSeq = lists:seq(16#DC00, 16#DFFF),
+ BrokenPart = case Endian of
+ little ->
+ << <<X:32/little>> || X <- BrokenSeq >>;
+ big ->
+ << <<X:32/big>> || X <- BrokenSeq >>
+ end,
BrokenSz = byte_size(BrokenPart),
+ Seq255 = lists:seq(1, 255),
[ begin
- OKList = lists:flatten(lists:duplicate(N,lists:seq(1,255))),
- OKBin = unicode:characters_to_binary(OKList,unicode,{utf32,little}),
- OKLen = length(OKList),
- %% Copy to avoid that the binary get's writable
- PartlyBroken = binary:copy(<<OKBin/binary, BrokenPart/binary>>),
+ OKList = lists:append(lists:duplicate(N, Seq255)),
+ OKBin = unicode:characters_to_binary(OKList, unicode, {utf32,Endian}),
+ PartlyBroken = iolist_to_binary([OKBin,BrokenPart]),
PBSz = byte_size(PartlyBroken),
{error,OKList,DeepBrokenPart} =
- unicode:characters_to_list(PartlyBroken,{utf32,little}),
+ unicode:characters_to_list(PartlyBroken, {utf32,Endian}),
BrokenPart = iolist_to_binary(DeepBrokenPart),
- [ begin
- NewList = lists:nthtail(X, OKList),
- NewSz = byte_size(unicode:characters_to_binary(NewList,unicode,{utf32,little})) +
- BrokenSz,
- Chomped = binary:part(PartlyBroken,PBSz - NewSz, NewSz),
- true = (binary:referenced_byte_size(Chomped) =:= PBSz),
- {error,NewList,DeepBrokenPart2} =
- unicode:characters_to_list(Chomped,{utf32,little}),
- BrokenPart = iolist_to_binary(DeepBrokenPart2)
- end || X <- lists:seq(1,OKLen) ]
- end || N <- lists:seq(1,16,3) ],
+ utf32_inner_loop(OKList, BrokenPart, BrokenSz,
+ PartlyBroken, PBSz, Endian)
+ end || N <- lists:seq(1, 16, 3) ],
ok.
-
+utf32_inner_loop([_|List], BrokenPart, BrokenSz, PartlyBroken, PBSz, Endian) ->
+ Sz = length(List)*4 + BrokenSz,
+ Chomped = binary:part(PartlyBroken, PBSz - Sz, Sz),
+ true = binary:referenced_byte_size(Chomped) =:= PBSz,
+ {error,List,DeepBrokenPart} =
+ unicode:characters_to_list(Chomped, {utf32,Endian}),
+ BrokenPart = iolist_to_binary(DeepBrokenPart),
+ utf32_inner_loop(List, BrokenPart, BrokenSz, PartlyBroken, PBSz, Endian);
+utf32_inner_loop([], _, _, _, _, _) ->
+ ok.
exceptions(Config) when is_list(Config) ->
setlimit(10),
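The refactored UTF-16/UTF-32 helpers above all assert the same error shape from unicode:characters_to_list/2; a minimal shell-style sketch of that contract (the literal data here is made up for illustration):

    %% A lone low surrogate is invalid in UTF-16, so conversion stops there
    %% and reports the already-converted prefix plus the unconsumed rest.
    Good = unicode:characters_to_binary("abc", unicode, {utf16,little}),
    Bad  = <<16#DC00:16/little>>,
    {error, "abc", Rest} =
        unicode:characters_to_list(<<Good/binary, Bad/binary>>, {utf16,little}),
    Bad = iolist_to_binary(Rest).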
diff --git a/lib/stdlib/test/zip_SUITE.erl b/lib/stdlib/test/zip_SUITE.erl
index a57641ef62..08243f7c4f 100644
--- a/lib/stdlib/test/zip_SUITE.erl
+++ b/lib/stdlib/test/zip_SUITE.erl
@@ -23,7 +23,7 @@
bad_zip/1, unzip_from_binary/1, unzip_to_binary/1,
zip_to_binary/1,
unzip_options/1, zip_options/1, list_dir_options/1, aliases/1,
- openzip_api/1, zip_api/1, unzip_jar/1,
+ openzip_api/1, zip_api/1, open_leak/1, unzip_jar/1,
compress_control/1,
foldl/1]).
@@ -38,7 +38,7 @@ all() ->
[borderline, atomic, bad_zip, unzip_from_binary,
unzip_to_binary, zip_to_binary, unzip_options,
zip_options, list_dir_options, aliases, openzip_api,
- zip_api, unzip_jar, compress_control, foldl].
+ zip_api, open_leak, unzip_jar, compress_control, foldl].
groups() ->
[].
@@ -84,7 +84,7 @@ borderline_test(Size, TempDir) ->
io:format("Testing size ~p", [Size]),
%% Create a file and archive it.
- {_, _, X0} = erlang:now(),
+ {_, _, X0} = erlang:timestamp(),
file:write_file(Name, random_byte_list(X0, Size)),
{ok, Archive} = zip:zip(Archive, [Name]),
ok = file:delete(Name),
@@ -318,8 +318,46 @@ zip_api(Config) when is_list(Config) ->
%% Clean up.
delete_files([Names]),
+ ok.
+
+open_leak(doc) ->
+ ["Test that zip doesn't leak processes and ports where the "
+ "controlling process dies without closing an zip opened with "
+ "zip:zip_open/1."];
+open_leak(suite) -> [];
+open_leak(Config) when is_list(Config) ->
+ %% Create a zip archive
+ Zip = "zip.zip",
+ {ok, Zip} = zip:zip(Zip, [], []),
+
+ %% Open the archive in another process that dies immediately.
+ ZipSrv = spawn_zip(Zip, [memory]),
+
+ %% Expect the ZipSrv process to die soon after.
+ true = spawned_zip_dead(ZipSrv),
+
+ %% Clean up.
+ delete_files([Zip]),
+
ok.
+spawn_zip(Zip, Options) ->
+ Self = self(),
+ spawn(fun() -> Self ! zip:zip_open(Zip, Options) end),
+ receive
+ {ok, ZipSrv} ->
+ ZipSrv
+ end.
+
+spawned_zip_dead(ZipSrv) ->
+ Ref = monitor(process, ZipSrv),
+ receive
+ {'DOWN', Ref, _, ZipSrv, _} ->
+ true
+ after 1000 ->
+ false
+ end.
+
unzip_options(doc) ->
["Test options for unzip, only cwd and file_list currently"];
unzip_options(suite) ->
@@ -568,7 +606,7 @@ zip_to_binary(Config) when is_list(Config) ->
aliases(doc) ->
["Test using the aliases, extract/2, table/2 and create/3"];
aliases(Config) when is_list(Config) ->
- {_, _, X0} = erlang:now(),
+ {_, _, X0} = erlang:timestamp(),
Size = 100,
B = list_to_binary(random_byte_list(X0, Size)),
%% create
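For context on what the new open_leak case guards against: the handle returned by zip:zip_open/2 is a process that is normally closed explicitly, as in this small sketch (the archive name is made up):

    {ok, Handle} = zip:zip_open("archive.zip", [memory]),
    {ok, _Entries} = zip:zip_list_dir(Handle),
    ok = zip:zip_close(Handle).
    %% open_leak checks that the handle process also disappears when the
    %% process that opened it dies without calling zip:zip_close/1.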
diff --git a/lib/stdlib/vsn.mk b/lib/stdlib/vsn.mk
index 5be130bac9..a1f2a946b1 100644
--- a/lib/stdlib/vsn.mk
+++ b/lib/stdlib/vsn.mk
@@ -1 +1 @@
-STDLIB_VSN = 2.3
+STDLIB_VSN = 2.5
diff --git a/lib/syntax_tools/doc/overview.edoc b/lib/syntax_tools/doc/overview.edoc
index df02ad0b3a..3111633a99 100644
--- a/lib/syntax_tools/doc/overview.edoc
+++ b/lib/syntax_tools/doc/overview.edoc
@@ -2,79 +2,34 @@
Syntax Tools overview page
-
@author Richard Carlsson <[email protected]>
-@copyright 1997-2004 Richard Carlsson
+@copyright 1997-2014 Richard Carlsson
@version {@version}
-@title Erlang Syntax Tools
+@title Erlang Syntax and Metaprogramming tools
-@doc This package contains modules for handling abstract Erlang syntax
-trees, in a way that is compatible with the "parse trees" of the
-standard library module `erl_parse', together with utilities for reading
-source files in unusual ways and pretty-printing syntax trees. Also
-included is an amazing module merger and renamer called Igor, as well as
-an automatic code-cleaner.
+@doc This package contains modules for handling abstract syntax trees (ASTs)
+in Erlang, in a way that is compatible with the "abstract format" parse
+trees of the stdlib module `erl_parse', together with utilities for reading
+source files, {@link erl_prettypr. pretty-printing syntax trees}, {@link
+igor. merging and renaming modules}, {@link erl_tidy. cleaning up obsolete
+constructs}, and doing {@link merl. metaprogramming} in Erlang.
-<p>The abstract layer (defined in {@link erl_syntax}) is nicely
+The abstract layer (defined in {@link erl_syntax}) is nicely
structured and the node types are context-independent. The layer makes
it possible to transparently attach source-code comments and user
annotations to nodes of the tree. Using the abstract layer makes
applications less sensitive to changes in the {@link //stdlib/erl_parse}
-data structures, only requiring the {@link erl_syntax} module to be
-up-to-date.</p>
+data structures, only requiring the `erl_syntax' module to be up-to-date.
-<p>The pretty printer {@link erl_prettypr} is implemented on top of the
+The pretty printer {@link erl_prettypr} is implemented on top of the
library module {@link prettypr}: this is a powerful and flexible generic
-pretty printing library, which is also distributed separately.</p>
-
-<p>For a short demonstration of parsing and pretty-printing, simply
-compile the included module <a
-href="../examples/demo.erl"><code>demo.erl</code></a>, and execute
-<code>demo:run()</code> from the Erlang shell. It will compile the
-remaining modules and give you further instructions.</p>
-
-<p>Also try the {@link erl_tidy} module, as follows:
-<pre>
- erl_tidy:dir("any-erlang-source-dir", [test, old_guard_tests]).</pre>
-("<code>test</code>" assures that no files are modified).</p>
-
-<p>News in 1.4:
-<ul>
- <li>Added support for {@link erl_syntax:cond_expr/1. cond-expressions},
- {@link erl_syntax:try_expr/4. try-expressions} and
- {@link erl_syntax:class_qualifier/2. class-qualifier patterns}.</li>
- <li>Added support for parameterized modules.</li>
- <li>{@link igor. Igor} is officially included.</li>
- <li>Quick-parse functionality added to {@link epp_dodger}.</li>
-</ul>
-</p>
-
-<p>News in 1.3:
-<ul>
- <li>Added support for qualified names (as used by "packages").</li>
- <li>Various internal changes.</li>
-</ul>
-</p>
+pretty printing library, which is also distributed separately.
-<p>News in 1.2:
-<ul>
- <li>HTML Documentation (generated with EDoc).</li>
- <li>A few bug fixes and some minor interface changes (sorry for any
- inconvenience).</li>
-</ul>
-</p>
+For a short demonstration of parsing and pretty-printing, simply
+compile the included module <a href="../examples/demo.erl">`demo.erl'</a>,
+and execute `demo:run()' from the Erlang shell. It will compile the
+remaining modules and give you further instructions.
-<p>News in 1.1:
-<ul>
- <li>Module {@link erl_tidy}: check or tidy either a single module, or a
- whole directory tree recursively. Rewrites and reformats the code
- without losing comments or expanding macros. Safe mode allows
- generating reports without modifying files.</li>
- <li>Module {@link erl_syntax_lib}: contains support functions for easier
- analysis of the source code structure.</li>
- <li>Module {@link epp_dodger}: Bypasses the Erlang preprocessor - avoids
- macro expansion, file inclusion, conditional compilation, etc.
- Allows you to find/modify particular definitions/applications of
- macros, and other things previously not possible.</li>
-</ul>
-</p>
+Also try the {@link erl_tidy} module, as follows:
+```erl_tidy:dir("any-erlang-source-dir", [test, old_guard_tests]).'''
+(the `test' option ensures that no files are modified).
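As a minimal illustration of the parse/pretty-print round trip mentioned above (a shell-style sketch; the source string is made up):

    {ok, Tokens, _} = erl_scan:string("f(X) -> X * 2."),
    {ok, Form} = erl_parse:parse_form(Tokens),
    %% Lift the erl_parse form into the erl_syntax world and print it back.
    io:put_chars(erl_prettypr:format(erl_syntax:form_list([Form]))).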
diff --git a/lib/syntax_tools/doc/src/Makefile b/lib/syntax_tools/doc/src/Makefile
index 2502bf877a..b7c599a9b9 100644
--- a/lib/syntax_tools/doc/src/Makefile
+++ b/lib/syntax_tools/doc/src/Makefile
@@ -50,6 +50,8 @@ XML_REF3_FILES = \
erl_syntax_lib.xml \
erl_tidy.xml \
igor.xml \
+ merl.xml \
+ merl_transform.xml \
prettypr.xml
XML_PART_FILES = part.xml part_notes.xml
diff --git a/lib/syntax_tools/doc/src/notes.xml b/lib/syntax_tools/doc/src/notes.xml
index b0f11bb243..408f6d5bac 100644
--- a/lib/syntax_tools/doc/src/notes.xml
+++ b/lib/syntax_tools/doc/src/notes.xml
@@ -31,6 +31,21 @@
<p>This document describes the changes made to the Syntax_Tools
application.</p>
+<section><title>Syntax_Tools 1.6.18</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fix bad format of error in epp_dodger:parse_file/3</p>
+ <p>
+ Own Id: OTP-12406</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Syntax_Tools 1.6.17</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/syntax_tools/doc/src/ref_man.xml b/lib/syntax_tools/doc/src/ref_man.xml
index 598f656011..2b114c8528 100644
--- a/lib/syntax_tools/doc/src/ref_man.xml
+++ b/lib/syntax_tools/doc/src/ref_man.xml
@@ -29,12 +29,11 @@
</header>
<description>
<p><em>Syntax_Tools</em> contains modules for handling abstract
- Erlang syntax trees, in a way that is compatible with the "parse
- trees" of the STDLIB module <c>erl_parse</c>, together with
- utilities for reading source files in unusual ways and
- pretty-printing syntax trees. Also included is an amazing module
- merger and renamer called Igor, as well as an automatic
- code-cleaner.</p>
+ Erlang syntax trees, in a way that is compatible with the "abstract
+ format" parse trees of the STDLIB module <c>erl_parse</c>, together
+ with utilities for reading source files, pretty-printing syntax trees,
+ merging and renaming modules, cleaning up obsolete constructs, and
+ doing metaprogramming in Erlang.</p>
</description>
<xi:include href="epp_dodger.xml"/>
<xi:include href="erl_comment_scan.xml"/>
@@ -44,6 +43,8 @@
<xi:include href="erl_syntax_lib.xml"/>
<xi:include href="erl_tidy.xml"/>
<xi:include href="igor.xml"/>
+ <xi:include href="merl.xml"/>
+ <xi:include href="merl_transform.xml"/>
<xi:include href="prettypr.xml"/>
</application>
diff --git a/lib/syntax_tools/examples/merl/Makefile b/lib/syntax_tools/examples/merl/Makefile
new file mode 100644
index 0000000000..13a9703733
--- /dev/null
+++ b/lib/syntax_tools/examples/merl/Makefile
@@ -0,0 +1,22 @@
+EBIN=../../ebin
+INCLUDES=../../include
+SOURCES=merl_build.erl lisp.erl lispc.erl basic.erl basicc.erl
+HEADERS=$(INCLUDES)/merl.hrl
+OBJECTS=$(SOURCES:%.erl=%.beam)
+ERLC_FLAGS=+debug_info -I$(INCLUDES) -pa $(EBIN)
+
+all: $(OBJECTS) test
+
+%.beam: %.erl $(HEADERS) Makefile
+ erlc $(ERLC_FLAGS) -o ./ $<
+
+# additional dependencies due to the parse transform
+lispc.beam basicc.beam: $(EBIN)/merl_transform.beam $(EBIN)/merl.beam
+
+clean:
+ -rm -f $(OBJECTS)
+
+test:
+ erl -noshell -pa $(EBIN) \
+ -eval 'eunit:test([lisp, lispc, basic, basicc],[])' \
+ -s init stop
diff --git a/lib/syntax_tools/examples/merl/basic.erl b/lib/syntax_tools/examples/merl/basic.erl
new file mode 100644
index 0000000000..9030059d11
--- /dev/null
+++ b/lib/syntax_tools/examples/merl/basic.erl
@@ -0,0 +1,77 @@
+%% ---------------------------------------------------------------------
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may
+%% not use this file except in compliance with the License. You may obtain
+%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 2012 Richard Carlsson
+%% @doc Trivial Basic interpreter in Erlang
+
+-module(basic).
+
+-export([run/2]).
+
+-include_lib("eunit/include/eunit.hrl").
+
+-define(INTERPRETED, true).
+-include("basic_test.erl").
+
+run(N, Prog) ->
+ ets:new(var, [private, named_table]),
+ ets:new(line, [private, named_table, ordered_set]),
+ lists:foreach(fun (T) -> ets:insert(line, T) end, Prog),
+ goto(N).
+
+stop(N) ->
+ ets:delete(var),
+ ets:delete(line),
+ N.
+
+goto('$end_of_table') -> stop(0);
+goto(L) ->
+ L1 = ets:next(line, L),
+ %% user-supplied line numbers might not exist
+ case ets:lookup(line, L) of
+ [{_, X}] ->
+ stmt(X, L1);
+ _ ->
+ goto(L1)
+ end.
+
+stmt({print, S, As}, L) -> io:format(S, [expr(A) || A <- As]), goto(L);
+stmt({set, V, X}, L) -> ets:insert(var, {V, expr(X)}), goto(L);
+stmt({goto, X}, _L) -> goto(expr(X));
+stmt({stop, X}, _L) -> stop(expr(X));
+stmt({iff, X, A, B}, _L) ->
+ case expr(X) of
+ 0 -> goto(B);
+ _ -> goto(A)
+ end.
+
+expr(X) when is_number(X) ; is_list(X) ->
+ X;
+expr(X) when is_atom(X) ->
+ case ets:lookup(var, X) of
+ [] -> 0;
+ [{_,V}] -> V
+ end;
+expr({plus, X, Y}) ->
+ expr(X) + expr(Y);
+expr({equal, X, Y}) ->
+ bool(expr(X) == expr(Y));
+expr({gt, X, Y}) ->
+ bool(expr(X) > expr(Y));
+expr({knot, X}) ->
+ case expr(X) of
+ 0 -> 1;
+ _ -> 0
+ end.
+
+bool(true) -> 1;
+bool(false) -> 0.
diff --git a/lib/syntax_tools/examples/merl/basic_test.erl b/lib/syntax_tools/examples/merl/basic_test.erl
new file mode 100644
index 0000000000..ff35de6325
--- /dev/null
+++ b/lib/syntax_tools/examples/merl/basic_test.erl
@@ -0,0 +1,77 @@
+%% ---------------------------------------------------------------------
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may
+%% not use this file except in compliance with the License. You may obtain
+%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 2012 Richard Carlsson
+%% @doc Tests. For including in another module.
+
+%-module(basic_test).
+%-import(basic, run/1)
+
+-export([basic_fib/1]).
+
+-include_lib("eunit/include/eunit.hrl").
+
+basics_test_() ->
+ [?_assertEqual(42, run(1,[{1,{stop, 42}}])),
+ ?_assertEqual("hello", run(1,[{1,{stop,"hello"}}])),
+ ?_assertEqual(0, run(1,[{1,{print, "hello ~w", [42]}}])),
+ ?_assertEqual(5, run(1,[{1,{stop, {plus, 2, 3}}}])),
+ ?_assertEqual(5, run(1,[{1,{stop,{plus, 8, -3}}}])),
+ ?_assertEqual(0, run(1,[{1,{stop,{equal, 0, 1}}}])),
+ ?_assertEqual(1, run(1,[{1,{stop,{equal, 1, 1}}}])),
+ ?_assertEqual(0, run(1,[{1,{stop,{gt, 0, 1}}}])),
+ ?_assertEqual(0, run(1,[{1,{stop,{gt, 1, 1}}}])),
+ ?_assertEqual(1, run(1,[{1,{stop,{gt, 2, 1}}}])),
+ ?_assertEqual(0, run(1,[{1,{stop,{knot, 42}}}])),
+ ?_assertEqual(1, run(1,[{1,{stop,{knot, 0}}}])),
+ ?_assertEqual(42, run(1,[{1,{set, x, 42}}, {2,{stop,x}}])),
+ ?_assertEqual(17, run(1,[{1,{iff, 1, 2, 3}},
+ {2,{stop, 17}},
+ {3,{stop, 42}}])),
+ ?_assertEqual(42, run(1,[{1,{iff, 0, 2, 3}},
+ {2,{stop, 17}},
+ {3,{stop, 42}}])),
+ ?_assertEqual(17, run(1,[{1,{iff, 1, 2, 3}},
+ {2,{stop, 17}},
+ {3,{stop, -1}}])),
+ ?_assertEqual(42, run(1,[{1,{iff, 0, 2, 3}},
+ {2,{stop, -1}},
+ {3,{stop, 42}}]))
+
+
+ ].
+
+
+fib_test_() ->
+ [?_assertEqual(fib(N), basic_fib(N)) || N <- lists:seq(1,15)
+ ].
+
+
+fib(N) when N > 1 ->
+ fib(N-1) + fib(N-2);
+fib(_) ->
+ 1.
+
+basic_fib(N) ->
+ run(1,
+ [{1,{set,x,0}},
+ {2,{set,a,1}},
+ {3,{set,b,0}},
+ {10,{iff, {equal, x, N}, 20, 30}},
+ {20,{stop,a}},
+ {30,{print,"~w, ~w, ~w\n",[x,a,b]}},
+ {31,{set,t,a}},
+ {32,{set,a,{plus,a,b}}},
+ {33,{set,b,t}},
+ {34,{set,x,{plus,x,1}}},
+ {40,{goto,10}}
+ ]).
diff --git a/lib/syntax_tools/examples/merl/basicc.erl b/lib/syntax_tools/examples/merl/basicc.erl
new file mode 100644
index 0000000000..531ac51538
--- /dev/null
+++ b/lib/syntax_tools/examples/merl/basicc.erl
@@ -0,0 +1,149 @@
+%% ---------------------------------------------------------------------
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may
+%% not use this file except in compliance with the License. You may obtain
+%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 2012 Richard Carlsson
+%% @doc Basic compiler in Erlang.
+
+-module(basicc).
+
+-export([run/2, make_lines/1, bool/1]).
+
+-include_lib("eunit/include/eunit.hrl").
+
+-define(INTERPRETED, true).
+-include("basic_test.erl").
+
+-include("merl.hrl").
+
+run(N, Prog) ->
+ compile(Prog, tmp),
+ tmp:run(N, Prog).
+
+make_lines(Prog) ->
+ ets:new(line, [private, named_table, ordered_set]),
+ lists:foreach(fun ({L,_}) -> ets:insert(line, {L,label(L)}) end, Prog).
+
+compile(Prog, ModName) ->
+ make_lines(Prog),
+ Fs0 = lists:map(fun ({L, X}) ->
+ {true, label(L),
+ case stmt(X) of
+ {Stmt, false} ->
+ [?Q("() -> _@Stmt")];
+ {Stmt, true} ->
+ Next = case ets:next(line, L) of
+ '$end_of_table' ->
+ ?Q("stop(0)");
+ L1 ->
+ Label = label(L1),
+ ?Q("_@Label@()")
+ end,
+ [?Q("() -> _@Stmt, _@Next")]
+ end}
+ end, Prog),
+ ets:delete(line),
+ Run = ?Q(["(N, Prog) ->",
+ " ets:new(var, [private, named_table]),",
+ " basicc:make_lines(Prog),",
+ " goto(N)"
+ ]),
+ Stop = ?Q(["(R) ->",
+ " ets:delete(var),",
+ " ets:delete(line),",
+ " R"
+ ]),
+ Goto = ?Q(["(L) ->",
+ " case ets:lookup(line, L) of",
+ " [{_, X}] -> apply(tmp, X, []);",
+ " _ ->",
+ " case ets:next(line, L) of",
+ " '$end_of_table' -> stop(0);",
+ " L1 -> goto(L1)",
+ " end",
+ " end"]),
+ Fs = [{true, run, [Run]},
+ {false, stop, [Stop]},
+ {true, goto, [Goto]}
+ | Fs0],
+ Forms = merl_build:module_forms(
+ lists:foldl(fun ({X, Name, Cs}, S) ->
+ merl_build:add_function(X, Name, Cs, S)
+ end,
+ merl_build:init_module(ModName),
+ Fs)),
+ %% %% Write source to file for debugging
+ %% file:write_file(lists:concat([ModName, "_gen.erl"]),
+ %% erl_prettypr:format(erl_syntax:form_list(Forms),
+ %% [{paper,160},{ribbon,80}])),
+ merl:compile_and_load(Forms, [verbose]).
+
+label(L) ->
+ list_to_atom("label_" ++ integer_to_list(L)).
+
+stmt({print, S, As}) ->
+ Exprs = [expr(A) || A <- As],
+ {[?Q(["io:format(_@S@, [_@Exprs])"])], true};
+stmt({set, V, X}) ->
+ Expr = expr(X),
+ {[?Q(["ets:insert(var, {_@V@, _@Expr})"])], true};
+stmt({goto, X}) ->
+ {[jump(X)], false};
+stmt({stop, X}) ->
+ Expr = expr(X),
+ {[?Q(["stop(_@Expr)"])], false};
+stmt({iff, X, A, B}) ->
+ Cond = expr(X),
+ True = jump(A),
+ False = jump(B),
+ {?Q(["case _@Cond of",
+ " 0 -> _@False;",
+ " _ -> _@True",
+ "end"]),
+ false}.
+
+jump(X) ->
+ case ets:lookup(line, X) of
+ [{_, F}] ->
+ ?Q(["_@F@()"]);
+ _ -> % no precomputed label; evaluate the target expression at run time
+ Expr = expr(X),
+ [?Q(["goto(_@Expr)"])]
+ end.
+
+expr(X) when is_number(X) ; is_list(X) ->
+ ?Q("_@X@");
+expr(X) when is_atom(X) ->
+ ?Q(["case ets:lookup(var, _@X@) of",
+ " [] -> 0;",
+ " [{_,V}] -> V",
+ "end"]);
+expr({plus, X, Y}) ->
+ ExprX = expr(X),
+ ExprY = expr(Y),
+ ?Q("_@ExprX + _@ExprY");
+expr({equal, X, Y}) ->
+ ExprX = expr(X),
+ ExprY = expr(Y),
+ ?Q("basicc:bool(_@ExprX == _@ExprY)");
+expr({gt, X, Y}) ->
+ ExprX = expr(X),
+ ExprY = expr(Y),
+ ?Q("basicc:bool(_@ExprX > _@ExprY)");
+expr({knot, X}) ->
+ Expr = expr(X),
+ ?Q(["case _@Expr of",
+ " 0 -> 1;",
+ " _ -> 0",
+ "end"]).
+
+bool(true) -> 1;
+bool(false) -> 0.
diff --git a/lib/syntax_tools/examples/merl/lisp.erl b/lib/syntax_tools/examples/merl/lisp.erl
new file mode 100644
index 0000000000..371dc6b261
--- /dev/null
+++ b/lib/syntax_tools/examples/merl/lisp.erl
@@ -0,0 +1,160 @@
+%% ---------------------------------------------------------------------
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may
+%% not use this file except in compliance with the License. You may obtain
+%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 2012 Richard Carlsson
+%% @doc Trivial Lisp interpreter in Erlang.
+
+-module(lisp).
+
+-export([eval/1]).
+
+-export([init/0, equal/2, gt/2, knot/1]).
+
+-record(st, {env}).
+
+-define(INTERPRETED, true).
+-include("lisp_test.erl").
+
+eval(P) ->
+ {X, _} = eval(P, init()),
+ X.
+
+init() ->
+ Env = [{print, {builtin, fun do_print/2}}
+ ,{list, {builtin, fun do_list/2}}
+ ,{apply, {builtin, fun do_apply/2}}
+ ,{plus, {builtin, fun do_plus/2}}
+ ,{equal, {builtin, fun do_equal/2}}
+ ,{gt, {builtin, fun do_gt/2}}
+ ,{knot, {builtin, fun do_knot/2}}
+ ,{y, y()}
+ ],
+ #st{env=dict:from_list(Env)}.
+
+eval([lambda, Ps, B], #st{env=E}=St) when is_list(Ps) ->
+ case lists:all(fun is_atom/1, Ps) andalso
+ (length(Ps) =:= length(lists:usort(Ps))) of
+ true -> {{lambda, Ps, B, E}, St};
+ false -> throw(bad_lambda)
+ end;
+eval([lambda | _], _) ->
+ throw(bad_lambda);
+eval([def, A, V, B], #st{env=E0}=St) when is_atom(A) ->
+ {V1, St1} = eval(V, St),
+ E1 = bind(A, V1, E0),
+ {X, St2} = eval(B, St1#st{env=E1}),
+ {X, St2#st{env=E0}};
+eval([def | _], _) ->
+ throw(bad_def);
+eval([quote, A], St) ->
+ {A, St};
+eval([quote | _], _) ->
+ throw(bad_quote);
+eval([iff, X, A, B], St) ->
+ case eval(X, St) of
+ {[], St1} -> eval(B, St1);
+ {_, St1} -> eval(A, St1)
+ end;
+eval([do], _St0) ->
+ throw(bad_do);
+eval([do | As], St0) ->
+ lists:foldl(fun (X, {_,St}) -> eval(X, St) end, {[],St0}, As);
+eval([_|_]=L, St) ->
+ {[F | As], St1} = lists:mapfoldl(fun eval/2, St, L),
+ call(F, As, St1);
+eval(A, St) when is_atom(A) ->
+ {deref(A, St), St};
+eval(C, St) ->
+ {C, St}.
+
+%% UTILITY FUNCTIONS
+
+deref(A, #st{env=E}) ->
+ case dict:find(A, E) of
+ {ok, V} -> V;
+ error -> throw({undefined, A})
+ end.
+
+bind(A, V, E) ->
+ dict:store(A, V, E).
+
+bind_args([P | Ps], [A | As], E) ->
+ bind_args(Ps, As, dict:store(P, A, E));
+bind_args([], [], E) ->
+ E;
+bind_args(_, _, _) ->
+ throw(bad_arity).
+
+call({lambda, Ps, B, E}, As, #st{env=E0}=St) ->
+ {X, St1} = eval(B, St#st{env=bind_args(Ps, As, E)}),
+ {X, St1#st{env=E0}};
+call({builtin, F}, As, St) ->
+ F(As, St);
+call(X, _, _) ->
+ throw({bad_fun, X}).
+
+bool(true) -> 1;
+bool(false) -> [].
+
+%% BUILTINS
+
+y() ->
+ {Y, _} = eval([lambda, [f],
+ [[lambda, [x], [f, [lambda, [y], [[x, x], y]]]],
+ [lambda, [x], [f, [lambda, [y], [[x, x], y]]]]]],
+ #st{env=dict:new()}),
+ Y.
+
+do_print([S | Xs], St) ->
+ io:format(S, Xs),
+ {[], St};
+do_print(_, _) ->
+ throw(bad_print).
+
+do_list(As, St) ->
+ {As, St}.
+
+do_apply([F, As], St) ->
+ call(F, As, St);
+do_apply(_, _) ->
+ throw(bad_apply).
+
+do_plus([X, Y], St) when is_number(X), is_number(Y) ->
+ {X + Y, St};
+do_plus(As, _) ->
+ throw({bad_plus, As}).
+
+do_equal([X, Y], St) ->
+ {equal(X, Y), St};
+do_equal(As, _) ->
+ throw({bad_equal, As}).
+
+equal(X, Y) ->
+ bool(X =:= Y).
+
+do_gt([X, Y], St) ->
+ {gt(X, Y), St};
+do_gt(As, _) ->
+ throw({bad_gt, As}).
+
+gt(X, Y) ->
+ bool(X > Y).
+
+do_knot([X], St) ->
+ {knot(X), St};
+do_knot(As, _) ->
+ throw({bad_knot, As}).
+
+knot([]) ->
+ 1;
+knot(_) ->
+ [].
diff --git a/lib/syntax_tools/examples/merl/lisp_test.erl b/lib/syntax_tools/examples/merl/lisp_test.erl
new file mode 100644
index 0000000000..cab8134b8f
--- /dev/null
+++ b/lib/syntax_tools/examples/merl/lisp_test.erl
@@ -0,0 +1,98 @@
+%% ---------------------------------------------------------------------
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may
+%% not use this file except in compliance with the License. You may obtain
+%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 2012 Richard Carlsson
+%% @doc Tests. For including in another module.
+
+%-module(lisp_test).
+%-import(lisp, eval/1)
+
+-export([fib/1, lisp_fib/1]).
+
+-include_lib("eunit/include/eunit.hrl").
+
+basics_test_() ->
+ [?_assertEqual(42, eval(42)),
+ ?_assertEqual("hello", eval([quote, "hello"])),
+ ?_assertEqual(print, eval([quote, print])),
+ ?_assertMatch([17,[1,2],42], eval([list,17,[list,1,2],42])),
+ ?_assertEqual([], eval([print, [quote, "hello ~w"], [list, 42]])),
+ ?_assertEqual(5, eval([plus, 2, 3])),
+ ?_assertEqual(5, eval([plus, 8, -3])),
+ ?_assertEqual([], eval([equal, 0, 1])),
+ ?_assertEqual(1, eval([equal, 1, 1])),
+ ?_assertEqual([], eval([gt, 0, 1])),
+ ?_assertEqual([], eval([gt, 1, 1])),
+ ?_assertEqual(1, eval([gt, 2, 1])),
+ ?_assertEqual([], eval([knot, 42])),
+ ?_assertEqual(1, eval([knot, []])),
+ ?_assertEqual(42, eval([do, 17, 42])),
+ ?_assertEqual([], eval([apply, print, [quote, ["~p", [42]]]])),
+ ?_assertEqual(42, eval([iff, [], 17, 42])),
+ ?_assertEqual(17, eval([iff, 1, 17, 42])),
+ ?_assertEqual(42, eval([iff, [], [apply], 42])),
+ ?_assertEqual(17, eval([iff, 1, 17, [apply]])),
+ ?_assertEqual(17, eval([def, foo, 17, foo])),
+ ?_assertEqual(17, eval([def, bar, 42, [def, foo, 17, foo]])),
+ ?_assertEqual(42, eval([def, bar, 42, [def, foo, 17, bar]])),
+ ?_assertEqual(17, eval([def, foo, 42, [def, foo, 17, foo]]))
+ ].
+
+-ifdef(INTERPRETED).
+interpreter_basics_test_() ->
+ [?_assertThrow({undefined, foo}, eval(foo)),
+ ?_assertMatch({builtin,_}, eval(print)),
+ ?_assertThrow(bad_do, eval([do])),
+ ?_assertThrow(bad_apply, eval([apply])),
+ ?_assertThrow({undefined, foo}, eval([def, bar, 17, foo]))
+ ].
+
+interpreter_lambda_test_() ->
+ [?_assertMatch({lambda,_,_,_}, eval([lambda, [], 42])),
+ ?_assertMatch({lambda,_,_,_}, eval([lambda, [x], x])),
+ ?_assertMatch({lambda,_,_,_}, eval([lambda, [x,y], 42]))
+ ].
+-endif.
+
+lambda_test_() ->
+ [?_assertThrow(bad_lambda, eval([lambda])),
+ ?_assertThrow(bad_lambda, eval([lambda, []])),
+ ?_assertThrow(bad_lambda, eval([lambda, [], 17, 42])),
+ ?_assertThrow(bad_lambda, eval([lambda, 17, 42])),
+ ?_assertThrow(bad_lambda, eval([lambda, [17], 42])),
+ ?_assertThrow(bad_lambda, eval([lambda, [foo, foo], 42])),
+ ?_assertEqual(42, eval([[lambda, [x], x], 42])),
+ ?_assertEqual([42, 17], eval([[lambda, [x], [list, x, 17]], 42])),
+ ?_assertEqual([42, 17], eval([def, f, [def, y, 42,
+ [lambda, [x], [list, y, x]]],
+ [f, 17]]))
+ ].
+
+fib_test_() ->
+ [?_assertEqual(fib(N), lisp_fib(N)) || N <- lists:seq(1,15)
+ ].
+
+
+fib(N) when N > 1 ->
+ fib(N-1) + fib(N-2);
+fib(_) ->
+ 1.
+
+lisp_fib(N) ->
+ eval([def, fib,
+ [y, [lambda, [f], [lambda, [x],
+ [iff, [gt, x, 1],
+ [plus, [f, [plus,x,-1]], [f, [plus,x,-2]]],
+ 1]
+ ]]],
+ [fib, N]
+ ]).
diff --git a/lib/syntax_tools/examples/merl/lispc.erl b/lib/syntax_tools/examples/merl/lispc.erl
new file mode 100644
index 0000000000..97072cdab7
--- /dev/null
+++ b/lib/syntax_tools/examples/merl/lispc.erl
@@ -0,0 +1,102 @@
+%% ---------------------------------------------------------------------
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may
+%% not use this file except in compliance with the License. You may obtain
+%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 2012 Richard Carlsson
+%% @doc Lisp compiler in Erlang.
+
+-module(lispc).
+
+-export([eval/1]).
+
+-record(st, {}).
+
+-include("lisp_test.erl").
+
+-include("merl.hrl").
+
+eval(Lisp) ->
+ compile(Lisp, tmp),
+ tmp:eval().
+
+compile(Lisp, ModName) ->
+ {Code, _} = gen(Lisp, #st{}),
+ Main = ?Q(["() ->",
+ " __print = fun (S, Xs) -> io:format(S,Xs), [] end,",
+ " __apply = fun erlang:apply/2,",
+ " __plus = fun erlang:'+'/2,",
+ " __equal = fun lisp:equal/2,",
+ " __gt = fun lisp:gt/2,",
+ " __knot = fun lisp:knot/1,",
+ " __y = fun (F) ->",
+ " (fun (X) -> F(fun (Y) -> (X(X))(Y) end) end)",
+ " (fun (X) -> F(fun (Y) -> (X(X))(Y) end) end)",
+ " end,",
+ " _@Code"]),
+ Forms = merl_build:module_forms(
+ merl_build:add_function(true, eval, [Main],
+ merl_build:init_module(ModName))),
+ %% %% Write source to file for debugging
+ %% file:write_file(lists:concat([ModName, "_gen.erl"]),
+ %% erl_prettypr:format(erl_syntax:form_list(Forms),
+ %% [{paper,160},{ribbon,80}])),
+ merl:compile_and_load(Forms, [verbose]).
+
+var(Atom) ->
+ merl:var(list_to_atom("__" ++ atom_to_list(Atom))).
+
+gen([lambda, Ps, B], St) when is_list(Ps) ->
+ case lists:all(fun is_atom/1, Ps) andalso
+ (length(Ps) =:= length(lists:usort(Ps))) of
+ true ->
+ Vars = [var(P) || P <- Ps],
+ {Body, St1} = gen(B, St),
+ {?Q("fun (_@Vars) -> _@Body end"), St1};
+ false ->
+ throw(bad_lambda)
+ end;
+gen([lambda | _], _) ->
+ throw(bad_lambda);
+gen([def, A, V, B], St) when is_atom(A) ->
+ Var = var(A),
+ {Val, St1} = gen(V, St),
+ {Body, St2} = gen(B, St1),
+ {?Q("(fun (_@Var) -> _@Body end)(_@Val)"), St2};
+gen([def | _], _) ->
+ throw(bad_def);
+gen([quote, A], St) ->
+ {merl:term(A), St};
+gen([quote | _], _) ->
+ throw(bad_quote);
+gen([iff, X, A, B], St) ->
+ {Cond, St1} = gen(X, St),
+ {True, St2} = gen(A, St1),
+ {False, St3} = gen(B, St2),
+ {?Q(["case _@Cond of",
+ " [] -> _@False;",
+ " _ -> _@True",
+ "end"]),
+ St3};
+gen([do], _) ->
+ throw(bad_do);
+gen([do | As], St0) ->
+ {Body, St1} = lists:mapfoldl(fun gen/2, St0, As),
+ {?Q("begin _@Body end"), St1};
+gen([list | As], St0) ->
+ {Elem, St1} = lists:mapfoldl(fun gen/2, St0, As),
+ {?Q("[ _@Elem ]"), St1};
+gen([_|_]=L, St) ->
+ {[F | As], St1} = lists:mapfoldl(fun gen/2, St, L),
+ {?Q("((_@F)(_@As))"), St1};
+gen(A, St) when is_atom(A) ->
+ {var(A), St};
+gen(C, St) ->
+ {merl:term(C), St}.
diff --git a/lib/syntax_tools/examples/merl/merl_build.erl b/lib/syntax_tools/examples/merl/merl_build.erl
new file mode 100644
index 0000000000..c539f8e2af
--- /dev/null
+++ b/lib/syntax_tools/examples/merl/merl_build.erl
@@ -0,0 +1,104 @@
+%% ---------------------------------------------------------------------
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may
+%% not use this file except in compliance with the License. You may obtain
+%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 2012 Richard Carlsson
+%% @doc Making it simple to build a module with merl
+
+-module(merl_build).
+
+-export([init_module/1, module_forms/1, add_function/4, add_record/3,
+ add_import/3, add_attribute/3, set_file/2]).
+
+-import(merl, [term/1]).
+
+-include("merl.hrl").
+
+-type filename() :: string().
+
+-record(module, { name :: atom()
+ , file :: filename()
+ , exports=[] :: [{atom(), integer()}]
+ , imports=[] :: [{atom(), [{atom(), integer()}]}]
+ , attributes=[] :: [{filename(), atom(), [term()]}]
+ , records=[] :: [{filename(), atom(),
+ [{atom(), merl:tree()}]}]
+ , functions=[] :: [{filename(), atom(), [merl:tree()]}]
+ }).
+
+%% TODO: init module from a list of forms (from various sources)
+
+%% @doc Create a new module representation, using the given module name.
+init_module(Name) when is_atom(Name) ->
+ %% use the module name as the default file name - better than nothing
+ #module{name=Name, file=atom_to_list(Name)}.
+
+%% @doc Get the list of syntax tree forms for a module representation. This can
+%% be passed to compile/2.
+module_forms(#module{name=Name,
+ exports=Xs,
+ imports=Is,
+ records=Rs,
+ attributes=As,
+ functions=Fs})
+ when is_atom(Name), Name =/= undefined ->
+ Module = ?Q("-module('@Name@')."),
+ Exported = [erl_syntax:arity_qualifier(term(N), term(A))
+ || {N,A} <- ordsets:from_list(Xs)],
+ Export = ?Q("-export(['@_Exported'/1])."),
+ Imports = [?Q("-import('@M@', ['@_NAs'/1]).")
+ || {M, Ns} <- Is,
+ NAs <- [[erl_syntax:arity_qualifier(term(N), term(A))
+ || {N,A} <- ordsets:from_list(Ns)]]
+ ],
+ Attrs = [?Q("-file(\"'@File@\",1). -'@N@'('@T@').")
+ || {File, N, T} <- lists:reverse(As)],
+ Records = [?Q("-file(\"'@File@\",1). -record('@N@',{'@_RFs'=[]}).")
+ || {File, N, Es} <- lists:reverse(Rs),
+ RFs <- [[erl_syntax:record_field(term(F), V)
+ || {F,V} <- Es]]
+ ],
+ Functions = [?Q("-file(\"'@File@\",1). '@_F'() -> [].")
+ || {File, N, Cs} <- lists:reverse(Fs),
+ F <- [erl_syntax:function(term(N), Cs)]],
+ lists:flatten([Module, Export, Imports, Attrs, Records, Functions]).
+
+%% @doc Set the source file name for all subsequently added functions,
+%% records, and attributes.
+set_file(Filename, #module{}=M) ->
+ M#module{file=filename:flatten(Filename)}.
+
+%% @doc Add a function to a module representation.
+add_function(Exported, Name, Clauses,
+ #module{file=File, exports=Xs, functions=Fs}=M)
+ when is_boolean(Exported), is_atom(Name), Clauses =/= [] ->
+ Arity = length(erl_syntax:clause_patterns(hd(Clauses))),
+ Xs1 = case Exported of
+ true -> [{Name,Arity} | Xs];
+ false -> Xs
+ end,
+ M#module{exports=Xs1, functions=[{File, Name, Clauses} | Fs]}.
+
+%% @doc Add a record declaration to a module representation.
+add_record(Name, Fields, #module{file=File, records=Rs}=M)
+ when is_atom(Name) ->
+ M#module{records=[{File, Name, Fields} | Rs]}.
+
+%% @doc Add a "wild" attribute, such as `-compile(Opts)' to a module
+%% representation. Note that such attributes can only have a single argument.
+add_attribute(Name, Term, #module{file=File, attributes=As}=M)
+ when is_atom(Name) ->
+ M#module{attributes=[{File, Name, Term} | As]}.
+
+%% @doc Add an import declaration to a module representation.
+add_import(From, Names, #module{imports=Is}=M)
+ when is_atom(From), is_list(Names) ->
+ M#module{imports=[{From, Names} | Is]}.
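+
+%% A minimal usage sketch (not part of the documented examples; the module
+%% name `m', the function `inc', and the clause text are arbitrary, and it
+%% assumes the calling module includes merl.hrl so that `?Q' is available):
+%%
+%%     M0 = merl_build:init_module(m),
+%%     M1 = merl_build:add_function(true, inc, [?Q("(X) -> X + 1")], M0),
+%%     Forms = merl_build:module_forms(M1),
+%%     {ok, _Bin} = merl:compile_and_load(Forms),
+%%     m:inc(1). %% expected to return 2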
diff --git a/lib/syntax_tools/include/merl.hrl b/lib/syntax_tools/include/merl.hrl
new file mode 100644
index 0000000000..e44a78dece
--- /dev/null
+++ b/lib/syntax_tools/include/merl.hrl
@@ -0,0 +1,29 @@
+%% ---------------------------------------------------------------------
+%% Header file for merl
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may
+%% not use this file except in compliance with the License. You may obtain
+%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+
+-ifndef(MERL_HRL).
+
+
+%% Quoting a piece of code
+-define(Q(Text), merl:quote(?LINE, Text)).
+
+%% Quasi-quoting code, substituting metavariables listed in Env
+-define(Q(Text, Env), merl:qquote(?LINE, Text, Env)).
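+
+%% Example (a sketch, assuming merl and its parse transform are available to
+%% the including module):
+%%   Plain = ?Q("{foo, 42}"),
+%%   Subst = ?Q("{foo, _@bar}", [{bar, merl:term(42)}])
+%% Both expressions build a syntax tree for the tuple `{foo, 42}'.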
+
+
+-ifndef(MERL_NO_TRANSFORM).
+-compile({parse_transform, merl_transform}).
+-endif.
+
+
+-endif.
diff --git a/lib/syntax_tools/src/Makefile b/lib/syntax_tools/src/Makefile
index c9fbad8f9a..2e91adf8af 100644
--- a/lib/syntax_tools/src/Makefile
+++ b/lib/syntax_tools/src/Makefile
@@ -22,6 +22,9 @@ RELSYSDIR = $(RELEASE_PATH)/lib/syntax_tools-$(VSN)
#
EBIN = ../ebin
+INCLUDE=../include
+
+ERL_COMPILE_FLAGS += -pa $(EBIN) -pa ./ -I$(INCLUDE)
ifeq ($(NATIVE_LIBS_ENABLED),yes)
ERL_COMPILE_FLAGS += +native
@@ -30,10 +33,15 @@ ERL_COMPILE_FLAGS += +nowarn_shadow_vars +warn_unused_import -Werror # +warn_mis
SOURCES=erl_syntax.erl erl_prettypr.erl erl_syntax_lib.erl \
erl_comment_scan.erl erl_recomment.erl erl_tidy.erl \
- epp_dodger.erl prettypr.erl igor.erl
+ epp_dodger.erl prettypr.erl igor.erl \
+ merl.erl merl_transform.erl
+
+INCLUDE_FILES = merl.hrl
OBJECTS=$(SOURCES:%.erl=$(EBIN)/%.$(EMULATOR)) $(APP_TARGET) $(APPUP_TARGET)
+INCLUDE_DELIVERABLES = $(INCLUDE_FILES:%=$(INCLUDE)/%)
+
APP_FILE= syntax_tools.app
APP_SRC= $(APP_FILE).src
APP_TARGET= $(EBIN)/$(APP_FILE)
@@ -52,6 +60,7 @@ all: $(OBJECTS)
clean:
+ rm -f ./merl_transform.beam
rm -f $(OBJECTS)
rm -f core *~
@@ -64,6 +73,15 @@ realclean: clean
$(EBIN)/%.$(EMULATOR):%.erl
$(erlc_verbose)erlc -W $(ERL_COMPILE_FLAGS) -o$(EBIN) $<
+# special rules and dependencies to apply the transform to itself
+$(EBIN)/merl_transform.beam: $(EBIN)/merl.beam ./merl_transform.beam \
+ ../include/merl.hrl \
+ $(EBIN)/erl_syntax.beam $(EBIN)/erl_syntax_lib.beam
+./merl_transform.beam: ./merl_transform.erl $(EBIN)/merl.beam \
+ ../include/merl.hrl
+ $(V_ERLC) -DMERL_NO_TRANSFORM $(ERL_COMPILE_FLAGS) -o ./ $<
+
+
# ----------------------------------------------------
# Special Build Targets
# ----------------------------------------------------
@@ -84,6 +102,8 @@ release_spec: opt
$(INSTALL_DATA) $(OBJECTS) "$(RELSYSDIR)/ebin"
$(INSTALL_DIR) "$(RELSYSDIR)/src"
$(INSTALL_DATA) $(SOURCES) "$(RELSYSDIR)/src"
+ $(INSTALL_DIR) "$(RELSYSDIR)/include"
+ $(INSTALL_DATA) $(INCLUDE_DELIVERABLES) "$(RELSYSDIR)/include"
release_docs_spec:
diff --git a/lib/syntax_tools/src/epp_dodger.erl b/lib/syntax_tools/src/epp_dodger.erl
index 7e12eab1b5..39c522fd11 100644
--- a/lib/syntax_tools/src/epp_dodger.erl
+++ b/lib/syntax_tools/src/epp_dodger.erl
@@ -88,7 +88,7 @@
%% This is a so-called Erlang I/O ErrorInfo structure; see the {@link
%% //stdlib/io} module for details.
--type errorinfo() :: term(). % {integer(), atom(), term()}.
+-type errorinfo() :: {integer(), atom(), term()}.
-type option() :: atom() | {atom(), term()}.
@@ -208,8 +208,8 @@ do_parse_file(DefEncoding, File, Parser, Options) ->
try Parser(Dev, 1, Options)
after ok = file:close(Dev)
end;
- {error, _} = Error ->
- Error
+ {error, Error} ->
+ {error, {0, file, Error}} % defer to file:format_error/1
end.
find_invalid_unicode([H|T]) ->
@@ -454,7 +454,7 @@ io_error(L, Desc) ->
{L, ?MODULE, Desc}.
start_pos([T | _Ts], _L) ->
- element(2, T);
+ erl_anno:line(element(2, T));
start_pos([], L) ->
L.
diff --git a/lib/syntax_tools/src/erl_prettypr.erl b/lib/syntax_tools/src/erl_prettypr.erl
index 877675772f..81272e62de 100644
--- a/lib/syntax_tools/src/erl_prettypr.erl
+++ b/lib/syntax_tools/src/erl_prettypr.erl
@@ -50,8 +50,7 @@
| fun((erl_syntax:syntaxTree(), _, _) -> prettypr:document()).
-type clause_t() :: 'case_expr' | 'cond_expr' | 'fun_expr'
| 'if_expr' | 'receive_expr' | 'try_expr'
- | {'function', prettypr:document()}
- | {'rule', prettypr:document()}.
+ | {'function', prettypr:document()}.
-record(ctxt, {prec = 0 :: integer(),
sub_indent = 2 :: non_neg_integer(),
@@ -587,8 +586,6 @@ lay_2(Node, Ctxt) ->
make_case_clause(D1, D2, D3, Ctxt);
try_expr ->
make_case_clause(D1, D2, D3, Ctxt);
- {rule, N} ->
- make_rule_clause(N, D1, D2, D3, Ctxt);
undefined ->
%% If a clause is formatted out of context, we
%% use a "fun-expression" clause style.
@@ -851,14 +848,10 @@ lay_2(Node, Ctxt) ->
floating(text(".")),
lay(erl_syntax:record_access_field(Node),
set_prec(Ctxt, PrecR))),
- D3 = case erl_syntax:record_access_type(Node) of
- none ->
- D2;
- T ->
- beside(beside(floating(text("#")),
- lay(T, reset_prec(Ctxt))),
- D2)
- end,
+ T = erl_syntax:record_access_type(Node),
+ D3 = beside(beside(floating(text("#")),
+ lay(T, reset_prec(Ctxt))),
+ D2),
maybe_parentheses(beside(D1, D3), Prec, Ctxt);
record_expr ->
@@ -926,15 +919,6 @@ lay_2(Node, Ctxt) ->
D2 = lay(erl_syntax:map_field_exact_value(Node), Ctxt1),
par([D1, floating(text(":=")), D2], Ctxt1#ctxt.break_indent);
- rule ->
- %% Comments on the name will be repeated; cf.
- %% `function'.
- Ctxt1 = reset_prec(Ctxt),
- D1 = lay(erl_syntax:rule_name(Node), Ctxt1),
- D2 = lay_clauses(erl_syntax:rule_clauses(Node),
- {rule, D1}, Ctxt1),
- beside(D2, floating(text(".")));
-
size_qualifier ->
Ctxt1 = set_prec(Ctxt, max_prec()),
D1 = lay(erl_syntax:size_qualifier_body(Node), Ctxt1),
@@ -1073,10 +1057,6 @@ make_fun_clause_head(N, P, Ctxt) ->
beside(N, D)
end.
-make_rule_clause(N, P, G, B, Ctxt) ->
- D = make_fun_clause_head(N, P, Ctxt),
- append_rule_body(B, append_guard(G, D, Ctxt), Ctxt).
-
make_case_clause(P, G, B, Ctxt) ->
append_clause_body(B, append_guard(G, P, Ctxt), Ctxt).
@@ -1092,9 +1072,6 @@ make_if_clause(_P, G, B, Ctxt) ->
append_clause_body(B, D, Ctxt) ->
append_clause_body(B, D, floating(text(" ->")), Ctxt).
-append_rule_body(B, D, Ctxt) ->
- append_clause_body(B, D, floating(text(" :-")), Ctxt).
-
append_clause_body(B, D, S, Ctxt) ->
sep([beside(D, S), nest(Ctxt#ctxt.break_indent, B)]).
diff --git a/lib/syntax_tools/src/erl_recomment.erl b/lib/syntax_tools/src/erl_recomment.erl
index 7b2f9f7adb..72e1e2d2f5 100644
--- a/lib/syntax_tools/src/erl_recomment.erl
+++ b/lib/syntax_tools/src/erl_recomment.erl
@@ -123,7 +123,6 @@ recomment_forms(Tree, Cs, Insert) ->
form_list ->
Tree1 = erl_syntax:flatten_form_list(Tree),
Node = build_tree(Tree1),
-
%% Here we make a small assumption about the substructure of
%% a `form_list' tree: it has exactly one group of subtrees.
[Node1] = node_subtrees(Node),
@@ -753,7 +752,13 @@ get_line(Node) ->
{_, L, _} when is_integer(L) ->
L;
Pos ->
- exit({bad_position, Pos})
+ try erl_anno:line(Pos) of
+ Line ->
+ Line
+ catch
+ _:_ ->
+ exit({bad_position, Pos})
+ end
end.
diff --git a/lib/syntax_tools/src/erl_syntax.erl b/lib/syntax_tools/src/erl_syntax.erl
index de271d7f2f..3f2a3e05dd 100644
--- a/lib/syntax_tools/src/erl_syntax.erl
+++ b/lib/syntax_tools/src/erl_syntax.erl
@@ -254,7 +254,6 @@
receive_expr_action/1,
receive_expr_clauses/1,
receive_expr_timeout/1,
- record_access/2,
record_access/3,
record_access_argument/1,
record_access_field/1,
@@ -271,10 +270,6 @@
record_index_expr/2,
record_index_expr_field/1,
record_index_expr_type/1,
- rule/2,
- rule_arity/1,
- rule_clauses/1,
- rule_name/1,
size_qualifier/2,
size_qualifier_argument/1,
size_qualifier_body/1,
@@ -472,19 +467,16 @@
%% <td>record_field</td>
%% </tr><tr>
%% <td>record_index_expr</td>
-%% <td>rule</td>
%% <td>size_qualifier</td>
%% <td>string</td>
-%% </tr><tr>
%% <td>text</td>
+%% </tr><tr>
%% <td>try_expr</td>
%% <td>tuple</td>
%% <td>underscore</td>
-%% </tr><tr>
%% <td>variable</td>
+%% </tr><tr>
%% <td>warning_marker</td>
-%% <td></td>
-%% <td></td>
%% </tr>
%% </table></center>
%%
@@ -540,7 +532,6 @@
%% @see record_expr/2
%% @see record_field/2
%% @see record_index_expr/2
-%% @see rule/2
%% @see size_qualifier/2
%% @see string/1
%% @see text/1
@@ -607,10 +598,8 @@ type(Node) ->
{record, _, _, _, _} -> record_expr;
{record, _, _, _} -> record_expr;
{record_field, _, _, _, _} -> record_access;
- {record_field, _, _, _} -> record_access;
{record_index, _, _, _} -> record_index_expr;
{remote, _, _, _} -> module_qualifier;
- {rule, _, _, _, _} -> rule;
{'try', _, _, _, _, _} -> try_expr;
{tuple, _, _} -> tuple;
_ ->
@@ -693,10 +682,9 @@ is_leaf(Node) ->
%% <td>`comment'</td>
%% <td>`error_marker'</td>
%% <td>`eof_marker'</td>
-%% <td>`form_list'</td>
%% </tr><tr>
+%% <td>`form_list'</td>
%% <td>`function'</td>
-%% <td>`rule'</td>
%% <td>`warning_marker'</td>
%% <td>`text'</td>
%% </tr>
@@ -709,7 +697,6 @@ is_leaf(Node) ->
%% @see error_marker/1
%% @see form_list/1
%% @see function/2
-%% @see rule/2
%% @see warning_marker/1
-spec is_form(syntaxTree()) -> boolean().
@@ -722,7 +709,6 @@ is_form(Node) ->
eof_marker -> true;
error_marker -> true;
form_list -> true;
- rule -> true;
warning_marker -> true;
text -> true;
_ -> false
@@ -3480,7 +3466,6 @@ module_qualifier_body(Node) ->
%% @see function_clauses/1
%% @see function_arity/1
%% @see is_form/1
-%% @see rule/2
%% Don't use the name 'function' for this record, to avoid confusion with
%% the tuples on the form {function,Name,Arity} used by erl_parse.
@@ -4310,49 +4295,32 @@ record_index_expr_field(Node) ->
%% =====================================================================
-%% @equiv record_access(Argument, none, Field)
-
--spec record_access(syntaxTree(), syntaxTree()) -> syntaxTree().
-
-record_access(Argument, Field) ->
- record_access(Argument, none, Field).
-
-
-%% =====================================================================
-%% @doc Creates an abstract record field access expression. If
-%% `Type' is not `none', the result represents
-%% "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>".
-%%
-%% If `Type' is `none', the result represents
-%% "<code><em>Argument</em>.<em>Field</em></code>". This is a special
-%% form only allowed within Mnemosyne queries.
+%% @doc Creates an abstract record field access expression. The result
+%% represents "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>".
%%
-%% @see record_access/2
%% @see record_access_argument/1
%% @see record_access_type/1
%% @see record_access_field/1
%% @see record_expr/3
-record(record_access, {argument :: syntaxTree(),
- type :: 'none' | syntaxTree(),
+ type :: syntaxTree(),
field :: syntaxTree()}).
%% type(Node) = record_access
%% data(Node) = #record_access{argument :: Argument, type :: Type,
%% field :: Field}
%%
-%% Argument = Field = syntaxTree()
-%% Type = none | syntaxTree()
+%% Argument = Type = Field = syntaxTree()
%%
%% `erl_parse' representation:
%%
%% {record_field, Pos, Argument, Type, Field}
-%% {record_field, Pos, Argument, Field}
%%
%% Argument = Field = erl_parse()
%% Type = atom()
--spec record_access(syntaxTree(), 'none' | syntaxTree(), syntaxTree()) ->
+-spec record_access(syntaxTree(), syntaxTree(), syntaxTree()) ->
syntaxTree().
record_access(Argument, Type, Field) ->
@@ -4365,16 +4333,11 @@ revert_record_access(Node) ->
Argument = record_access_argument(Node),
Type = record_access_type(Node),
Field = record_access_field(Node),
- if Type =:= none ->
- {record_field, Pos, Argument, Field};
- true ->
- case type(Type) of
- atom ->
- {record_field, Pos,
- Argument, concrete(Type), Field};
- _ ->
- Node
- end
+ case type(Type) of
+ atom ->
+ {record_field, Pos, Argument, concrete(Type), Field};
+ _ ->
+ Node
end.
@@ -4387,8 +4350,6 @@ revert_record_access(Node) ->
record_access_argument(Node) ->
case unwrap(Node) of
- {record_field, _, Argument, _} ->
- Argument;
{record_field, _, Argument, _, _} ->
Argument;
Node1 ->
@@ -4397,21 +4358,14 @@ record_access_argument(Node) ->
%% =====================================================================
-%% @doc Returns the type subtree of a `record_access' node,
-%% if any. If `Node' represents
-%% "<code><em>Argument</em>.<em>Field</em></code>", `none'
-%% is returned, otherwise if `Node' represents
-%% "<code><em>Argument</em>#<em>Type</em>.<em>Field</em></code>",
-%% `Type' is returned.
+%% @doc Returns the type subtree of a `record_access' node.
%%
%% @see record_access/3
--spec record_access_type(syntaxTree()) -> 'none' | syntaxTree().
+-spec record_access_type(syntaxTree()) -> syntaxTree().
record_access_type(Node) ->
case unwrap(Node) of
- {record_field, _, _, _} ->
- none;
{record_field, Pos, _, Type, _} ->
set_pos(atom(Type), Pos);
Node1 ->
@@ -4428,8 +4382,6 @@ record_access_type(Node) ->
record_access_field(Node) ->
case unwrap(Node) of
- {record_field, _, _, Field} ->
- Field;
{record_field, _, _, _, Field} ->
Field;
Node1 ->
@@ -4808,117 +4760,6 @@ binary_comp_body(Node) ->
%% =====================================================================
-%% @doc Creates an abstract Mnemosyne rule. If `Clauses' is
-%% `[C1, ..., Cn]', the results represents
-%% "<code><em>Name</em> <em>C1</em>; ...; <em>Name</em>
-%% <em>Cn</em>.</code>". More exactly, if each `Ci'
-%% represents "<code>(<em>Pi1</em>, ..., <em>Pim</em>) <em>Gi</em> ->
-%% <em>Bi</em></code>", then the result represents
-%% "<code><em>Name</em>(<em>P11</em>, ..., <em>P1m</em>) <em>G1</em> :-
-%% <em>B1</em>; ...; <em>Name</em>(<em>Pn1</em>, ..., <em>Pnm</em>)
-%% <em>Gn</em> :- <em>Bn</em>.</code>". Rules are source code forms.
-%%
-%% @see rule_name/1
-%% @see rule_clauses/1
-%% @see rule_arity/1
-%% @see is_form/1
-%% @see function/2
-
--record(rule, {name :: syntaxTree(), clauses :: [syntaxTree()]}).
-
-%% type(Node) = rule
-%% data(Node) = #rule{name :: Name, clauses :: Clauses}
-%%
-%% Name = syntaxTree()
-%% Clauses = [syntaxTree()]
-%%
-%% (See `function' for notes on why the arity is not stored.)
-%%
-%% `erl_parse' representation:
-%%
-%% {rule, Pos, Name, Arity, Clauses}
-%%
-%% Name = atom()
-%% Arity = integer()
-%% Clauses = [Clause] \ []
-%% Clause = {clause, ...}
-%%
-%% where the number of patterns in each clause should be equal to
-%% the integer `Arity'; see `clause' for documentation on
-%% `erl_parse' clauses.
-
--spec rule(syntaxTree(), [syntaxTree()]) -> syntaxTree().
-
-rule(Name, Clauses) ->
- tree(rule, #rule{name = Name, clauses = Clauses}).
-
-revert_rule(Node) ->
- Name = rule_name(Node),
- Clauses = [revert_clause(C) || C <- rule_clauses(Node)],
- Pos = get_pos(Node),
- case type(Name) of
- atom ->
- A = rule_arity(Node),
- {rule, Pos, concrete(Name), A, Clauses};
- _ ->
- Node
- end.
-
-
-%% =====================================================================
-%% @doc Returns the name subtree of a `rule' node.
-%%
-%% @see rule/2
-
--spec rule_name(syntaxTree()) -> syntaxTree().
-
-rule_name(Node) ->
- case unwrap(Node) of
- {rule, Pos, Name, _, _} ->
- set_pos(atom(Name), Pos);
- Node1 ->
- (data(Node1))#rule.name
- end.
-
-%% =====================================================================
-%% @doc Returns the list of clause subtrees of a `rule' node.
-%%
-%% @see rule/2
-
--spec rule_clauses(syntaxTree()) -> [syntaxTree()].
-
-rule_clauses(Node) ->
- case unwrap(Node) of
- {rule, _, _, _, Clauses} ->
- Clauses;
- Node1 ->
- (data(Node1))#rule.clauses
- end.
-
-%% =====================================================================
-%% @doc Returns the arity of a `rule' node. The result is the
-%% number of parameter patterns in the first clause of the rule;
-%% subsequent clauses are ignored.
-%%
-%% An exception is thrown if `rule_clauses(Node)' returns
-%% an empty list, or if the first element of that list is not a syntax
-%% tree `C' of type `clause' such that
-%% `clause_patterns(C)' is a nonempty list.
-%%
-%% @see rule/2
-%% @see rule_clauses/1
-%% @see clause/3
-%% @see clause_patterns/1
-
--spec rule_arity(syntaxTree()) -> arity().
-
-rule_arity(Node) ->
- %% Note that this never accesses the arity field of
- %% `erl_parse' rule nodes.
- length(clause_patterns(hd(rule_clauses(Node)))).
-
-
-%% =====================================================================
%% @doc Creates an abstract generator. The result represents
%% "<code><em>Pattern</em> &lt;- <em>Body</em></code>".
%%
@@ -6403,8 +6244,6 @@ revert_root(Node) ->
revert_record_expr(Node);
record_index_expr ->
revert_record_index_expr(Node);
- rule ->
- revert_rule(Node);
string ->
revert_string(Node);
try_expr ->
@@ -6661,15 +6500,9 @@ subtrees(T) ->
receive_expr_action(T)]
end;
record_access ->
- case record_access_type(T) of
- none ->
- [[record_access_argument(T)],
- [record_access_field(T)]];
- R ->
- [[record_access_argument(T)],
- [R],
- [record_access_field(T)]]
- end;
+ [[record_access_argument(T)],
+ [record_access_type(T)],
+ [record_access_field(T)]];
record_expr ->
case record_expr_argument(T) of
none ->
@@ -6690,8 +6523,6 @@ subtrees(T) ->
record_index_expr ->
[[record_index_expr_type(T)],
[record_index_expr_field(T)]];
- rule ->
- [[rule_name(T)], rule_clauses(T)];
size_qualifier ->
[[size_qualifier_body(T)],
[size_qualifier_argument(T)]];
@@ -6786,8 +6617,6 @@ make_tree(parentheses, [[E]]) -> parentheses(E);
make_tree(prefix_expr, [[F], [A]]) -> prefix_expr(F, A);
make_tree(receive_expr, [C]) -> receive_expr(C);
make_tree(receive_expr, [C, [E], A]) -> receive_expr(C, E, A);
-make_tree(record_access, [[E], [F]]) ->
- record_access(E, F);
make_tree(record_access, [[E], [T], [F]]) ->
record_access(E, T, F);
make_tree(record_expr, [[T], F]) -> record_expr(T, F);
@@ -6796,7 +6625,6 @@ make_tree(record_field, [[N]]) -> record_field(N);
make_tree(record_field, [[N], [E]]) -> record_field(N, E);
make_tree(record_index_expr, [[T], [F]]) ->
record_index_expr(T, F);
-make_tree(rule, [[N], C]) -> rule(N, C);
make_tree(size_qualifier, [[N], [A]]) -> size_qualifier(N, A);
make_tree(try_expr, [B, C, H, A]) -> try_expr(B, C, H, A);
make_tree(tuple, [E]) -> tuple(E).
diff --git a/lib/syntax_tools/src/erl_syntax_lib.erl b/lib/syntax_tools/src/erl_syntax_lib.erl
index 2f0488abec..5b5b18d15b 100644
--- a/lib/syntax_tools/src/erl_syntax_lib.erl
+++ b/lib/syntax_tools/src/erl_syntax_lib.erl
@@ -35,8 +35,7 @@
analyze_function_name/1, analyze_implicit_fun/1,
analyze_import_attribute/1, analyze_module_attribute/1,
analyze_record_attribute/1, analyze_record_expr/1,
- analyze_record_field/1, analyze_rule/1,
- analyze_wild_attribute/1, annotate_bindings/1,
+ analyze_record_field/1, analyze_wild_attribute/1, annotate_bindings/1,
annotate_bindings/2, fold/3, fold_subtrees/3, foldl_listlist/3,
function_name_expansions/1, is_fail_expr/1, limit/2, limit/3,
map/2, map_subtrees/2, mapfold/3, mapfold_subtrees/3,
@@ -527,8 +526,6 @@ vann(Tree, Env) ->
vann_try_expr(Tree, Env);
function ->
vann_function(Tree, Env);
- rule ->
- vann_rule(Tree, Env);
fun_expr ->
vann_fun_expr(Tree, Env);
list_comp ->
@@ -569,15 +566,6 @@ vann_function(Tree, Env) ->
Bound = [],
{ann_bindings(Tree1, Env, Bound, Free), Bound, Free}.
-vann_rule(Tree, Env) ->
- Cs = erl_syntax:rule_clauses(Tree),
- {Cs1, {_, Free}} = vann_clauses(Cs, Env),
- N = erl_syntax:rule_name(Tree),
- {N1, _, _} = vann(N, Env),
- Tree1 = rewrite(Tree, erl_syntax:rule(N1, Cs1)),
- Bound = [],
- {ann_bindings(Tree1, Env, Bound, Free), Bound, Free}.
-
vann_fun_expr(Tree, Env) ->
Cs = erl_syntax:fun_expr_clauses(Tree),
{Cs1, {_, Free}} = vann_clauses(Cs, Env),
@@ -946,7 +934,7 @@ is_fail_expr(E) ->
%%
%% Forms = syntaxTree() | [syntaxTree()]
%% Key = attributes | errors | exports | functions | imports
-%% | module | records | rules | warnings
+%% | module | records | warnings
%%
%% @doc Analyzes a sequence of "program forms". The given
%% `Forms' may be a single syntax tree of type
@@ -1047,16 +1035,6 @@ is_fail_expr(E) ->
%% that each record name occurs at most once in the list. The
%% order of listing is not defined.</dd>
%%
-%% <dt>`{rules, Rules}'</dt>
-%% <dd><ul>
-%% <li>`Rules = [{atom(), integer()}]'</li>
-%% </ul>
-%% `Rules' is a list of the names of the rules that are
-%% defined in `Forms' (cf.
-%% `analyze_rule/1'). We do not guarantee that each
-%% name occurs at most once in the list. The order of listing is
-%% not defined.</dd>
-%%
%% <dt>`{warnings, Warnings}'</dt>
%% <dd><ul>
%% <li>`Warnings = [term()]'</li>
@@ -1074,12 +1052,11 @@ is_fail_expr(E) ->
%% @see analyze_import_attribute/1
%% @see analyze_record_attribute/1
%% @see analyze_function/1
-%% @see analyze_rule/1
%% @see erl_syntax:error_marker_info/1
%% @see erl_syntax:warning_marker_info/1
-type key() :: 'attributes' | 'errors' | 'exports' | 'functions' | 'imports'
- | 'module' | 'records' | 'rules' | 'warnings'.
+ | 'module' | 'records' | 'warnings'.
-type info_pair() :: {key(), term()}.
-spec analyze_forms(erl_syntax:forms()) -> [info_pair()].
@@ -1099,8 +1076,6 @@ collect_form(Node, Info) ->
Info;
{function, Name} ->
finfo_add_function(Name, Info);
- {rule, Name} ->
- finfo_add_rule(Name, Info);
{error_marker, Data} ->
finfo_add_error(Data, Info);
{warning_marker, Data} ->
@@ -1136,8 +1111,7 @@ collect_attribute(_, {N, V}, Info) ->
records = [] :: [{atom(), [{atom(), field_default()}]}],
errors = [] :: [term()],
warnings = [] :: [term()],
- functions = [] :: [{atom(), arity()}],
- rules = [] :: [{atom(), arity()}]}).
+ functions = [] :: [{atom(), arity()}]}).
-type field_default() :: 'none' | erl_syntax:syntaxTree().
@@ -1183,9 +1157,6 @@ finfo_add_warning(R, Info) ->
finfo_add_function(F, Info) ->
Info#forms{functions = [F | Info#forms.functions]}.
-finfo_add_rule(F, Info) ->
- Info#forms{rules = [F | Info#forms.rules]}.
-
finfo_to_list(Info) ->
[{Key, Value}
|| {Key, {value, Value}} <-
@@ -1197,8 +1168,7 @@ finfo_to_list(Info) ->
{records, list_value(Info#forms.records)},
{errors, list_value(Info#forms.errors)},
{warnings, list_value(Info#forms.warnings)},
- {functions, list_value(Info#forms.functions)},
- {rules, list_value(Info#forms.rules)}
+ {functions, list_value(Info#forms.functions)}
]].
list_value([]) ->
@@ -1229,10 +1199,6 @@ list_value(List) ->
%%
%% <dd>where `Info = analyze_function(Node)'.</dd>
%%
-%% <dt>`{rule, Info}'</dt>
-%%
-%% <dd>where `Info = analyze_rule(Node)'.</dd>
-%%
%% <dt>`{warning_marker, Info}'</dt>
%%
%% <dd>where `Info =
@@ -1245,7 +1211,6 @@ list_value(List) ->
%%
%% @see analyze_attribute/1
%% @see analyze_function/1
-%% @see analyze_rule/1
%% @see erl_syntax:is_form/1
%% @see erl_syntax:error_marker_info/1
%% @see erl_syntax:warning_marker_info/1
@@ -1258,8 +1223,6 @@ analyze_form(Node) ->
{attribute, analyze_attribute(Node)};
function ->
{function, analyze_function(Node)};
- rule ->
- {rule, analyze_rule(Node)};
error_marker ->
{error_marker, erl_syntax:error_marker_info(Node)};
warning_marker ->
@@ -1669,7 +1632,7 @@ analyze_record_attribute_tuple(Node) ->
%% <dt>`record_expr':</dt>
%% <dd>`{atom(), [{atom(), Value}]}'</dd>
%% <dt>`record_access':</dt>
-%% <dd>`{atom(), atom()} | atom()'</dd>
+%% <dd>`{atom(), atom()}'</dd>
%% <dt>`record_index_expr':</dt>
%% <dd>`{atom(), atom()}'</dd>
%% </dl>
@@ -1679,9 +1642,7 @@ analyze_record_attribute_tuple(Node) ->
%% listed in the order they appear. (See
%% `analyze_record_field/1' for details on the field
%% descriptors). For a `record_access' node,
-%% `Info' represents the record name and the field name (or
-%% if the record name is not included, only the field name; this is
-%% allowed only in Mnemosyne-query syntax). For a
+%% `Info' represents the record name and the field name. For a
%% `record_index_expr' node, `Info' represents the
%% record name and the name field name.
%%
@@ -1713,18 +1674,14 @@ analyze_record_expr(Node) ->
F = erl_syntax:record_access_field(Node),
case erl_syntax:type(F) of
atom ->
- case erl_syntax:record_access_type(Node) of
- none ->
- {record_access, erl_syntax:atom_value(F)};
- A ->
- case erl_syntax:type(A) of
- atom ->
- {record_access,
- {erl_syntax:atom_value(A),
- erl_syntax:atom_value(F)}};
- _ ->
- throw(syntax_error)
- end
+ A = erl_syntax:record_access_type(Node),
+ case erl_syntax:type(A) of
+ atom ->
+ {record_access,
+ {erl_syntax:atom_value(A),
+ erl_syntax:atom_value(F)}};
+ _ ->
+ throw(syntax_error)
end;
_ ->
throw(syntax_error)
@@ -1835,8 +1792,6 @@ analyze_file_attribute(Node) ->
%% The evaluation throws `syntax_error' if
%% `Node' does not represent a well-formed function
%% definition.
-%%
-%% @see analyze_rule/1
-spec analyze_function(erl_syntax:syntaxTree()) -> {atom(), arity()}.
@@ -1857,37 +1812,6 @@ analyze_function(Node) ->
%% =====================================================================
-%% @spec analyze_rule(Node::syntaxTree()) -> {atom(), integer()}
-%%
-%% @doc Returns the name and arity of a Mnemosyne rule. The result is a
-%% pair `{Name, A}' if `Node' represents a rule
-%% "`Name(<em>P_1</em>, ..., <em>P_A</em>) :- ...'".
-%%
-%% The evaluation throws `syntax_error' if
-%% `Node' does not represent a well-formed Mnemosyne
-%% rule.
-%%
-%% @see analyze_function/1
-
--spec analyze_rule(erl_syntax:syntaxTree()) -> {atom(), arity()}.
-
-analyze_rule(Node) ->
- case erl_syntax:type(Node) of
- rule ->
- N = erl_syntax:rule_name(Node),
- case erl_syntax:type(N) of
- atom ->
- {erl_syntax:atom_value(N),
- erl_syntax:rule_arity(Node)};
- _ ->
- throw(syntax_error)
- end;
- _ ->
- throw(syntax_error)
- end.
-
-
-%% =====================================================================
%% @spec analyze_implicit_fun(Node::syntaxTree()) -> FunctionName
%%
%% FunctionName = atom() | {atom(), integer()}
diff --git a/lib/syntax_tools/src/erl_tidy.erl b/lib/syntax_tools/src/erl_tidy.erl
index 38e0c2099b..db7f0939a3 100644
--- a/lib/syntax_tools/src/erl_tidy.erl
+++ b/lib/syntax_tools/src/erl_tidy.erl
@@ -792,16 +792,11 @@ keep_form(Form, Used, Opts) ->
N = erl_syntax_lib:analyze_function(Form),
case sets:is_element(N, Used) of
false ->
- report_removed_def("function", N, Form, Opts),
- false;
- true ->
- true
- end;
- rule ->
- N = erl_syntax_lib:analyze_rule(Form),
- case sets:is_element(N, Used) of
- false ->
- report_removed_def("rule", N, Form, Opts),
+ {F, A} = N,
+ File = proplists:get_value(file, Opts, ""),
+ report({File, erl_syntax:get_pos(Form),
+ "removing unused function `~w/~w'."},
+ [F, A], Opts),
false;
true ->
true
@@ -823,12 +818,6 @@ keep_form(Form, Used, Opts) ->
true
end.
-report_removed_def(Type, {N, A}, Form, Opts) ->
- File = proplists:get_value(file, Opts, ""),
- report({File, erl_syntax:get_pos(Form),
- "removing unused ~s `~w/~w'."},
- [Type, N, A], Opts).
-
collect_functions(Forms) ->
lists:foldl(
fun (F, {Names, Defs}) ->
@@ -837,10 +826,6 @@ collect_functions(Forms) ->
N = erl_syntax_lib:analyze_function(F),
{sets:add_element(N, Names),
dict:store(N, {F, []}, Defs)};
- rule ->
- N = erl_syntax_lib:analyze_rule(F),
- {sets:add_element(N, Names),
- dict:store(N, {F, []}, Defs)};
_ ->
{Names, Defs}
end
@@ -855,11 +840,6 @@ update_forms([F | Fs], Defs, Imports, Opts) ->
{F1, Fs1} = dict:fetch(N, Defs),
[F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports,
Opts);
- rule ->
- N = erl_syntax_lib:analyze_rule(F),
- {F1, Fs1} = dict:fetch(N, Defs),
- [F1 | lists:reverse(Fs1)] ++ update_forms(Fs, Defs, Imports,
- Opts);
attribute ->
[update_attribute(F, Imports, Opts)
| update_forms(Fs, Defs, Imports, Opts)];
diff --git a/lib/syntax_tools/src/igor.erl b/lib/syntax_tools/src/igor.erl
index 0420508f2a..eac5af5540 100644
--- a/lib/syntax_tools/src/igor.erl
+++ b/lib/syntax_tools/src/igor.erl
@@ -1713,8 +1713,6 @@ transform(Tree, Env, St) ->
transform_function(Tree, Env, St);
implicit_fun ->
transform_implicit_fun(Tree, Env, St);
- rule ->
- transform_rule(Tree, Env, St);
record_expr ->
transform_record(Tree, Env, St);
record_index_expr ->
@@ -1778,27 +1776,6 @@ renaming_note(Name) ->
rename_atom(Node, Atom) ->
rewrite(Node, erl_syntax:atom(Atom)).
-%% Renaming Mnemosyne rules (just like function definitions)
-
-transform_rule(T, Env, St) ->
- {T1, St1} = default_transform(T, Env, St),
- F = erl_syntax_lib:analyze_rule(T1),
- {V, Text} = case (Env#code.map)(F) of
- F ->
- %% Not renamed
- {none, []};
- {Atom, _Arity} ->
- %% Renamed
- Cs = erl_syntax:rule_clauses(T1),
- N = rename_atom(
- erl_syntax:rule_name(T1),
- Atom),
- T2 = rewrite(T1,
- erl_syntax:rule(N, Cs)),
- {{value, T2}, renaming_note(Atom)}
- end,
- {maybe_modified(V, T1, 2, Text, Env), St1}.
-
%% Renaming "implicit fun" expressions (done quietly).
transform_implicit_fun(T, Env, St) ->
diff --git a/lib/syntax_tools/src/merl.erl b/lib/syntax_tools/src/merl.erl
new file mode 100644
index 0000000000..690306c17b
--- /dev/null
+++ b/lib/syntax_tools/src/merl.erl
@@ -0,0 +1,1230 @@
+%% ---------------------------------------------------------------------
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may
+%% not use this file except in compliance with the License. You may obtain
+%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% Note: EDoc uses @@ and @} as escape sequences, so in the doc text below,
+%% `@@' must be written `@@@@' and `@}' must be written `@@}'.
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 2010-2015 Richard Carlsson
+%%
+%% @doc Metaprogramming in Erlang.
+%% Merl is a more user-friendly interface to the `erl_syntax' module, making
+%% it easy both to build new ASTs from scratch and to
+%% match and decompose existing ASTs. For details that are outside the scope
+%% of Merl itself, please see the documentation of {@link erl_syntax}.
+%%
+%% == Quick start ==
+%%
+%% To enable the full power of Merl, your module needs to include the Merl
+%% header file:
+%% ```-include_lib("syntax_tools/include/merl.hrl").'''
+%%
+%% Then, you can use the `?Q(Text)' macros in your code to create ASTs or match
+%% on existing ASTs. For example:
+%% ```Tuple = ?Q("{foo, 42}"),
+%% ?Q("{foo, _@Number}") = Tuple,
+%% Call = ?Q("foo:bar(_@Number)")'''
+%%
+%% Calling `merl:print(Call)' will then print the following code:
+%% ```foo:bar(42)'''
+%%
+%% The `?Q' macros turn the quoted code fragments into ASTs, and lifts
+%% metavariables such as `_@Tuple' and `_@Number' to the level of your Erlang
+%% code, so you can use the corresponding Erlang variables `Tuple' and `Number'
+%% directly. This is the most straightforward way to use Merl, and in many
+%% cases it's all you need.
+%%
+%% You can even write case switches using `?Q' macros as patterns. For example:
+%% ```case AST of
+%% ?Q("{foo, _@Foo}") -> handle(Foo);
+%% ?Q("{bar, _@Bar}") when erl_syntax:is_integer(Bar) -> handle(Bar);
+%% _ -> handle_default()
+%% end'''
+%%
+%% These case switches only allow `?Q(...)' or `_' as clause patterns, and the
+%% guards may contain any expressions, not just Erlang guard expressions.
+%%
+%% If the macro `MERL_NO_TRANSFORM' is defined before the `merl.hrl' header
+%% file is included, the parse transform used by Merl will be disabled, and in
+%% that case, the match expressions `?Q(...) = ...', case switches using
+%% `?Q(...)' patterns, and automatic metavariables like `_@Tuple' cannot be
+%% used in your code, but the Merl macros and functions still work. To do
+%% metavariable substitution, you need to use the `?Q(Text, Map)' macro, e.g.:
+%% ```Tuple = ?Q("{foo, _@bar, _@baz}", [{bar, Bar}, {baz,Baz}])'''
+%%
+%% The text given to a `?Q(Text)' macro can be either a single string, or a
+%% list of strings. The latter is useful when you need to split a long
+%% expression over multiple lines, e.g.:
+%% ```?Q(["case _@Expr of",
+%% " {foo, X} -> f(X);",
+%% " {bar, X} -> g(X)",
+%% " _ -> h(X)"
+%% "end"])'''
+%% If there is a syntax error somewhere in the text (like the missing semicolon
+%% in the second clause above) this allows Merl to generate an error message
+%% pointing to the exact line in your source code. (Just remember to
+%% comma-separate the strings in the list, otherwise Erlang will concatenate
+%% the string fragments as if they were a single string.)
+%%
+%% == Metavariable syntax ==
+%%
+%% There are several ways to write a metavariable in your quoted code:
+%% <ul>
+%% <li>Atoms starting with `@', for example `` '@foo' '' or `` '@Foo' ''</li>
+%% <li>Variables starting with `_@', for example `_@bar' or `_@Bar'</li>
+%% <li>Strings starting with ``"'@'', for example ``"'@File"''</li>
+%% <li>Integers starting with 909, for example `9091' or `909123'</li>
+%% </ul>
+%% Following the prefix, one or more `_' or `0' characters may be used to
+%% indicate "lifting" of the variable one or more levels, and after that, a `@'
+%% or `9' character indicates a glob metavariable (matching zero or more
+%% elements in a sequence) rather than a normal metavariable. For example:
+%% <ul>
+%% <li>`` '@_foo' '' is lifted one level, and `_@__foo' is lifted two
+%% levels</li>
+%% <li>`_@@@@bar' is a glob variable, and `_@_@bar' is a lifted glob
+%% variable</li>
+%% <li>`90901' is a lifted variable, `90991' is a glob variable, and `9090091'
+%% is a glob variable lifted two levels</li>
+%% </ul>
+%% (Note that the last character in the name is never considered to be a lift
+%% or glob marker, hence, `_@__' and `90900' are only lifted one level, not
+%% two. Also note that globs only matter for matching; when doing
+%% substitutions, a non-glob variable can be used to inject a sequence of
+%% elements, and vice versa.)
+%%
+%% If the name after the prefix and any lift and glob markers is `_' or `0',
+%% the variable is treated as an anonymous catch-all pattern in matches. For
+%% example, `_@_', `_@@@@_', `_@__', or even `_@__@_'.
+%%
+%% Finally, if the name without any prefixes or lift/glob markers begins with
+%% an uppercase character, as in `_@Foo' or `_@_@Foo', it will become a
+%% variable on the Erlang level, and can be used to easily deconstruct and
+%% construct syntax trees:
+%% ```case Input of
+%% ?Q("{foo, _@Number}") -> ?Q("foo:bar(_@Number)");
+%% ...'''
+%% We refer to these as "automatic metavariables". If in addition the name ends
+%% with `@', as in `_@Foo@', the value of the variable as an Erlang term will
+%% be automatically converted to the corresponding abstract syntax tree when
+%% used to construct a larger tree. For example, in:
+%% ```Bar = {bar, 42},
+%% Foo = ?Q("{foo, _@Bar@@}")'''
+%% (where Bar is just some term, not a syntax tree) the result `Foo' will be a
+%% syntax tree representing `{foo, {bar, 42}}'. This avoids the need for
+%% temporary variables in order to inject data, as in
+%% ```TmpBar = erl_syntax:abstract(Bar),
+%% Foo = ?Q("{foo, _@TmpBar}")'''
+%%
+%% If the context requires an integer rather than a variable, an atom, or a
+%% string, you cannot use the uppercase convention to mark an automatic
+%% metavariable. Instead, if the integer (without the `909'-prefix and
+%% lift/glob markers) ends in a `9', the integer will become an Erlang-level
+%% variable prefixed with `Q', and if it ends with `99' it will also be
+%% automatically abstracted. For example, the following will increment the
+%% arity of the exported function f:
+%% ```case Form of
+%% ?Q("-export([f/90919]).") ->
+%% Q2 = erl_syntax:concrete(Q1) + 1,
+%% ?Q("-export([f/909299]).");
+%% ...'''
+%%
+%% == When to use the various forms of metavariables ==
+%%
+%% Merl can only parse a fragment of text if it follows the basic syntactical
+%% rules of Erlang. In most places, a normal Erlang variable can be used as
+%% metavariable, for example:
+%% ```?Q("f(_@Arg)") = Expr'''
+%% but if you want to match on something like the name of a function, you have
+%% to use an atom as metavariable:
+%% ```?Q("'@Name'() -> _@@@@_." = Function'''
+%% (note the anonymous glob variable `_@@@@_' to ignore the function body).
+%%
+%% In some contexts, only a string or an integer is allowed. For example, the
+%% directive `-file(Name, Line)' requires that `Name' is a string literal and
+%% `Line' an integer literal:
+%%
+%% ```?Q("-file(\"'@File\", 9090).") = ?Q("-file(\"foo.erl\", 42).")).'''
+%% This will extract the string literal `"foo.erl"' into the variable `Foo'.
+%% Note the use of the anonymous variable `9090' to ignore the line number. To
+%% match and also bind a metavariable that must be an integer literal, we can
+%% use the convention of ending the integer with a 9, turning it into a
+%% Q-prefixed variable on the Erlang level (see the previous section).
+%%
+%% === Globs ===
+%%
+%% Whenever you want to match out a number of elements in a sequence (zero or
+%% more) rather than a fixed set of elements, you need to use a glob. For
+%% example:
+%% ```?Q("{_@@@@Elements}") = ?Q({a, b, c})'''
+%% will bind Elements to the list of individual syntax trees representing the
+%% atoms `a', `b', and `c'. This can also be used with static prefix and suffix
+%% elements in the sequence. For example:
+%% ```?Q("{a, b, _@@@@Elements}") = ?Q({a, b, c, d})'''
+%% will bind Elements to the list of the `c' and `d' subtrees, and
+%% ```?Q("{_@@@@Elements, c, d}") = ?Q({a, b, c, d})'''
+%% will bind Elements to the list of the `a' and `b' subtrees. You can even use
+%% plain metavariables in the prefix or suffix:
+%% ```?Q("{_@First, _@@@@Rest}") = ?Q({a, b, c})'''
+%% or
+%% ```?Q("{_@@@@_, _@Last}") = ?Q({a, b, c})'''
+%% (ignoring all but the last element). You cannot however have two globs as
+%% part of the same sequence.
+%%
+%% === Lifted metavariables ===
+%%
+%% In some cases, the Erlang syntax rules make it impossible to place a
+%% metavariable directly where you would like it. For example, you cannot
+%% write:
+%% ```?Q("-export([_@@@@Name]).")'''
+%% to match out all name/arity pairs in the export list, or to insert a list of
+%% exports in a declaration, because the Erlang parser only allows elements on
+%% the form `A/I' (where `A' is an atom and `I' an integer) in the export list.
+%% A variable like the above is not allowed, but neither is a single atom or
+%% integer, so `` '@@@@Name' '' or `909919' wouldn't work either.
+%%
+%% What you have to do in such cases is to write your metavariable in a
+%% syntactically valid position, and use lifting markers to denote where it
+%% should really apply, as in:
+%% ```?Q("-export(['@@_@@Name'/0]).")'''
+%% This causes the variable to be lifted (after parsing) to the next higher
+%% level in the syntax tree, replacing that entire subtree. In this case, the
+%% `` '@@_@@Name'/0 '' will be replaced with `` '@@@@Name' '', and the ``/0''
+%% part was just used as dummy notation and will be discarded.
+%%
+%% You may even need to apply lifting more than once. To match the entire
+%% export list as a single syntax tree, you can write:
+%% ```?Q("-export(['@@__Name'/0]).")'''
+%% using two underscores, but with no glob marker this time. This will make the
+%% entire ``['@@__Name'/0]'' part be replaced with `` '@@Name' ''.
+%%
+%% Sometimes, the tree structure of a code fragment isn't very obvious, and
+%% parts of the structure may be invisible when printed as source code. For
+%% instance, a simple function definition like the following:
+%% ```zero() -> 0.'''
+%% consists of the name (the atom `zero'), and a list of clauses containing the
+%% single clause `() -> 0'. The clause consists of an argument list (empty), a
+%% guard (empty), and a body (which is always a list of expressions) containing
+%% the single expression `0'. This means that to match out the name and the
+%% list of clauses of any function, you'll need to use a pattern like
+%% ``?Q("'@Name'() -> _@_@Body.")'', using a dummy clause whose body is a glob
+%% lifted one level.
+%%
+%% To visualize the structure of a syntax tree, you can use the function
+%% `merl:show(T)', which prints a summary. For example, entering
+%% ```merl:show(merl:quote("inc(X, Y) when Y > 0 -> X + Y."))'''
+%% in the Erlang shell will print the following (where the `+' signs separate
+%% groups of subtrees on the same level):
+%% ```function: inc(X, Y) when ... -> X + Y.
+%% atom: inc
+%% +
+%% clause: (X, Y) when ... -> X + Y
+%% variable: X
+%% variable: Y
+%% +
+%% disjunction: Y > 0
+%% conjunction: Y > 0
+%% infix_expr: Y > 0
+%% variable: Y
+%% +
+%% operator: >
+%% +
+%% integer: 0
+%% +
+%% infix_expr: X + Y
+%% variable: X
+%% +
+%% operator: +
+%% +
+%% variable: Y'''
+%%
+%% This shows another important non-obvious case: a clause guard, even if it's
+%% as simple as `Y > 0', always consists of a single disjunction of one or more
+%% conjunctions of tests, much like a tuple of tuples. Thus:
+%% <ul>
+%% <li>``"when _@Guard ->"'' will only match a guard with exactly one
+%% test</li>
+%% <li>``"when _@@@@Guard ->"'' will match a guard with one or more
+%% comma-separated tests (but no semicolons), binding `Guard' to the list
+%% of tests</li>
+%% <li>``"when _@_Guard ->"'' will match just like the previous pattern, but
+%% binds `Guard' to the conjunction subtree</li>
+%% <li>``"when _@_@Guard ->"'' will match an arbitrary nonempty guard,
+%% binding `Guard' to the list of conjunction subtrees</li>
+%% <li>``"when _@__Guard ->"'' will match like the previous pattern, but
+%% binds `Guard' to the whole disjunction subtree</li>
+%% <li>and finally, ``"when _@__@Guard ->"'' will match any clause,
+%% binding `Guard' to `[]' if the guard is empty and to `[Disjunction]'
+%% otherwise</li>
+%% </ul>
+%%
+%% Thus, the following pattern matches all possible clauses:
+%% ```"(_@@Args) when _@__@Guard -> _@@Body"'''
+%% @end
+
+-module(merl).
+
+-export([term/1, var/1, print/1, show/1]).
+
+-export([quote/1, quote/2, qquote/2, qquote/3]).
+
+-export([template/1, tree/1, subst/2, tsubst/2, alpha/2, match/2, switch/2]).
+
+-export([template_vars/1, meta_template/1]).
+
+-export([compile/1, compile/2, compile_and_load/1, compile_and_load/2]).
+
+%% NOTE: this module must not include merl.hrl!
+
+-type tree() :: erl_syntax:syntaxTree().
+
+-type tree_or_trees() :: tree() | [tree()].
+
+-type pattern() :: tree() | template().
+
+-type pattern_or_patterns() :: pattern() | [pattern()].
+
+-type env() :: [{Key::id(), pattern_or_patterns()}].
+
+-type id() :: atom() | integer().
+
+%% A list of strings or binaries is assumed to represent individual lines,
+%% while a flat string or binary represents source code containing newlines.
+-type text() :: string() | binary() | [string()] | [binary()].
+
+-type location() :: erl_anno:location().
+
+
+%% ------------------------------------------------------------------------
+%% Compiling and loading code directly to memory
+
+%% @equiv compile(Code, [])
+compile(Code) ->
+ compile(Code, []).
+
+%% @doc Compile a syntax tree or list of syntax trees representing a module
+%% into a binary BEAM object.
+%% @see compile_and_load/2
+%% @see compile/1
+compile(Code, Options) when not is_list(Code) ->
+ case type(Code) of
+ form_list -> compile(erl_syntax:form_list_elements(Code), Options);
+ _ -> compile([Code], Options)
+ end;
+compile(Code, Options0) when is_list(Options0) ->
+ Forms = [erl_syntax:revert(F) || F <- Code],
+ Options = [verbose, report_errors, report_warnings, binary | Options0],
+ compile:noenv_forms(Forms, Options).
+
+
+%% @equiv compile_and_load(Code, [])
+compile_and_load(Code) ->
+ compile_and_load(Code, []).
+
+%% @doc Compile a syntax tree or list of syntax trees representing a module
+%% and load the resulting module into memory.
+%% @see compile/2
+%% @see compile_and_load/1
+compile_and_load(Code, Options) ->
+ case compile(Code, Options) of
+ {ok, ModuleName, Binary} ->
+ _ = code:load_binary(ModuleName, "", Binary),
+ {ok, Binary};
+ Other -> Other
+ end.
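+
+%% Example (a sketch; the module name `m' and its single function are
+%% arbitrary):
+%%
+%%     Forms = merl:quote("-module(m). -export([f/0]). f() -> ok."),
+%%     {ok, _Bin} = merl:compile_and_load(Forms),
+%%     m:f(). %% expected to return ok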
+
+
+%% ------------------------------------------------------------------------
+%% Utility functions
+
+
+-spec var(atom()) -> tree().
+
+%% @doc Create a variable.
+
+var(Name) ->
+ erl_syntax:variable(Name).
+
+
+-spec term(term()) -> tree().
+
+%% @doc Create a syntax tree for a constant term.
+
+term(Term) ->
+ erl_syntax:abstract(Term).
+
+
+%% @doc Pretty-print a syntax tree or template to the standard output. This
+%% is a utility function for development and debugging.
+
+print(Ts) when is_list(Ts) ->
+ lists:foreach(fun print/1, Ts);
+print(T) ->
+ io:put_chars(erl_prettypr:format(tree(T))),
+ io:nl().
+
+%% @doc Print the structure of a syntax tree or template to the standard
+%% output. This is a utility function for development and debugging.
+
+show(Ts) when is_list(Ts) ->
+ lists:foreach(fun show/1, Ts);
+show(T) ->
+ io:put_chars(pp(tree(T), 0)),
+ io:nl().
+
+pp(T, I) ->
+ [lists:duplicate(I, $\s),
+ limit(lists:flatten([atom_to_list(type(T)), ": ",
+ erl_prettypr:format(erl_syntax_lib:limit(T,3))]),
+ 79-I),
+ $\n,
+ pp_1(lists:filter(fun (X) -> X =/= [] end, subtrees(T)), I+2)
+ ].
+
+pp_1([G], I) ->
+ pp_2(G, I);
+pp_1([G | Gs], I) ->
+ [pp_2(G, I), lists:duplicate(I, $\s), "+\n" | pp_1(Gs, I)];
+pp_1([], _I) ->
+ [].
+
+pp_2(G, I) ->
+ [pp(E, I) || E <- G].
+
+%% limit string to N characters, stay on a single line and compact whitespace
+limit([$\n | Cs], N) -> limit([$\s | Cs], N);
+limit([$\r | Cs], N) -> limit([$\s | Cs], N);
+limit([$\v | Cs], N) -> limit([$\s | Cs], N);
+limit([$\t | Cs], N) -> limit([$\s | Cs], N);
+limit([$\s, $\s | Cs], N) -> limit([$\s | Cs], N);
+limit([C | Cs], N) when C < 32 -> limit(Cs, N);
+limit([C | Cs], N) when N > 3 -> [C | limit(Cs, N-1)];
+limit([_C1, _C2, _C3, _C4 | _Cs], 3) -> "...";
+limit(Cs, 3) -> Cs;
+limit([_C1, _C2, _C3 | _], 2) -> "..";
+limit(Cs, 2) -> Cs;
+limit([_C1, _C2 | _], 1) -> ".";
+limit(Cs, 1) -> Cs;
+limit(_, _) -> [].
+
+%% ------------------------------------------------------------------------
+%% Parsing and instantiating code fragments
+
+
+-spec qquote(Text::text(), Env::env()) -> tree_or_trees().
+
+%% @doc Parse text and substitute meta-variables.
+%%
+%% @equiv qquote(1, Text, Env)
+
+qquote(Text, Env) ->
+ qquote(1, Text, Env).
+
+
+-spec qquote(StartPos::location(), Text::text(), Env::env()) -> tree_or_trees().
+
+%% @doc Parse text and substitute meta-variables. Takes an initial scanner
+%% starting position as first argument.
+%%
+%% The macro `?Q(Text, Env)' expands to `merl:qquote(?LINE, Text, Env)'.
+%%
+%% @see quote/2
+
+qquote(StartPos, Text, Env) ->
+ subst(quote(StartPos, Text), Env).
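+
+%% For example (a sketch), `merl:qquote(1, "f(_@a)", [{a, merl:term(42)}])'
+%% builds a syntax tree for the call `f(42)'.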
+
+
+-spec quote(Text::text()) -> tree_or_trees().
+
+%% @doc Parse text.
+%%
+%% @equiv quote(1, Text)
+
+quote(Text) ->
+ quote(1, Text).
+
+
+-spec quote(StartPos::location(), Text::text()) -> tree_or_trees().
+
+%% @doc Parse text. Takes an initial scanner starting position as first
+%% argument.
+%%
+%% The macro `?Q(Text)' expands to `merl:quote(?LINE, Text)'.
+%%
+%% @see quote/1
+
+quote({Line, Col}, Text)
+ when is_integer(Line), is_integer(Col) ->
+ quote_1(Line, Col, Text);
+quote(StartPos, Text) when is_integer(StartPos) ->
+ quote_1(StartPos, undefined, Text).
+
+quote_1(StartLine, StartCol, Text) ->
+ %% be backwards compatible as far as R12, ignoring any starting column
+ StartPos = case erlang:system_info(version) of
+ "5.6" ++ _ -> StartLine;
+ "5.7" ++ _ -> StartLine;
+ "5.8" ++ _ -> StartLine;
+ _ when StartCol =:= undefined -> StartLine;
+ _ -> {StartLine, StartCol}
+ end,
+ FlatText = flatten_text(Text),
+ {ok, Ts, _} = erl_scan:string(FlatText, StartPos),
+ merge_comments(StartLine, erl_comment_scan:string(FlatText), parse_1(Ts)).
+
+parse_1(Ts) ->
+ %% if dot tokens are present, it is assumed that the text represents
+ %% complete forms, not dot-terminated expressions or similar
+ case split_forms(Ts) of
+ {ok, Fs} -> parse_forms(Fs);
+ error ->
+ parse_2(Ts)
+ end.
+
+split_forms(Ts) ->
+ split_forms(Ts, [], []).
+
+split_forms([{dot,_}=T|Ts], Fs, As) ->
+ split_forms(Ts, [lists:reverse(As, [T]) | Fs], []);
+split_forms([T|Ts], Fs, As) ->
+ split_forms(Ts, Fs, [T|As]);
+split_forms([], Fs, []) ->
+ {ok, lists:reverse(Fs)};
+split_forms([], [], _) ->
+ error; % no dot tokens found - not representing form(s)
+split_forms([], _, [T|_]) ->
+ fail("incomplete form after ~p", [T]).
+
+parse_forms([Ts | Tss]) ->
+ case erl_parse:parse_form(Ts) of
+ {ok, Form} -> [Form | parse_forms(Tss)];
+ {error, R} -> parse_error(R)
+ end;
+parse_forms([]) ->
+ [].
+
+parse_2(Ts) ->
+ %% one or more comma-separated expressions?
+ %% (recall that Ts has no dot tokens if we get to this stage)
+ case erl_parse:parse_exprs(Ts ++ [{dot,0}]) of
+ {ok, Exprs} -> Exprs;
+ {error, E} ->
+ parse_3(Ts ++ [{'end',0}, {dot,0}], [E])
+ end.
+
+parse_3(Ts, Es) ->
+ %% try-clause or clauses?
+ case erl_parse:parse_exprs([{'try',0}, {atom,0,true}, {'catch',0} | Ts]) of
+ {ok, [{'try',_,_,_,_,_}=X]} ->
+ %% get the right kind of qualifiers in the clause patterns
+ erl_syntax:try_expr_handlers(X);
+ {error, E} ->
+ parse_4(Ts, [E|Es])
+ end.
+
+parse_4(Ts, Es) ->
+ %% fun-clause or clauses? (`(a)' is also a pattern, but `(a,b)' isn't,
+ %% so fun-clauses must be tried before normal case-clauses)
+ case erl_parse:parse_exprs([{'fun',0} | Ts]) of
+ {ok, [{'fun',_,{clauses,Cs}}]} -> Cs;
+ {error, E} ->
+ parse_5(Ts, [E|Es])
+ end.
+
+parse_5(Ts, Es) ->
+ %% case-clause or clauses?
+ case erl_parse:parse_exprs([{'case',0}, {atom,0,true}, {'of',0} | Ts]) of
+ {ok, [{'case',_,_,Cs}]} -> Cs;
+ {error, E} ->
+ %% select the best error to report
+ parse_error(lists:last(lists:sort([E|Es])))
+ end.
+
+-dialyzer({nowarn_function, parse_error/1}). % no local return
+
+parse_error({L, M, R}) when is_atom(M), is_integer(L) ->
+ fail("~w: ~s", [L, M:format_error(R)]);
+parse_error({{L,C}, M, R}) when is_atom(M), is_integer(L), is_integer(C) ->
+ fail("~w:~w: ~s", [L,C,M:format_error(R)]);
+parse_error({_, M, R}) when is_atom(M) ->
+ fail(M:format_error(R));
+parse_error(R) ->
+ fail("unknown parse error: ~p", [R]).
+
+%% ------------------------------------------------------------------------
+%% Templates, substitution and matching
+
+%% Leaves are normal syntax trees, and inner nodes are tuples
+%% {template,Type,Attrs,Groups} where Groups are lists of lists of nodes.
+%% Metavariables are 1-tuples {VarName}, where VarName is an atom or an
+%% integer. {'_'} and {0} work as anonymous variables in matching. Glob
+%% metavariables are tuples {'*',VarName}, and {'*','_'} and {'*',0} are
+%% anonymous globs.
+
+%% Note that although template() :: tree() | ..., it is implied that these
+%% syntax trees are free from metavariables, so pattern() :: tree() |
+%% template() is in fact a wider type than template().
+
+-type template() :: tree()
+ | {id()}
+ | {'*',id()}
+ | {template, atom(), term(), [[template()]]}.
+
+-type template_or_templates() :: template() | [template()].
+
+-spec template(pattern_or_patterns()) -> template_or_templates().
+
+%% @doc Turn a syntax tree or list of trees into a template or templates.
+%% Templates can be instantiated or matched against, and reverted back to
+%% normal syntax trees using {@link tree/1}. If the input is already a
+%% template, it is not modified further.
+%%
+%% @see subst/2
+%% @see match/2
+%% @see tree/1
+
+template(Trees) when is_list(Trees) ->
+ [template_0(T) || T <- Trees];
+template(Tree) ->
+ template_0(Tree).
+
+template_0({template, _, _, _}=Template) -> Template;
+template_0({'*',_}=Template) -> Template;
+template_0({_}=Template) -> Template;
+template_0(Tree) ->
+ case template_1(Tree) of
+ false -> Tree;
+ {Name} when is_list(Name) ->
+ fail("bad metavariable: '~s'", [tl(Name)]); % drop v/n from name
+ Template -> Template
+ end.
+
+%% returns either a template or a lifted metavariable {String}, or 'false'
+%% if Tree contained no metavariables
+template_1(Tree) ->
+ case subtrees(Tree) of
+ [] ->
+ case metavar(Tree) of
+ {"v_"++Cs}=V when Cs =/= [] -> V; % to be lifted
+ {"n0"++Cs}=V when Cs =/= [] -> V; % to be lifted
+ {"v@"++Cs} when Cs =/= [] -> {'*',list_to_atom(Cs)};
+ {"n9"++Cs} when Cs =/= [] -> {'*',list_to_integer(Cs)};
+ {"v"++Cs} -> {list_to_atom(Cs)};
+ {"n"++Cs} -> {list_to_integer(Cs)};
+ false -> false
+ end;
+ Gs ->
+ case template_2(Gs, [], false) of
+ Gs1 when is_list(Gs1) ->
+ {template, type(Tree), erl_syntax:get_attrs(Tree), Gs1};
+ Other ->
+ Other
+ end
+ end.
+
+template_2([G | Gs], As, Bool) ->
+ case template_3(G, [], false) of
+ {"v_"++Cs}=V when Cs =/= [] -> V; % lift further
+ {"n0"++Cs}=V when Cs =/= [] -> V; % lift further
+ {"v@"++Cs} when Cs =/= [] -> {'*',list_to_atom(Cs)}; % stop
+ {"n9"++Cs} when Cs =/= [] -> {'*',list_to_integer(Cs)}; % stop
+ {"v"++Cs} when is_list(Cs) -> {list_to_atom(Cs)}; % stop
+ {"n"++Cs} when is_list(Cs) -> {list_to_integer(Cs)}; % stop
+ false -> template_2(Gs, [G | As], Bool);
+ G1 -> template_2(Gs, [G1 | As], true)
+ end;
+template_2([], _As, false) -> false;
+template_2([], As, true) -> lists:reverse(As).
+
+template_3([T | Ts], As, Bool) ->
+ case template_1(T) of
+ {"v_"++Cs} when Cs =/= [] -> {"v"++Cs}; % lift
+ {"n0"++Cs} when Cs =/= [] -> {"n"++Cs}; % lift
+ false -> template_3(Ts, [T | As], Bool);
+ T1 -> template_3(Ts, [T1 | As], true)
+ end;
+template_3([], _As, false) -> false;
+template_3([], As, true) -> lists:reverse(As).
+
+
+%% @doc Turn a template into a syntax tree representing the template.
+%% Meta-variables in the template are turned into normal Erlang variables if
+%% their names (after the metavariable prefix characters) begin with an
+%% uppercase character. E.g., `_@Foo' in the template becomes the variable
+%% `Foo' in the meta-template. Furthermore, variables ending with `@' are
+%% automatically wrapped in a call to merl:term/1, so e.g. `_@Foo@' in the
+%% template becomes `merl:term(Foo)' in the meta-template.
+
+-spec meta_template(template_or_templates()) -> tree_or_trees().
+
+meta_template(Templates) when is_list(Templates) ->
+ [meta_template_1(T) || T <- Templates];
+meta_template(Template) ->
+ meta_template_1(Template).
+
+meta_template_1({template, Type, Attrs, Groups}) ->
+ erl_syntax:tuple(
+ [erl_syntax:atom(template),
+ erl_syntax:atom(Type),
+ erl_syntax:abstract(Attrs),
+ erl_syntax:list([erl_syntax:list([meta_template_1(T) || T <- G])
+ || G <- Groups])]);
+meta_template_1({Var}=V) ->
+ meta_template_2(Var, V);
+meta_template_1({'*',Var}=V) ->
+ meta_template_2(Var, V);
+meta_template_1(Leaf) ->
+ erl_syntax:abstract(Leaf).
+
+meta_template_2(Var, V) when is_atom(Var) ->
+ case atom_to_list(Var) of
+ [C|_]=Name when C >= $A, C =< $Z ; C >= $À, C =< $Þ, C /= $× ->
+ case lists:reverse(Name) of
+ "@"++([_|_]=RevRealName) -> % don't allow empty RealName
+ RealName = lists:reverse(RevRealName),
+ erl_syntax:application(erl_syntax:atom(merl),
+ erl_syntax:atom(term),
+ [erl_syntax:variable(RealName)]);
+ _ ->
+ %% plain automatic metavariable
+ erl_syntax:variable(Name)
+ end;
+ _ ->
+ erl_syntax:abstract(V)
+ end;
+meta_template_2(Var, V) when is_integer(Var) ->
+ if Var > 9, (Var rem 10) =:= 9 ->
+ %% at least 2 digits, ends in 9: make it a Q-variable
+ if Var > 99, (Var rem 100) =:= 99 ->
+ %% at least 3 digits, ends in 99: wrap in merl:term/1
+ Name = "Q" ++ integer_to_list(Var div 100),
+ erl_syntax:application(erl_syntax:atom(merl),
+ erl_syntax:atom(term),
+ [erl_syntax:variable(Name)]);
+ true ->
+ %% plain automatic Q-variable
+ Name = integer_to_list(Var div 10),
+ erl_syntax:variable("Q" ++ Name)
+ end;
+ true ->
+ erl_syntax:abstract(V)
+ end.
+
+
+
+-spec template_vars(template_or_templates()) -> [id()].
+
+%% @doc Return an ordered list of the metavariables in the template.
+
+template_vars(Template) ->
+ template_vars(Template, []).
+
+template_vars(Templates, Vars) when is_list(Templates) ->
+ lists:foldl(fun template_vars_1/2, Vars, Templates);
+template_vars(Template, Vars) ->
+ template_vars_1(Template, Vars).
+
+template_vars_1({template, _, _, Groups}, Vars) ->
+ lists:foldl(fun (G, V) -> lists:foldl(fun template_vars_1/2, V, G) end,
+ Vars, Groups);
+template_vars_1({Var}, Vars) ->
+ ordsets:add_element(Var, Vars);
+template_vars_1({'*',Var}, Vars) ->
+ ordsets:add_element(Var, Vars);
+template_vars_1(_, Vars) ->
+ Vars.
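+
+%% Illustrative sketch (not part of the original code), assuming the ?Q/1
+%% macro from merl.hrl is available:
+%%
+%%     Vars = merl:template_vars(merl:template(?Q("{_@foo, _@bar}"))),
+%%     %% Vars is the ordered list [bar, foo]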
+
+
+-spec tree(template_or_templates()) -> tree_or_trees().
+
+%% @doc Revert a template to a normal syntax tree. Any remaining
+%% metavariables are turned into `@'-prefixed atoms or `909'-prefixed
+%% integers.
+%% @see template/1
+
+tree(Templates) when is_list(Templates) ->
+ [tree_1(T) || T <- Templates];
+tree(Template) ->
+ tree_1(Template).
+
+tree_1({template, Type, Attrs, Groups}) ->
+ %% flattening here is needed for templates created via source transforms
+ Gs = [lists:flatten([tree_1(T) || T <- G]) || G <- Groups],
+ erl_syntax:set_attrs(make_tree(Type, Gs), Attrs);
+tree_1({Var}) when is_atom(Var) ->
+ erl_syntax:atom(list_to_atom("@"++atom_to_list(Var)));
+tree_1({Var}) when is_integer(Var) ->
+ erl_syntax:integer(list_to_integer("909"++integer_to_list(Var)));
+tree_1({'*',Var}) when is_atom(Var) ->
+ erl_syntax:atom(list_to_atom("@@"++atom_to_list(Var)));
+tree_1({'*',Var}) when is_integer(Var) ->
+ erl_syntax:integer(list_to_integer("9099"++integer_to_list(Var)));
+tree_1(Leaf) ->
+ Leaf. % any syntax tree, not necessarily atomic (due to substitutions)
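+
+%% Illustrative sketch (not part of the original code): a remaining
+%% metavariable survives the round trip as an `@'-prefixed atom, assuming
+%% the ?Q/1 macro from merl.hrl is available:
+%%
+%%     T = merl:tree(merl:template(?Q("_@foo"))),
+%%     %% erl_prettypr:format(T) yields "'@foo'"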
+
+
+-spec subst(pattern_or_patterns(), env()) -> tree_or_trees().
+
+%% @doc Substitute metavariables in a pattern or list of patterns, yielding
+%% a syntax tree or list of trees as result. Both for normal metavariables
+%% and glob metavariables, the substituted value may be a single element or
+%% a list of elements. For example, if a list representing `1, 2, 3' is
+%% substituted for `var' in either of `[foo, _@var, bar]' or `[foo, _@@var,
+%% bar]', the result represents `[foo, 1, 2, 3, bar]'.
+
+subst(Trees, Env) when is_list(Trees) ->
+ [subst_0(T, Env) || T <- Trees];
+subst(Tree, Env) ->
+ subst_0(Tree, Env).
+
+subst_0(Tree, Env) ->
+ tree_1(subst_1(template(Tree), Env)).
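+
+%% Illustrative sketch (not part of the original code), assuming the ?Q/1
+%% macro from merl.hrl is available:
+%%
+%%     T1 = merl:subst(?Q("{_@foo}"), [{foo, merl:term(42)}]),
+%%     %% T1 represents {42}
+%%     T2 = merl:subst(?Q("{fee, _@@foo, fum}"),
+%%                     [{foo, [merl:term(fie), merl:term(foe)]}]),
+%%     %% T2 represents {fee, fie, foe, fum}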
+
+
+-spec tsubst(pattern_or_patterns(), env()) -> template_or_templates().
+
+%% @doc Like subst/2, but does not convert the result from a template back
+%% to a tree. Useful if you want to do multiple separate substitutions.
+%% @see subst/2
+%% @see tree/1
+
+tsubst(Trees, Env) when is_list(Trees) ->
+ [subst_1(template(T), Env) || T <- Trees];
+tsubst(Tree, Env) ->
+ subst_1(template(Tree), Env).
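+
+%% Illustrative sketch (not part of the original code; assumes a template may
+%% be passed back into tsubst/2), chaining two substitutions before reverting:
+%%
+%%     T0 = merl:template(merl:quote("{_@a, _@b}")),
+%%     T1 = merl:tsubst(T0, [{a, merl:term(1)}]),
+%%     T2 = merl:tsubst(T1, [{b, merl:term(2)}]),
+%%     Tree = merl:tree(T2)    % represents {1, 2}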
+
+subst_1({template, Type, Attrs, Groups}, Env) ->
+ Gs1 = [lists:flatten([subst_1(T, Env) || T <- G]) || G <- Groups],
+ {template, Type, Attrs, Gs1};
+subst_1({Var}=V, Env) ->
+ case lists:keyfind(Var, 1, Env) of
+ {Var, TreeOrTrees} -> TreeOrTrees;
+ false -> V
+ end;
+subst_1({'*',Var}=V, Env) ->
+ case lists:keyfind(Var, 1, Env) of
+ {Var, TreeOrTrees} -> TreeOrTrees;
+ false -> V
+ end;
+subst_1(Leaf, _Env) ->
+ Leaf.
+
+
+-spec alpha(pattern_or_patterns(), [{id(), id()}]) -> template_or_templates().
+
+%% @doc Alpha-converts a pattern (renames variables). Similar to tsubst/2,
+%% but only renames variables (including globs).
+%% @see tsubst/2
+
+alpha(Trees, Env) when is_list(Trees) ->
+ [alpha_1(template(T), Env) || T <- Trees];
+alpha(Tree, Env) ->
+ alpha_1(template(Tree), Env).
+
+alpha_1({template, Type, Attrs, Groups}, Env) ->
+ Gs1 = [lists:flatten([alpha_1(T, Env) || T <- G]) || G <- Groups],
+ {template, Type, Attrs, Gs1};
+alpha_1({Var}=V, Env) ->
+ case lists:keyfind(Var, 1, Env) of
+ {Var, NewVar} -> {NewVar};
+ false -> V
+ end;
+alpha_1({'*',Var}=V, Env) ->
+ case lists:keyfind(Var, 1, Env) of
+ {Var, NewVar} -> {'*',NewVar};
+ false -> V
+ end;
+alpha_1(Leaf, _Env) ->
+ Leaf.
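+
+%% Illustrative sketch (not part of the original code): renaming the
+%% metavariable `foo' to `bar' in a pattern, assuming the ?Q/1 macro from
+%% merl.hrl is available:
+%%
+%%     T = merl:alpha(?Q("{_@foo}"), [{foo, bar}]),
+%%     %% T is a template equivalent to one built from "{_@bar}"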
+
+
+-spec match(pattern_or_patterns(), tree_or_trees()) ->
+ {ok, env()} | error.
+
+%% @doc Match a pattern against a syntax tree (or patterns against syntax
+%% trees) returning an environment mapping variable names to subtrees; the
+%% environment is always sorted on keys. Note that multiple occurrences of
+%% metavariables in the pattern are not allowed, but this is not checked.
+%%
+%% @see template/1
+%% @see switch/2
+
+match(Patterns, Trees) when is_list(Patterns), is_list(Trees) ->
+ try {ok, match_1(Patterns, Trees, [])}
+ catch
+ error -> error
+ end;
+match(Patterns, Tree) when is_list(Patterns) -> match(Patterns, [Tree]);
+match(Pattern, Trees) when is_list(Trees) -> match([Pattern], Trees);
+match(Pattern, Tree) ->
+ try {ok, match_template(template(Pattern), Tree, [])}
+ catch
+ error -> error
+ end.
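+
+%% Illustrative sketch (not part of the original code), assuming the ?Q/1
+%% macro from merl.hrl is available:
+%%
+%%     {ok, Env} = merl:match(?Q("{foo, _@value}"), ?Q("{foo, 42}")),
+%%     %% Env is [{value, Tree}] where Tree represents 42
+%%     error = merl:match(?Q("{foo, _@value}"), ?Q("{bar, 42}"))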
+
+match_1([P|Ps], [T | Ts], Dict) ->
+ match_1(Ps, Ts, match_template(template(P), T, Dict));
+match_1([], [], Dict) ->
+ Dict;
+match_1(_, _, _Dict) ->
+ erlang:error(merl_match_arity).
+
+%% match a template against a syntax tree
+match_template({template, Type, _, Gs}, Tree, Dict) ->
+ case type(Tree) of
+ Type -> match_template_1(Gs, subtrees(Tree), Dict);
+ _ -> throw(error) % type mismatch
+ end;
+match_template({Var}, _Tree, Dict)
+ when Var =:= '_' ; Var =:= 0 ->
+ Dict; % anonymous variable
+match_template({Var}, Tree, Dict) ->
+ orddict:store(Var, Tree, Dict);
+match_template(Tree1, Tree2, Dict) ->
+ %% if Tree1 is not a template, Tree1 and Tree2 are both syntax trees
+ case compare_trees(Tree1, Tree2) of
+ true -> Dict;
+ false -> throw(error) % different trees
+ end.
+
+match_template_1([G1 | Gs1], [G2 | Gs2], Dict) ->
+ match_template_2(G1, G2, match_template_1(Gs1, Gs2, Dict));
+match_template_1([], [], Dict) ->
+ Dict;
+match_template_1(_, _, _Dict) ->
+ throw(error). % shape mismatch
+
+match_template_2([{Var} | Ts1], [_ | Ts2], Dict)
+ when Var =:= '_' ; Var =:= 0 ->
+ match_template_2(Ts1, Ts2, Dict); % anonymous variable
+match_template_2([{Var} | Ts1], [Tree | Ts2], Dict) ->
+ match_template_2(Ts1, Ts2, orddict:store(Var, Tree, Dict));
+match_template_2([{'*',Var} | Ts1], Ts2, Dict) ->
+ match_glob(lists:reverse(Ts1), lists:reverse(Ts2), Var, Dict);
+match_template_2([T1 | Ts1], [T2 | Ts2], Dict) ->
+ match_template_2(Ts1, Ts2, match_template(T1, T2, Dict));
+match_template_2([], [], Dict) ->
+ Dict;
+match_template_2(_, _, _Dict) ->
+ throw(error). % shape mismatch
+
+%% match the tails in reverse order; no further globs allowed
+match_glob([{'*',Var} | _], _, _, _) ->
+ fail("multiple glob variables in same match group: ~w", [Var]);
+match_glob([T1 | Ts1], [T2 | Ts2], Var, Dict) ->
+ match_glob(Ts1, Ts2, Var, match_template(T1, T2, Dict));
+match_glob([], _Group, Var, Dict) when Var =:= '_' ; Var =:= 0 ->
+ Dict; % anonymous glob variable
+match_glob([], Group, Var, Dict) ->
+ orddict:store(Var, lists:reverse(Group), Dict);
+match_glob(_, _, _, _Dict) ->
+ throw(error). % shape mismatch
+
+
+%% compare two syntax trees for equivalence
+compare_trees(T1, T2) ->
+ Type1 = type(T1),
+ case type(T2) of
+ Type1 ->
+ case subtrees(T1) of
+ [] ->
+ case subtrees(T2) of
+ [] -> compare_leaves(Type1, T1, T2);
+ _Gs2 -> false % shape mismatch
+ end;
+ Gs1 ->
+ case subtrees(T2) of
+ [] -> false; % shape mismatch
+ Gs2 -> compare_trees_1(Gs1, Gs2)
+ end
+ end;
+ _Type2 ->
+ false % different tree types
+ end.
+
+compare_trees_1([G1 | Gs1], [G2 | Gs2]) ->
+ compare_trees_2(G1, G2) andalso compare_trees_1(Gs1, Gs2);
+compare_trees_1([], []) ->
+ true;
+compare_trees_1(_, _) ->
+ false. % shape mismatch
+
+compare_trees_2([T1 | Ts1], [T2 | Ts2]) ->
+ compare_trees(T1, T2) andalso compare_trees_2(Ts1, Ts2);
+compare_trees_2([], []) ->
+ true;
+compare_trees_2(_, _) ->
+ false. % shape mismatch
+
+compare_leaves(Type, T1, T2) ->
+ case Type of
+ atom ->
+ erl_syntax:atom_value(T1)
+ =:= erl_syntax:atom_value(T2);
+ char ->
+ erl_syntax:char_value(T1)
+ =:= erl_syntax:char_value(T2);
+ float ->
+ erl_syntax:float_value(T1)
+ =:= erl_syntax:float_value(T2);
+ integer ->
+ erl_syntax:integer_value(T1)
+ =:= erl_syntax:integer_value(T2);
+ string ->
+ erl_syntax:string_value(T1)
+ =:= erl_syntax:string_value(T2);
+ operator ->
+ erl_syntax:operator_name(T1)
+ =:= erl_syntax:operator_name(T2);
+ text ->
+ erl_syntax:text_string(T1)
+ =:= erl_syntax:text_string(T2);
+ variable ->
+ erl_syntax:variable_name(T1)
+ =:= erl_syntax:variable_name(T2);
+ _ ->
+ true % trivially equal nodes
+ end.
+
+
+%% @doc Match against one or more clauses with patterns and optional guards.
+%%
+%% Note that clauses following a default action will be ignored.
+%%
+%% @see match/2
+
+-type switch_clause() ::
+ {pattern_or_patterns(), guarded_actions()}
+ | {pattern_or_patterns(), guard_test(), switch_action()}
+ | default_action().
+
+-type guarded_actions() :: guarded_action() | [guarded_action()].
+
+-type guarded_action() :: switch_action() | {guard_test(), switch_action()}.
+
+-type switch_action() :: fun( (env()) -> any() ).
+
+-type guard_test() :: fun( (env()) -> boolean() ).
+
+-type default_action() :: fun( () -> any() ).
+
+
+-spec switch(tree_or_trees(), [switch_clause()]) -> any().
+
+switch(Trees, [{Patterns, GuardedActions} | Cs]) when is_list(GuardedActions) ->
+ switch_1(Trees, Patterns, GuardedActions, Cs);
+switch(Trees, [{Patterns, GuardedAction} | Cs]) ->
+ switch_1(Trees, Patterns, [GuardedAction], Cs);
+switch(Trees, [{Patterns, Guard, Action} | Cs]) ->
+ switch_1(Trees, Patterns, [{Guard, Action}], Cs);
+switch(_Trees, [Default | _Cs]) when is_function(Default, 0) ->
+ Default();
+switch(_Trees, []) ->
+ erlang:error(merl_switch_clause);
+switch(_Tree, _) ->
+ erlang:error(merl_switch_badarg).
+
+switch_1(Trees, Patterns, GuardedActions, Cs) ->
+ case match(Patterns, Trees) of
+ {ok, Env} ->
+ switch_2(Env, GuardedActions, Trees, Cs);
+ error ->
+ switch(Trees, Cs)
+ end.
+
+switch_2(Env, [{Guard, Action} | Bs], Trees, Cs)
+ when is_function(Guard, 1), is_function(Action, 1) ->
+ case Guard(Env) of
+ true -> Action(Env);
+ false -> switch_2(Env, Bs, Trees, Cs)
+ end;
+switch_2(Env, [Action | _Bs], _Trees, _Cs) when is_function(Action, 1) ->
+ Action(Env);
+switch_2(_Env, [], Trees, Cs) ->
+ switch(Trees, Cs);
+switch_2(_Env, _, _Trees, _Cs) ->
+ erlang:error(merl_switch_badarg).
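+
+%% Illustrative sketch (not part of the original code), assuming the ?Q/1
+%% macro from merl.hrl is available; the first clause whose pattern matches
+%% and whose guard succeeds decides the result:
+%%
+%%     merl:switch(Tree,
+%%                 [{?Q("{foo, _@n}"),
+%%                   fun ([{n, N}]) -> erl_syntax:is_integer(N, 17) end,
+%%                   fun ([{n, N}]) -> N end},
+%%                  {?Q("{foo, _@n}"), fun ([{n, N}]) -> N end},
+%%                  fun () -> not_found end])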
+
+
+%% ------------------------------------------------------------------------
+%% Internal utility functions
+
+-dialyzer({nowarn_function, fail/1}). % no local return
+
+fail(Text) ->
+ fail(Text, []).
+
+fail(Fs, As) ->
+ throw({error, lists:flatten(io_lib:format(Fs, As))}).
+
+flatten_text([L | _]=Lines) when is_list(L) ->
+ lists:foldr(fun(S, T) -> S ++ [$\n | T] end, "", Lines);
+flatten_text([B | _]=Lines) when is_binary(B) ->
+ lists:foldr(fun(S, T) -> binary_to_list(S) ++ [$\n | T] end, "", Lines);
+flatten_text(Text) when is_binary(Text) ->
+ binary_to_list(Text);
+flatten_text(Text) ->
+ Text.
+
+-spec metavar(tree()) -> {string()} | false.
+
+%% Check if a syntax tree represents a metavariable. If not, 'false' is
+%% returned; otherwise, this returns a 1-tuple with a string containing the
+%% variable name including lift/glob prefixes but without any leading
+%% metavariable prefix, and instead prefixed with "v" for a variable or "n"
+%% for an integer.
+%%
+%% Metavariables are atoms starting with @, variables starting with _@,
+%% strings starting with "'@, or integers starting with 909. Following the
+%% prefix, one or more _ or 0 characters (unless it's the last character in
+%% the name) may be used to indicate "lifting" of the variable one or more
+%% levels, and after that, a @ or 9 character indicates a glob metavariable
+%% rather than a normal metavariable. If the name after the prefix is _ or
+%% 0, the variable is treated as an anonymous catch-all pattern in matches.
+
+metavar(Tree) ->
+ case type(Tree) of
+ atom ->
+ case erl_syntax:atom_name(Tree) of
+ "@" ++ Cs when Cs =/= [] -> {"v"++Cs};
+ _ -> false
+ end;
+ variable ->
+ case erl_syntax:variable_literal(Tree) of
+ "_@" ++ Cs when Cs =/= [] -> {"v"++Cs};
+ _ -> false
+ end;
+ integer ->
+ case erl_syntax:integer_value(Tree) of
+ N when N >= 9090 ->
+ case integer_to_list(N) of
+ "909" ++ Cs -> {"n"++Cs};
+ _ -> false
+ end;
+ _ -> false
+ end;
+ string ->
+ case erl_syntax:string_value(Tree) of
+ "'@" ++ Cs -> {"v"++Cs};
+ _ -> false
+ end;
+ _ ->
+ false
+ end.
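+
+%% Illustrative examples (not part of the original code) of the encoding
+%% produced by metavar/1:
+%%     variable _@foo   -> {"vfoo"}    (normal metavariable)
+%%     variable _@@foo  -> {"v@foo"}   (glob metavariable)
+%%     variable _@_foo  -> {"v_foo"}   (lifted one level)
+%%     integer  9091    -> {"n1"}      (numeric metavariable)
+%%     atom     '@foo'  -> {"vfoo"}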
+
+%% wrappers around erl_syntax functions to provide more uniform shape of
+%% generic subtrees (maybe this can be fixed in syntax_tools one day)
+
+type(T) ->
+ case erl_syntax:type(T) of
+ nil -> list;
+ Type -> Type
+ end.
+
+subtrees(T) ->
+ case erl_syntax:type(T) of
+ tuple ->
+ [erl_syntax:tuple_elements(T)]; %% don't treat {} as a leaf
+ nil ->
+ [[], []]; %% don't treat [] as a leaf, but as a list
+ list ->
+ case erl_syntax:list_suffix(T) of
+ none ->
+ [erl_syntax:list_prefix(T), []];
+ S ->
+ [erl_syntax:list_prefix(T), [S]]
+ end;
+ binary_field ->
+ [[erl_syntax:binary_field_body(T)],
+ erl_syntax:binary_field_types(T)];
+ clause ->
+ case erl_syntax:clause_guard(T) of
+ none ->
+ [erl_syntax:clause_patterns(T), [],
+ erl_syntax:clause_body(T)];
+ G ->
+ [erl_syntax:clause_patterns(T), [G],
+ erl_syntax:clause_body(T)]
+ end;
+ receive_expr ->
+ case erl_syntax:receive_expr_timeout(T) of
+ none ->
+ [erl_syntax:receive_expr_clauses(T), [], []];
+ E ->
+ [erl_syntax:receive_expr_clauses(T), [E],
+ erl_syntax:receive_expr_action(T)]
+ end;
+ record_expr ->
+ case erl_syntax:record_expr_argument(T) of
+ none ->
+ [[], [erl_syntax:record_expr_type(T)],
+ erl_syntax:record_expr_fields(T)];
+ V ->
+ [[V], [erl_syntax:record_expr_type(T)],
+ erl_syntax:record_expr_fields(T)]
+ end;
+ record_field ->
+ case erl_syntax:record_field_value(T) of
+ none ->
+ [[erl_syntax:record_field_name(T)], []];
+ V ->
+ [[erl_syntax:record_field_name(T)], [V]]
+ end;
+ _ ->
+ erl_syntax:subtrees(T)
+ end.
+
+make_tree(list, [P, []]) -> erl_syntax:list(P);
+make_tree(list, [P, [S]]) -> erl_syntax:list(P, S);
+make_tree(tuple, [E]) -> erl_syntax:tuple(E);
+make_tree(binary_field, [[B], Ts]) -> erl_syntax:binary_field(B, Ts);
+make_tree(clause, [P, [], B]) -> erl_syntax:clause(P, none, B);
+make_tree(clause, [P, [G], B]) -> erl_syntax:clause(P, G, B);
+make_tree(receive_expr, [C, [], _A]) -> erl_syntax:receive_expr(C);
+make_tree(receive_expr, [C, [E], A]) -> erl_syntax:receive_expr(C, E, A);
+make_tree(record_expr, [[], [T], F]) -> erl_syntax:record_expr(T, F);
+make_tree(record_expr, [[E], [T], F]) -> erl_syntax:record_expr(E, T, F);
+make_tree(record_field, [[N], []]) -> erl_syntax:record_field(N);
+make_tree(record_field, [[N], [E]]) -> erl_syntax:record_field(N, E);
+make_tree(Type, Groups) ->
+ erl_syntax:make_tree(Type, Groups).
+
+merge_comments(_StartLine, [], [T]) -> T;
+merge_comments(_StartLine, [], Ts) -> Ts;
+merge_comments(StartLine, Comments, Ts) ->
+ merge_comments(StartLine, Comments, Ts, []).
+
+merge_comments(_StartLine, [], [], [T]) -> T;
+merge_comments(_StartLine, [], [T], []) -> T;
+merge_comments(_StartLine, [], Ts, Acc) ->
+ lists:reverse(Acc, Ts);
+merge_comments(StartLine, Cs, [], Acc) ->
+ merge_comments(StartLine, [], [],
+ [erl_syntax:set_pos(
+ erl_syntax:comment(Indent, Text),
+ StartLine + Line - 1)
+ || {Line, _, Indent, Text} <- Cs] ++ Acc);
+merge_comments(StartLine, [C|Cs], [T|Ts], Acc) ->
+ {Line, _Col, Indent, Text} = C,
+ CommentLine = StartLine + Line - 1,
+ case erl_syntax:get_pos(T) of
+ Pos when Pos < CommentLine ->
+ %% TODO: traverse sub-tree rather than only the top level nodes
+ merge_comments(StartLine, [C|Cs], Ts, [T|Acc]);
+ CommentLine ->
+ Tc = erl_syntax:add_postcomments(
+ [erl_syntax:comment(Indent, Text)], T),
+ merge_comments(StartLine, Cs, [Tc|Ts], Acc);
+ _ ->
+ Tc = erl_syntax:add_precomments(
+ [erl_syntax:comment(Indent, Text)], T),
+ merge_comments(StartLine, Cs, [Tc|Ts], Acc)
+ end.
diff --git a/lib/syntax_tools/src/merl_tests.erl b/lib/syntax_tools/src/merl_tests.erl
new file mode 100644
index 0000000000..c1aae3100e
--- /dev/null
+++ b/lib/syntax_tools/src/merl_tests.erl
@@ -0,0 +1,539 @@
+%% ---------------------------------------------------------------------
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may
+%% not use this file except in compliance with the License. You may obtain
+%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 2012-2015 Richard Carlsson
+%% @doc Unit tests for merl.
+%% @private
+
+-module(merl_tests).
+
+%-define(MERL_NO_TRANSFORM, true).
+-include("merl.hrl").
+
+-include_lib("eunit/include/eunit.hrl").
+
+
+%% utilities
+
+f(Ts) when is_list(Ts) ->
+ lists:flatmap(fun erl_prettypr:format/1, Ts);
+f(T) ->
+ erl_prettypr:format(T).
+
+fe(Env) -> [{Key, f(T)} || {Key, T} <- Env].
+
+g_exported_() ->
+ %% for testing the parse transform, autoexported to avoid complaints
+ {ok, merl:quote(?LINE, "42")}.
+
+
+ok({ok, X}) -> X.
+
+
+%%
+%% tests
+%%
+
+parse_error_test_() ->
+ [?_assertThrow({error, "1: syntax error before: '{'" ++ _},
+ f(merl:quote("{")))
+ ].
+
+term_test_() ->
+ [?_assertEqual(tuple, erl_syntax:type(merl:term({}))),
+ ?_assertEqual("{foo, 42}", f(merl:term({foo, 42})))
+ ].
+
+quote_form_test_() ->
+ [?_assertEqual("f(X) -> {ok, X}.",
+ f(?Q("f(X) -> {ok, X}."))),
+ ?_assertEqual("-module(foo).",
+ f(?Q("-module(foo)."))),
+ ?_assertEqual("-import(bar, [f/1, g/2]).",
+ f(?Q("-import(bar, [f/1, g/2])."))),
+ ?_assertEqual(("-module(foo)."
+ "-export([f/1])."
+ "f(X) -> {ok, X}."),
+ f(?Q(["-module(foo).",
+ "-export([f/1]).",
+ "f(X) -> {ok, X}."])))
+ ].
+
+quote_term_test_() ->
+ [?_assertEqual("foo",
+ f(?Q("foo"))),
+ ?_assertEqual("42",
+ f(?Q("42"))),
+ ?_assertEqual("{foo, 42}",
+ f(?Q("{foo, 42}"))),
+ ?_assertEqual(("1" ++ "2" ++ "3"),
+ f(?Q("1, 2, 3"))),
+ ?_assertEqual(("foo" "42" "{}" "true"),
+ f(?Q("foo, 42, {}, (true)")))
+ ].
+
+quote_expr_test_() ->
+ [?_assertEqual("2 + 2",
+ f(?Q("2 + 2"))),
+ ?_assertEqual("f(foo, 42)",
+ f(?Q("f(foo, 42)"))),
+ ?_assertEqual("case X of\n a -> 1;\n b -> 2\nend",
+ f(?Q("case X of a -> 1; b -> 2 end"))),
+ ?_assertEqual(("2 + 2" ++ "f(42)" ++ "catch 22"),
+ f(?Q("2 + 2, f(42), catch 22")))
+ ].
+
+quote_try_clause_test_() ->
+ [?_assertEqual("(error:R) when R =/= foo -> ok",
+ f(?Q("error:R when R =/= foo -> ok"))),
+ %% note that without any context, clauses are printed as fun-clauses
+ ?_assertEqual(("(error:badarg) -> badarg"
+ "(exit:normal) -> normal"
+ "(_) -> other"),
+ f(?Q(["error:badarg -> badarg;",
+ "exit:normal -> normal;"
+ "_ -> other"])))
+ ].
+
+quote_fun_clause_test_() ->
+ [?_assertEqual("(X, Y) when X < Y -> {ok, X}",
+ f(?Q("(X, Y) when X < Y -> {ok, X}"))),
+ ?_assertEqual(("(X, Y) when X < Y -> less"
+ "(X, Y) when X > Y -> greater"
+ "(_, _) -> equal"),
+ f(?Q(["(X, Y) when X < Y -> less;",
+ "(X, Y) when X > Y -> greater;"
+ "(_, _) -> equal"])))].
+
+quote_case_clause_test_() ->
+ [?_assertEqual("({X, Y}) when X < Y -> X",
+ f(?Q("{X, Y} when X < Y -> X"))),
+ ?_assertEqual(("({X, Y}) when X < Y -> -1"
+ "({X, Y}) when X > Y -> 1"
+ "(_) -> 0"),
+ f(?Q(["{X, Y} when X < Y -> -1;",
+ "{X, Y} when X > Y -> 1;"
+ "_ -> 0"])))].
+
+quote_comment_test_() ->
+ [?_assertEqual("%% comment preserved\n"
+ "{foo, 42}",
+ f(?Q(["%% comment preserved",
+ "{foo, 42}"]))),
+ ?_assertEqual("{foo, 42}"
+ "%% comment preserved\n",
+ f(?Q(["{foo, 42}",
+ "%% comment preserved"]))),
+ ?_assertEqual(" % just a comment (with indent)\n",
+ f(?Q(" % just a comment (with indent)")))
+ ].
+
+metavar_test_() ->
+ [?_assertEqual("'@foo'", f(merl:tree(merl:template(?Q("'@foo'"))))),
+ ?_assertEqual("'@foo'", f(merl:tree(merl:template(?Q("_@foo"))))),
+ ?_assertEqual("'@foo'", f(merl:tree(merl:template(?Q("\"'@foo\""))))),
+ ?_assertEqual("{'@foo'}", f(merl:tree(merl:template(?Q("{_@foo}"))))),
+ ?_assertEqual("'@foo'", f(merl:tree(merl:template(?Q("{_@_foo}"))))),
+ ?_assertEqual("909123", f(merl:tree(merl:template(?Q("{9090123}"))))),
+ ?_assertEqual("{'@foo'}",
+ f(merl:tree(merl:template(?Q("{{{_@__foo}}}"))))),
+ ?_assertEqual("{909123}",
+ f(merl:tree(merl:template(?Q("{{{90900123}}}"))))),
+ ?_assertEqual("{'@@foo'}",
+ f(merl:tree(merl:template(?Q("{{{_@__@foo}}}"))))),
+ ?_assertEqual("{9099123}",
+ f(merl:tree(merl:template(?Q("{{{909009123}}}")))))
+ ].
+
+subst_test_() ->
+ [?_assertEqual("42",
+ f(merl:subst(?Q("_@foo"), [{foo, merl:term(42)}]))),
+ ?_assertEqual("'@foo'",
+ f(merl:subst(?Q("_@foo"), []))),
+ ?_assertEqual("{42}",
+ f(merl:subst(?Q("{_@foo}"),
+ [{foo, merl:term(42)}]))),
+ ?_assertEqual("{'@foo'}",
+ f(merl:subst(?Q("{_@foo}"), []))),
+ ?_assertEqual("fun bar/0",
+ f(merl:subst(merl:template(?Q("fun '@foo'/0")),
+ [{foo, merl:term(bar)}]))),
+ ?_assertEqual("fun foo/3",
+ f(merl:subst(merl:template(?Q("fun foo/9091")),
+ [{1, merl:term(3)}]))),
+ ?_assertEqual("[42]",
+ f(merl:subst(merl:template(?Q("[_@foo]")),
+ [{foo, merl:term(42)}]))),
+ ?_assertEqual("[foo, bar]",
+ f(merl:subst(merl:template(?Q("[_@foo]")),
+ [{foo, [merl:term(foo),merl:term(bar)]}]))),
+ ?_assertEqual("{fee, fie, foe, fum}",
+ f(merl:subst(merl:template(?Q("{fee, _@foo, fum}")),
+ [{foo, [merl:term(fie),merl:term(foe)]}]))),
+ ?_assertEqual("[foo, bar]",
+ f(merl:subst(merl:template(?Q("[_@@foo]")),
+ [{foo, [merl:term(foo),merl:term(bar)]}]))),
+ ?_assertEqual("{fee, fie, foe, fum}",
+ f(merl:subst(merl:template(?Q("{fee, _@@foo, fum}")),
+ [{foo, [merl:term(fie),merl:term(foe)]}]))),
+ ?_assertEqual("['@@foo']",
+ f(merl:subst(merl:template(?Q("[_@@foo]")), []))),
+ ?_assertEqual("foo",
+ f(merl:subst(merl:template(?Q("[_@_foo]")),
+ [{foo, merl:term(foo)}]))),
+ ?_assertEqual("{'@foo'}",
+ f(merl:subst(merl:template(?Q("{[_@_foo]}")), []))),
+ ?_assertEqual("{'@@foo'}",
+ f(merl:subst(merl:template(?Q("{[_@_@foo]}")), []))),
+ ?_assertEqual("-export([foo/1, bar/2]).",
+ f(merl:subst(merl:template(?Q("-export(['@_@foo'/0]).")),
+ [{foo, [erl_syntax:arity_qualifier(
+ merl:term(foo),
+ merl:term(1)),
+ erl_syntax:arity_qualifier(
+ merl:term(bar),
+ merl:term(2))
+ ]}
+ ])))
+ ].
+
+match_test_() ->
+ [?_assertEqual({ok, []}, merl:match(?Q("foo"), ?Q("foo"))),
+ ?_assertEqual(error, merl:match(?Q("foo"), ?Q("bar"))),
+ ?_assertEqual({ok,[]}, merl:match(?Q("{foo,42}"), ?Q("{foo,42}"))),
+ ?_assertEqual(error, merl:match(?Q("{foo,42}"), ?Q("{foo,bar}"))),
+ ?_assertEqual({ok,[]}, merl:match(?Q("[foo,[42]]"), ?Q("[foo,[42]]"))),
+ ?_assertEqual(error, merl:match(?Q("[foo,[42]]"), ?Q("[foo,{42}]"))),
+ ?_assertEqual({ok,[]}, merl:match(?Q("[foo,[_@_]]"),
+ ?Q("[foo,[42]]"))),
+ ?_assertEqual({ok,[]}, merl:match(?Q("[foo,[9090]]"),
+ ?Q("[foo,[42]]"))),
+ ?_assertEqual({ok,[]}, merl:match(?Q("{_@_,[_@_,2]}"),
+ ?Q("{foo,[1,2]}"))),
+ ?_assertEqual(error, merl:match(?Q("{_@_,[_@_,2]}"),
+ ?Q("{foo,[1,3]}"))),
+ ?_assertEqual({ok,[]}, merl:match(?Q("[foo,[9090,9090]]"),
+ ?Q("[foo,[1,2]]"))),
+ ?_assertEqual(error, merl:match(?Q("[foo,[9090,9090]]"),
+ ?Q("[foo,[1,2,3]]"))),
+ ?_assertEqual([{foo,"42"}],
+ fe(ok(merl:match(?Q("_@foo"), ?Q("42"))))),
+ ?_assertEqual([{foo,"42"}],
+ fe(ok(merl:match(?Q("{_@foo}"), ?Q("{42}"))))),
+ ?_assertEqual([{1,"0"},{foo,"bar"}],
+ fe(ok(merl:match(?Q("fun '@foo'/9091"),
+ ?Q("fun bar/0"))))),
+ ?_assertEqual([{line,"17"},{text,"\"hello\""}],
+ fe(ok(merl:match(?Q("{_@line, _@text}"),
+ ?Q("{17, \"hello\"}"))))),
+ ?_assertEqual([{line,"17"},{text,"\"hello\""}],
+ fe(ok(merl:match(?Q("foo(_@line, _@text)"),
+ ?Q("foo(17, \"hello\")"))))),
+ ?_assertEqual([{foo,""}],
+ fe(ok(merl:match(?Q("f(_@@foo)"),
+ ?Q("f()"))))),
+ ?_assertEqual([{foo,"fee"}],
+ fe(ok(merl:match(?Q("f(_@@foo)"),
+ ?Q("f(fee)"))))),
+ ?_assertEqual([{foo,"feefiefum"}],
+ fe(ok(merl:match(?Q("f(_@@foo)"),
+ ?Q("f(fee, fie, fum)"))))),
+ ?_assertEqual([{foo,""}],
+ fe(ok(merl:match(?Q("[_@@foo]"),
+ ?Q("[]"))))),
+ ?_assertEqual([{foo,"fee"}],
+ fe(ok(merl:match(?Q("[_@@foo]"),
+ ?Q("[fee]"))))),
+ ?_assertEqual([{foo,"feefiefoefum"}],
+ fe(ok(merl:match(?Q("[_@@foo]"),
+ ?Q("[fee, fie, foe, fum]"))))),
+ ?_assertEqual([{foo,""}],
+ fe(ok(merl:match(?Q("{_@@foo}"),
+ ?Q("{}"))))),
+ ?_assertEqual([{foo,"fee"}],
+ fe(ok(merl:match(?Q("{_@@foo}"),
+ ?Q("{fee}"))))),
+ ?_assertEqual([{foo,"feefiefoefum"}],
+ fe(ok(merl:match(?Q("{_@@foo}"),
+ ?Q("{fee, fie, foe, fum}"))))),
+ ?_assertEqual([{foo,"fie"}],
+ fe(ok(merl:match(?Q("{fee, _@@foo}"),
+ ?Q("{fee, fie}"))))),
+ ?_assertEqual([{foo,"fiefoefum"}],
+ fe(ok(merl:match(?Q("{fee, _@@foo}"),
+ ?Q("{fee, fie, foe, fum}"))))),
+ ?_assertEqual([{foo,"fie"}],
+ fe(ok(merl:match(?Q("{_@@foo, foe, fum}"),
+ ?Q("{fie, foe, fum}"))))),
+ ?_assertEqual([{foo,"feefie"}],
+ fe(ok(merl:match(?Q("{_@@foo, foe, fum}"),
+ ?Q("{fee, fie, foe, fum}"))))),
+ ?_assertEqual([{foo,"fie"}],
+ fe(ok(merl:match(?Q("{fee, _@@foo, fum}"),
+ ?Q("{fee, fie, fum}"))))),
+ ?_assertEqual([{foo,"fiefoe"}],
+ fe(ok(merl:match(?Q("{fee, _@@foo, fum}"),
+ ?Q("{fee, fie, foe, fum}"))))),
+ ?_assertEqual([{foo,"fiefoe"},{post,"fum"},{pre,"fee"}],
+ fe(ok(merl:match(?Q("{_@pre, _@@foo, _@post}"),
+ ?Q("{fee, fie, foe, fum}"))))),
+ ?_assertThrow({error, "multiple glob variables"++_},
+ fe(ok(merl:match(?Q("{_@@foo, _@@bar}"),
+ ?Q("{fee, fie, foe, fum}"))))),
+ ?_assertEqual([],
+ fe(ok(merl:match(?Q("{fee, _@@_}"),
+ ?Q("{fee, fie, foe, fum}"))))),
+ ?_assertEqual([],
+ fe(ok(merl:match(?Q("{_@@_, foe, fum}"),
+ ?Q("{fee, fie, foe, fum}"))))),
+ ?_assertEqual([{post,"fum"},{pre,"fee"}],
+ fe(ok(merl:match(?Q("{_@pre, _@@_, _@post}"),
+ ?Q("{fee, fie, foe, fum}")))))
+ ].
+
+switch_test_() ->
+ [?_assertEqual(42, merl:switch(?Q("foo"), [fun () -> 42 end])),
+ ?_assertEqual(17, merl:switch(?Q("foo"), [fun () -> 17 end,
+ fun () -> 42 end])),
+ ?_assertEqual(17, merl:switch(?Q("foo"), [{?Q("foo"),
+ fun ([]) -> 17 end},
+ fun () -> 42 end])),
+ ?_assertEqual(17,
+ merl:switch(?Q("foo"), [{?Q("bar"), fun ([]) -> 0 end},
+ {?Q("foo"), fun ([]) -> 17 end},
+ fun () -> 42 end])),
+ ?_assertEqual([{foo,"17"}],
+ merl:switch(?Q("{foo,17}"),
+ [{?Q("{bar, _@foo}"), fun (_) -> 0 end},
+ {?Q("{foo, _@foo}"), fun fe/1},
+ fun () -> 42 end])),
+ ?_assertEqual(17,
+ merl:switch(?Q("{foo, 17}"),
+ [{?Q("{foo, _@foo}"),
+ fun ([{foo, X}]) -> f(X) =:= "17" end,
+ fun (_) -> 17 end},
+ fun () -> 42 end])),
+ ?_assertEqual([{foo,"17"}],
+ merl:switch(?Q("{foo, 17}"),
+ [{?Q("{foo, _@foo}"),
+ fun ([{foo, X}]) -> f(X) =:= "42" end,
+ fun (_) -> 0 end},
+ {?Q("{foo, _@foo}"), fun fe/1},
+ fun () -> 42 end])),
+ ?_assertEqual(17,
+ merl:switch(?Q("{foo, 17}"),
+ [{?Q("{foo, _@foo}"),
+ [{fun ([{foo, X}]) -> f(X) =:= "17" end,
+ fun (_) -> 17 end},
+ fun (_) -> 0 end]},
+ fun () -> 42 end])),
+ ?_assertEqual([{foo,"17"}],
+ merl:switch(?Q("{foo, 17}"),
+ [{?Q("{foo, _@foo}"),
+ [{fun ([{foo, X}]) -> f(X) =:= "42" end,
+ fun (_) -> 0 end},
+ fun fe/1]},
+ fun () -> 42 end]))
+ ].
+
+-ifndef(MERL_NO_TRANSFORM).
+
+inline_meta_test_() ->
+ [?_assertEqual("{foo}",
+ f(begin
+ Foo = ?Q("foo"),
+ ?Q("{_@Foo}")
+ end)),
+ ?_assertEqual("{foo, '@bar'}",
+ f(begin
+ Foo = ?Q("foo"),
+ ?Q("{_@Foo,_@bar}")
+ end)),
+ ?_assertEqual("{foo, '@bar'}",
+ f(begin
+ Q1 = ?Q("foo"),
+ ?Q("{90919,_@bar}")
+ end))
+ ].
+
+inline_meta_autoabstract_test_() ->
+ [?_assertEqual("{foo}",
+ f(begin
+ Foo = foo,
+ ?Q("{_@Foo@}")
+ end)),
+ ?_assertEqual("{foo, '@bar@'}",
+ f(begin
+ Foo = foo,
+ ?Q("{_@Foo@,_@bar@}")
+ end)),
+ ?_assertEqual("{foo, '@bar@'}",
+ f(begin
+ Q1 = foo,
+ ?Q("{909199,_@bar@}")
+ end))
+ ].
+
+meta_match_test_() ->
+ [?_assertEqual("{[bar], baz()}",
+ f(begin
+ Tree = ?Q("{foo, [bar], baz()}"),
+ ?Q("{foo, _@Bar, '@Baz'}") = Tree,
+ ?Q("{_@Bar, _@Baz}")
+ end)),
+ ?_assertEqual("{[bar], baz()}",
+ f(begin
+ Tree = ?Q("{foo, [bar], baz()}"),
+ ?Q("{foo, 90919, 90929}") = Tree,
+ ?Q("{_@Q1, _@Q2}")
+ end)),
+ ?_assertError({badmatch,error},
+ f(begin
+ Tree = ?Q("{foo, [bar], baz()}"),
+ ?Q("{fie, _@Bar, '@Baz'}") = Tree,
+ ?Q("{_@Bar, _@Baz}")
+ end))
+ ].
+
+meta_case_test_() ->
+ [?_assertEqual("{[bar], baz()}",
+ f(begin
+ Tree = ?Q("{foo, [bar], baz()}"),
+ case Tree of
+ ?Q("{foo, _@Bar, '@Baz'}") -> ?Q("{_@Bar, _@Baz}")
+ end
+ end)),
+ ?_assertEqual("{foo, [bar], baz()}",
+ f(begin
+ Tree = ?Q("{foo, [bar], baz()}"),
+ case Tree of
+ ?Q("{fie, _@Bar, '@Baz'}") -> ?Q("{_@Bar, _@Baz}");
+ _ -> Tree
+ end
+ end)),
+ ?_assertError(merl_switch_clause,
+ f(begin
+ Tree = ?Q("{foo, [bar], baz()}"),
+ case Tree of
+ ?Q("{fie, _@Bar, '@Baz'}") -> ?Q("{_@Bar, _@Baz}")
+ end
+ end)),
+ ?_assertEqual("{foo, 4}",
+ f(begin
+ Tree = ?Q("{foo, 3}"),
+ case Tree of
+ ?Q("{foo, _@N}") ->
+ N1 = erl_syntax:concrete(N) + 1,
+ ?Q("{foo, _@N1@}");
+ _ -> Tree
+ end
+ end)),
+ ?_assertEqual("-export([f/4]).",
+ f(begin
+ Tree = ?Q("-export([f/3])."),
+ case Tree of
+ ?Q("-export([f/90919]).") ->
+ Q2 = erl_syntax:concrete(Q1) + 1,
+ ?Q("-export([f/909299]).");
+ _ -> Tree
+ end
+ end)),
+ ?_assertEqual("{1, [bar], baz()}",
+ f(begin
+ Tree = ?Q("{foo, [bar], baz()}"),
+ case Tree of
+ ?Q("{foo, _@Bar, '@Baz'}") ->
+ ?Q("{1, _@Bar, _@Baz}");
+ ?Q("{fie, _@Bar, '@Baz'}") ->
+ ?Q("{2, _@Bar, _@Baz}");
+ _ -> Tree
+ end
+ end)),
+ ?_assertEqual("{2, [bar], baz()}",
+ f(begin
+ Tree = ?Q("{fie, [bar], baz()}"),
+ case Tree of
+ ?Q("{foo, _@Bar, '@Baz'}") ->
+ ?Q("{1, _@Bar, _@Baz}");
+ ?Q("{fie, _@Bar, '@Baz'}") ->
+ ?Q("{2, _@Bar, _@Baz}");
+ _ -> Tree
+ end
+ end)),
+ ?_assertEqual("{2, baz()}",
+ f(begin
+ Tree = ?Q("{foo, [bar], baz()}"),
+ case Tree of
+ ?Q("{foo, [_@Bar], '@Baz'}")
+ when erl_syntax:is_atom(Bar, foo) ->
+ ?Q("{1, _@Baz}");
+ ?Q("{foo, [_@Bar], '@Baz'}")
+ when erl_syntax:is_atom(Bar, bar) ->
+ ?Q("{2, _@Baz}");
+ ?Q("{foo, [_@Bar], '@Baz'}") ->
+ ?Q("{3, _@Baz}");
+ _ -> Tree
+ end
+ end)),
+ ?_assertEqual("{2, 42}",
+ f(begin
+ Tree = ?Q("{foo, [bar], 42}"),
+ case Tree of
+ ?Q("{foo, [_@Bar], '@Baz'}")
+ when erl_syntax:is_atom(Bar, bar),
+ erl_syntax:is_integer(Baz, 17) ->
+ ?Q("{1, _@Bar}");
+ ?Q("{foo, [_@Bar], '@Baz'}")
+ when erl_syntax:is_atom(Bar, bar),
+ erl_syntax:is_integer(Baz, 42) ->
+ ?Q("{2, _@Baz}");
+ ?Q("{foo, [_@Bar], '@Baz'}") ->
+ ?Q("{3, _@Baz}");
+ _ -> Tree
+ end
+ end)),
+ ?_assertEqual("{2, 42}",
+ f(begin
+ Tree = ?Q("{foo, [baz], 42}"),
+ case Tree of
+ ?Q("{foo, [_@Bar], '@Baz'}")
+ when erl_syntax:is_atom(Bar, bar),
+ erl_syntax:is_integer(Baz, 17)
+ ; erl_syntax:is_atom(Bar, baz),
+ erl_syntax:is_integer(Baz, 17) ->
+ ?Q("{1, _@Bar}");
+ ?Q("{foo, [_@Bar], '@Baz'}")
+ when erl_syntax:is_atom(Bar, bar),
+ erl_syntax:is_integer(Baz, 42)
+ ; erl_syntax:is_atom(Bar, baz),
+ erl_syntax:is_integer(Baz, 42) ->
+ ?Q("{2, _@Baz}");
+ ?Q("{foo, [_@Bar], '@Baz'}") ->
+ ?Q("{3, _@Baz}");
+ _ -> Tree
+ end
+ end)),
+ ?_assertEqual("{2, foo, Bar, Baz, Bar(), Baz()}",
+ f(begin
+ Tree = ?Q("foo(Bar, Baz) -> Bar(), Baz()."),
+ case Tree of
+ ?Q("'@Func'(_@Args) -> _@Body.") ->
+ ?Q("{1, _@Func, _@Args, _@Body}");
+ ?Q("'@Func'(_@@Args) -> _@@Body.") ->
+ ?Q("{2, _@Func, _@Args, _@Body}");
+ ?Q("'@Func'(_@Args, Baz) -> _@Body1, _@Body2.") ->
+ ?Q("{3, _@Func, _@Args, _@Body1, _@Body2}")
+ end
+ end))
+ ].
+
+-endif.
diff --git a/lib/syntax_tools/src/merl_transform.erl b/lib/syntax_tools/src/merl_transform.erl
new file mode 100644
index 0000000000..66b06c8137
--- /dev/null
+++ b/lib/syntax_tools/src/merl_transform.erl
@@ -0,0 +1,262 @@
+%% ---------------------------------------------------------------------
+%% Licensed under the Apache License, Version 2.0 (the "License"); you may
+%% not use this file except in compliance with the License. You may obtain
+%% a copy of the License at <http://www.apache.org/licenses/LICENSE-2.0>
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% @author Richard Carlsson <[email protected]>
+%% @copyright 2012-2015 Richard Carlsson
+%% @doc Parse transform for merl. Enables the use of automatic metavariables
+%% and of quasi-quotes in matches and case switches. Also optimizes calls
+%% to functions in `merl' by partially evaluating them, turning strings into
+%% templates, etc., at compile-time.
+%%
+%% Using `-include_lib("syntax_tools/include/merl.hrl").' enables this
+%% transform, unless the macro `MERL_NO_TRANSFORM' is defined first.
+
+-module(merl_transform).
+
+-export([parse_transform/2]).
+
+%% NOTE: We cannot use inline metavariables or any other parse transform
+%% features in this module, because it must be possible to compile it with
+%% the parse transform disabled!
+-include("merl.hrl").
+
+%% TODO: unroll calls to switch? it will probably get messy
+
+%% TODO: use Igor to make resulting code independent of merl at runtime?
+
+parse_transform(Forms, _Options) ->
+ erl_syntax:revert_forms(expand(erl_syntax:form_list(Forms))).
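+
+%% Illustrative sketch (not part of the original code) of what the transform
+%% enables: with merl.hrl included, a quasi-quote may appear directly as a
+%% case pattern, and uppercase metavariables become bound Erlang variables:
+%%
+%%     case Tree of
+%%         ?Q("{foo, _@Number}") -> erl_syntax:concrete(Number);
+%%         _ -> not_found
+%%     end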
+
+expand(Tree0) ->
+ Tree = pre(Tree0),
+ post(case erl_syntax:subtrees(Tree) of
+ [] ->
+ Tree;
+ Gs ->
+ erl_syntax:update_tree(Tree,
+ [[expand(T) || T <- G] || G <- Gs])
+ end).
+
+pre(T) ->
+ merl:switch(
+ T,
+ [{?Q("merl:quote(_@line, _@text) = _@expr"),
+ fun ([{expr,_}, {line,Line}, {text,Text}]) ->
+ erl_syntax:is_literal(Text) andalso erl_syntax:is_literal(Line)
+ end,
+ fun ([{expr,Expr}, {line,Line}, {text,Text}]) ->
+ pre_expand_match(Expr, erl_syntax:concrete(Line),
+ erl_syntax:concrete(Text))
+ end},
+ {?Q(["case _@expr of",
+ " merl:quote(_@_, _@text) when _@__@_ -> _@@_; _@_@_ -> 0",
+ "end"]),
+ fun case_guard/1,
+ fun (As) -> case_body(As, T) end},
+ fun () -> T end
+ ]).
+
+case_guard([{expr,_}, {text,Text}]) ->
+ erl_syntax:is_literal(Text).
+
+case_body([{expr,Expr}, {text,_Text}], T) ->
+ pre_expand_case(Expr, erl_syntax:case_expr_clauses(T),
+ erl_syntax:get_pos(T)).
+
+post(T) ->
+ merl:switch(
+ T,
+ [{?Q("merl:_@function(_@@args)"),
+ [{fun ([{args, As}, {function, F}]) ->
+ lists:all(fun erl_syntax:is_literal/1, [F|As])
+ end,
+ fun ([{args, As}, {function, F}]) ->
+ Line = erl_syntax:get_pos(F),
+ [F1|As1] = lists:map(fun erl_syntax:concrete/1, [F|As]),
+ eval_call(Line, F1, As1, T)
+ end},
+ fun ([{args, As}, {function, F}]) ->
+ merl:switch(
+ F,
+ [{?Q("qquote"), fun ([]) -> expand_qquote(As, T, 1) end},
+ {?Q("subst"), fun ([]) -> expand_template(F, As, T) end},
+ {?Q("match"), fun ([]) -> expand_template(F, As, T) end},
+ fun () -> T end
+ ])
+ end]},
+ fun () -> T end]).
+
+expand_qquote([Line, Text, Env], T, _) ->
+ case erl_syntax:is_literal(Line) of
+ true ->
+ expand_qquote([Text, Env], T, erl_syntax:concrete(Line));
+ false ->
+ T
+ end;
+expand_qquote([Text, Env], T, Line) ->
+ case erl_syntax:is_literal(Text) of
+ true ->
+ As = [Line, erl_syntax:concrete(Text)],
+ %% expand further if possible
+ expand(merl:qquote(Line, "merl:subst(_@tree, _@env)",
+ [{tree, eval_call(Line, quote, As, T)},
+ {env, Env}]));
+ false ->
+ T
+ end;
+expand_qquote(_As, T, _StartPos) ->
+ T.
+
+expand_template(F, [Pattern | Args], T) ->
+ case erl_syntax:is_literal(Pattern) of
+ true ->
+ Line = erl_syntax:get_pos(Pattern),
+ As = [erl_syntax:concrete(Pattern)],
+ merl:qquote(Line, "merl:_@function(_@pattern, _@args)",
+ [{function, F},
+ {pattern, eval_call(Line, template, As, T)},
+ {args, Args}]);
+ false ->
+ T
+ end;
+expand_template(_F, _As, T) ->
+ T.
+
+eval_call(Line, F, As, T) ->
+ try apply(merl, F, As) of
+ T1 when F =:= quote ->
+ %% lift metavariables in a template to Erlang variables
+ Template = merl:template(T1),
+ Vars = merl:template_vars(Template),
+ case lists:any(fun is_inline_metavar/1, Vars) of
+ true when is_list(T1) ->
+ merl:qquote(Line, "merl:tree([_@template])",
+ [{template, merl:meta_template(Template)}]);
+ true ->
+ merl:qquote(Line, "merl:tree(_@template)",
+ [{template, merl:meta_template(Template)}]);
+ false ->
+ merl:term(T1)
+ end;
+ T1 ->
+ merl:term(T1)
+ catch
+ throw:_Reason -> T
+ end.
+
+pre_expand_match(Expr, Line, Text) ->
+ {Template, Out, _Vars} = rewrite_pattern(Line, Text),
+ merl:qquote(Line, "{ok, _@out} = merl:match(_@template, _@expr)",
+ [{expr, Expr},
+ {out, Out},
+ {template, erl_syntax:abstract(Template)}]).
+
+rewrite_pattern(Line, Text) ->
+ %% we must rewrite the metavariables in the pattern to use lowercase,
+ %% and then use real matching to bind the Erlang-level variables
+ T0 = merl:template(merl:quote(Line, Text)),
+ Vars = [V || V <- merl:template_vars(T0), is_inline_metavar(V)],
+ {merl:alpha(T0, [{V, var_to_tag(V)} || V <- Vars]),
+ erl_syntax:list([erl_syntax:tuple([erl_syntax:abstract(var_to_tag(V)),
+ erl_syntax:variable(var_name(V))])
+ || V <- Vars]),
+ Vars}.
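+
+%% Illustrative sketch (not part of the original code) of the expansion
+%% performed for a meta-match such as
+%%
+%%     ?Q("{foo, _@Bar}") = Tree
+%%
+%% which is rewritten into code along the lines of
+%%
+%%     {ok, [{bar, Bar}]} = merl:match(Template, Tree)
+%%
+%% where Template is the literal template for "{foo, _@bar}", i.e. with the
+%% uppercase metavariable renamed to its lowercase tag.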
+
+var_name(V) when is_integer(V) ->
+ V1 = if V > 99, (V rem 100) =:= 99 ->
+ V div 100;
+ V > 9, (V rem 10) =:= 9 ->
+ V div 10;
+ true -> V
+ end,
+ list_to_atom("Q" ++ integer_to_list(V1));
+var_name(V) -> V.
+
+var_to_tag(V) when is_integer(V) -> V;
+var_to_tag(V) ->
+ list_to_atom(string:to_lower(atom_to_list(V))).
+
+pre_expand_case(Expr, Clauses, Line) ->
+ merl:qquote(Line, "merl:switch(_@expr, _@clauses)",
+ [{clauses, erl_syntax:list([pre_expand_case_clause(C)
+ || C <- Clauses])},
+ {expr, Expr}]).
+
+pre_expand_case_clause(T) ->
+ %% note that the only allowed non ``?Q(...) -> ...'' clause is ``_ -> ...''
+ merl:switch(
+ T,
+ [{?Q("(merl:quote(_@line, _@text)) when _@__@guard -> _@@body"),
+ fun ([{body,_}, {guard,_}, {line,Line}, {text,Text}]) ->
+ erl_syntax:is_literal(Text) andalso erl_syntax:is_literal(Line)
+ end,
+ fun ([{body,Body}, {guard,Guard}, {line,Line}, {text,Text}]) ->
+ pre_expand_case_clause(Body, Guard, erl_syntax:concrete(Line),
+ erl_syntax:concrete(Text))
+ end},
+ {?Q("_ -> _@@body"),
+ fun (Env) -> merl:qquote("fun () -> _@body end", Env) end}
+ ]).
+
+pre_expand_case_clause(Body, Guard, Line, Text) ->
+ %% this is similar to a meta-match ``?Q("...") = Term''
+ %% (note that the guards may in fact be arbitrary expressions)
+ {Template, Out, Vars} = rewrite_pattern(Line, Text),
+ GuardExprs = rewrite_guard(Guard),
+ Param = [{body, Body},
+ {guard,GuardExprs},
+ {out, Out},
+ {template, erl_syntax:abstract(Template)},
+ {unused, dummy_uses(Vars)}],
+ case GuardExprs of
+ [] ->
+ merl:qquote(Line, ["{_@template, ",
+ " fun (_@out) -> _@unused, _@body end}"],
+ Param);
+ _ ->
+ merl:qquote(Line, ["{_@template, ",
+ " fun (_@out) -> _@unused, _@guard end, ",
+ " fun (_@out) -> _@unused, _@body end}"],
+ Param)
+ end.
+
+%% We have to insert dummy variable uses at the beginning of the "guard" and
+%% "body" function bodies to avoid warnings for unused variables in the
+%% generated code. (Expansions at the Erlang level can't be marked up as
+%% compiler generated to allow later compiler stages to ignore them.)
+dummy_uses(Vars) ->
+ [?Q("_ = _@var", [{var, erl_syntax:variable(var_name(V))}])
+ || V <- Vars].
+
+rewrite_guard([]) -> [];
+rewrite_guard([D]) -> [make_orelse(erl_syntax:disjunction_body(D))].
+
+make_orelse([]) -> [];
+make_orelse([C]) -> make_andalso(erl_syntax:conjunction_body(C));
+make_orelse([C | Cs]) ->
+ ?Q("_@expr orelse _@rest",
+ [{expr, make_andalso(erl_syntax:conjunction_body(C))},
+ {rest, make_orelse(Cs)}]).
+
+make_andalso([E]) -> E;
+make_andalso([E | Es]) ->
+ ?Q("_@expr andalso _@rest", [{expr, E}, {rest, make_andalso(Es)}]).
+
+is_inline_metavar(Var) when is_atom(Var) ->
+ is_erlang_var(atom_to_list(Var));
+is_inline_metavar(Var) when is_integer(Var) ->
+ Var > 9 andalso (Var rem 10) =:= 9;
+is_inline_metavar(_) -> false.
+
+is_erlang_var([C|_]) when C >= $A, C =< $Z ; C >= $À, C =< $Þ, C /= $× ->
+ true;
+is_erlang_var(_) ->
+ false.
diff --git a/lib/syntax_tools/src/syntax_tools.app.src b/lib/syntax_tools/src/syntax_tools.app.src
index 83dcb5fe23..dd4ac46055 100644
--- a/lib/syntax_tools/src/syntax_tools.app.src
+++ b/lib/syntax_tools/src/syntax_tools.app.src
@@ -11,8 +11,11 @@
erl_syntax_lib,
erl_tidy,
igor,
+ merl,
+ merl_transform,
prettypr]},
{registered,[]},
{applications, [stdlib]},
{env, []},
- {runtime_dependencies, ["stdlib-2.0","kernel-3.0","erts-6.0"]}]}.
+ {runtime_dependencies,
+ ["compiler-6.0","erts-6.0","kernel-3.0","stdlib-2.5"]}]}.
diff --git a/lib/syntax_tools/test/Makefile b/lib/syntax_tools/test/Makefile
index f67e3f8984..569c044b1a 100644
--- a/lib/syntax_tools/test/Makefile
+++ b/lib/syntax_tools/test/Makefile
@@ -6,7 +6,8 @@ include $(ERL_TOP)/make/$(TARGET)/otp.mk
# ----------------------------------------------------
MODULES= \
- syntax_tools_SUITE
+ syntax_tools_SUITE \
+ merl_SUITE
ERL_FILES= $(MODULES:%=%.erl)
diff --git a/lib/syntax_tools/test/merl_SUITE.erl b/lib/syntax_tools/test/merl_SUITE.erl
new file mode 100644
index 0000000000..08b0f7a696
--- /dev/null
+++ b/lib/syntax_tools/test/merl_SUITE.erl
@@ -0,0 +1,91 @@
+%% ``The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved via the world wide web at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+-module(merl_SUITE).
+
+-include_lib("test_server/include/test_server.hrl").
+
+%% include the Merl header file
+-include_lib("syntax_tools/include/merl.hrl").
+
+%% for assert macros
+-include_lib("eunit/include/eunit.hrl").
+
+%% Test server specific exports
+-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
+ init_per_group/2,end_per_group/2]).
+
+%% Test cases
+-export([merl_smoke_test/1]).
+
+suite() -> [{ct_hooks,[ts_install_cth]}].
+
+all() ->
+ [merl_smoke_test].
+
+groups() ->
+ [].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_group(_GroupName, Config) ->
+ Config.
+
+end_per_group(_GroupName, Config) ->
+ Config.
+
+-define(tokens2str(X), ??X).
+
+merl_smoke_test(Config) when is_list(Config) ->
+ ?assertThrow({error, "1: syntax error before: '{'" ++ _},
+ f(merl:quote("{"))),
+ ?assertEqual(tuple, erl_syntax:type(merl:term({}))),
+ ?assertEqual("{foo, 42}", f(merl:term({foo, 42}))),
+ ?assertEqual("f(X) -> {ok, X}.", f(?Q("f(X) -> {ok, X}."))),
+ ?assertEqual("{foo, 42}", f(?Q("{foo, 42}"))),
+ ?assertEqual("2 + 2", f(?Q("2 + 2"))),
+ ?assertEqual("%% comment preserved\n{foo, 42}",
+ f(?Q(["%% comment preserved", "{foo, 42}"]))),
+ ?assertEqual("'@foo'", f(merl:tree(merl:template(?Q("'@foo'"))))),
+ ?assertEqual("42", f(merl:subst(?Q("_@foo"), [{foo, merl:term(42)}]))),
+ ?assertEqual({ok, []}, merl:match(?Q("foo"), ?Q("foo"))),
+ ?assertEqual(42, merl:switch(?Q("foo"), [fun () -> 42 end])),
+ ?assertEqual("{foo}", f(begin Foo = ?Q("foo"), ?Q("{_@Foo}") end)),
+ ?assertEqual("{foo}", f(begin Foo = foo, ?Q("{_@Foo@}") end)),
+ ?assertEqual("{[bar], baz()}",
+ f(begin
+ Tree = ?Q("{foo, [bar], baz()}"),
+ ?Q("{foo, _@Bar, '@Baz'}") = Tree,
+ ?Q("{_@Bar, _@Baz}")
+ end)),
+ ?assertEqual("{[bar], baz()}",
+ f(begin
+ Tree = ?Q("{foo, [bar], baz()}"),
+ case Tree of
+ ?Q("{foo, _@Bar, '@Baz'}") -> ?Q("{_@Bar, _@Baz}")
+ end
+ end)),
+ ok.
+
+%% utilities
+
+f(Ts) when is_list(Ts) ->
+ lists:flatmap(fun erl_prettypr:format/1, Ts);
+f(T) ->
+ erl_prettypr:format(T).
diff --git a/lib/syntax_tools/vsn.mk b/lib/syntax_tools/vsn.mk
index 673362d01d..403e90196e 100644
--- a/lib/syntax_tools/vsn.mk
+++ b/lib/syntax_tools/vsn.mk
@@ -1 +1 @@
-SYNTAX_TOOLS_VSN = 1.6.17
+SYNTAX_TOOLS_VSN = 1.7
diff --git a/lib/test_server/doc/src/Makefile b/lib/test_server/doc/src/Makefile
index 8c7fa99886..421079ac94 100644
--- a/lib/test_server/doc/src/Makefile
+++ b/lib/test_server/doc/src/Makefile
@@ -27,6 +27,10 @@ include ../../vsn.mk
VSN=$(TEST_SERVER_VSN)
APPLICATION=test_server
+DOC_EXTRA_FRONT_PAGE_INFO=Important note: \
+The Test Server application is obsolete and will be removed \
+in the next major OTP release
+
# ----------------------------------------------------
# Release directory specification
# ----------------------------------------------------
diff --git a/lib/test_server/doc/src/example_chapter.xml b/lib/test_server/doc/src/example_chapter.xml
index 0ebc85da09..6bc0cfaebe 100644
--- a/lib/test_server/doc/src/example_chapter.xml
+++ b/lib/test_server/doc/src/example_chapter.xml
@@ -47,7 +47,7 @@
-define(default_timeout, ?t:minutes(1)).
init_per_testcase(_Case, Config) ->
- ?line Dog=?t:timetrap(?default_timeout),
+ Dog=?t:timetrap(?default_timeout),
[{watchdog, Dog}|Config].
end_per_testcase(_Case, Config) ->
Dog=?config(watchdog, Config),
@@ -72,8 +72,8 @@ not_started_func1(suite) ->
not_started_func1(doc) ->
["Testing function 1 when application is not started"].
not_started_func1(Config) when list(Config) ->
- ?line {error, not_started} = myapp:func1(dummy_ref,1),
- ?line {error, not_started} = myapp:func1(dummy_ref,2),
+ {error, not_started} = myapp:func1(dummy_ref,1),
+ {error, not_started} = myapp:func1(dummy_ref,2),
ok.
not_started_func2(suite) ->
@@ -81,8 +81,8 @@ not_started_func2(suite) ->
not_started_func2(doc) ->
["Testing function 2 when application is not started"].
not_started_func2(Config) when list(Config) ->
- ?line {error, not_started} = myapp:func2(dummy_ref,1),
- ?line {error, not_started} = myapp:func2(dummy_ref,2),
+ {error, not_started} = myapp:func2(dummy_ref,1),
+ {error, not_started} = myapp:func2(dummy_ref,2),
ok.
@@ -90,7 +90,7 @@ not_started_func2(Config) when list(Config) ->
start(doc) ->
["Testing start of my application."];
start(Config) when list(Config) ->
- ?line Ref = myapp:start(),
+ Ref = myapp:start(),
case erlang:whereis(my_main_process) of
Pid when pid(Pid) ->
[{myapp_ref,Ref}|Config];
@@ -105,9 +105,9 @@ func1(suite) ->
func1(doc) ->
["Test that func1 returns ok when argument is 1 and error if argument is 2"];
func1(Config) when list(Config) ->
- ?line Ref = ?config(myapp_ref,Config),
- ?line ok = myapp:func1(Ref,1),
- ?line error = myapp:func1(Ref,2),
+ Ref = ?config(myapp_ref,Config),
+ ok = myapp:func1(Ref,1),
+ error = myapp:func1(Ref,2),
ok.
func2(suite) ->
@@ -115,17 +115,17 @@ func2(suite) ->
func2(doc) ->
["Test that func1 returns ok when argument is 3 and error if argument is 4"];
func2(Config) when list(Config) ->
- ?line Ref = ?config(myapp_ref,Config),
- ?line ok = myapp:func2(Ref,3),
- ?line error = myapp:func2(Ref,4),
+ Ref = ?config(myapp_ref,Config),
+ ok = myapp:func2(Ref,3),
+ error = myapp:func2(Ref,4),
ok.
%% No specification clause needed for a cleanup function in a conf case!!!
stop(doc) ->
["Testing termination of my application"];
stop(Config) when list(Config) ->
- ?line Ref = ?config(myapp_ref,Config),
- ?line ok = myapp:stop(Ref),
+ Ref = ?config(myapp_ref,Config),
+ ok = myapp:stop(Ref),
case erlang:whereis(my_main_process) of
undefined ->
lists:keydelete(myapp_ref,1,Config);
diff --git a/lib/test_server/doc/src/notes.xml b/lib/test_server/doc/src/notes.xml
index 68dc1fec88..e996d2b4a3 100644
--- a/lib/test_server/doc/src/notes.xml
+++ b/lib/test_server/doc/src/notes.xml
@@ -32,6 +32,73 @@
<file>notes.xml</file>
</header>
+<section><title>Test_Server 3.8.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ If the last expression in a test case causes a timetrap
+ timeout, the stack trace is ignored and not printed to
+ the test case log file. This happens because the
+ {Suite,TestCase,Line} info is not available in the stack
+ trace in this scenario, due to tail call elimination.
+ Common Test has been modified to handle this situation by
+ inserting a {Suite,TestCase,last_expr} tuple in the
+ correct place and printing the stack trace as expected.</p>
+ <p>
+ Own Id: OTP-12697 Aux Id: seq12848 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Test_Server 3.8</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ When installing test suites in a cross compilation
+ environment, ts_install was not able to read the values
+ of the environment variables specified in the
+ configuration file. This has been fixed.</p>
+ <p>
+ Own Id: OTP-11441</p>
+ </item>
+ <item>
+ <p>
+ Printouts by means of ct:log/2/3 or ct:pal/2/3 from the
+ hook functions on_tc_fail/2 and on_tc_skip/2 would (quite
+ unexpectedly) end up in the "unexpected i/o" log file
+ instead of in the test case log file. This behaviour has
+ been changed so that now, all printouts (including stdio
+ printouts) from these hook functions will be routed to
+ the test case log file.</p>
+ <p>
+ Own Id: OTP-12468</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+ The format of the information printed on top of the test
+ case (and configuration function) log file has been
+ slightly modified, mainly in order to make the start
+ configuration data easier to read and interpret.</p>
+ <p>
+ Own Id: OTP-12518 Aux Id: seq12808 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Test_Server 3.7.2</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/test_server/doc/src/test_server.xml b/lib/test_server/doc/src/test_server.xml
index ed5569e1fe..b98e434c03 100644
--- a/lib/test_server/doc/src/test_server.xml
+++ b/lib/test_server/doc/src/test_server.xml
@@ -811,46 +811,12 @@ Only valid for peer nodes. Note that slave nodes always
</func>
</funcs>
- <section>
- <title>TEST SUITE LINE NUMBERS</title>
- <p>If a test case fails, the test server can report the exact line
- number at which it failed. There are two ways of doing this,
- either by using the <c>line</c> macro or by using the
- <c>test_server_line</c> parse transform.
- </p>
- <p>The <c>line</c> macro is described under TEST SUITE SUPPORT
- MACROS below. The <c>line</c> macro will only report the last line
- executed when a test case failed.
- </p>
- <p>The <c>test_server_line</c> parse transform is activated by
- including the headerfile <c>test_server_line.hrl</c> in the test
- suite. When doing this, it is important that the
- <c>test_server_line</c> module is in the code path of the erlang
- node compiling the test suite. The parse transform will report a
- history of a maximum of 10 lines when a test case
- fails. Consecutive lines in the same function are not shown.
- </p>
- <p>The attribute <c>-no_lines(FuncList).</c> can be used in the
- test suite to exclude specific functions from the parse
- transform. This is necessary e.g. for functions that are executed
- on old (i.e. &lt;R10B) OTP releases. <c>FuncList = [{Func,Arity}]</c>.
- </p>
- <p>If both the <c>line</c> macro and the parse transform is used in
- the same module, the parse transform will overrule the macro.
- </p>
- </section>
<section>
<title>TEST SUITE SUPPORT MACROS</title>
<p>There are some macros defined in the <c>test_server.hrl</c>
that are quite useful for test suite programmers:
</p>
- <p>The <em>line</em> macro, is quite
- essential when writing test cases. It tells the test server
- exactly what line of code that is being executed, so that it can
- report this line back if the test case fails. Use this macro at
- the beginning of every test case line of code.
- </p>
<p>The <em>config</em> macro, is used to
retrieve information from the <c>Config</c> variable sent to all
test cases. It is used with two arguments, where the first is the
@@ -867,24 +833,20 @@ Only valid for peer nodes. Note that slave nodes always
<item>Whatever added by conf test cases or
<c>init_per_testcase/2</c></item>
</list>
- <p>Examples of the <c>line</c> and <c>config</c> macros can be
- seen in the Examples chapter in the user's guide.
- </p>
- <p>If the <c>line_trace</c> macro is defined, you will get a
- timestamp (<c>erlang:now()</c>) in your minor log for each
- <c>line</c> macro in your suite. This way you can at any time see
- which line is currently being executed, and when the line was
- called.
- </p>
- <p>The <c>line_trace</c> macro can also be used together with the
- <c>test_server_line</c> parse transform described above. A
- timestamp will then be written for each line in the suite, except
- for functions stated in the <c>-no_lines</c> attribute.
- </p>
- <p>The <c>line_trace</c> macro can e.g. be defined as a compile
- option, like this:
- <br></br>
-<c>erlc -W -Dline_trace my_SUITE.erl</c></p>
+ <p>Examples of the <c>config</c> macro can be seen in the Examples chapter
+ in the user's guide.</p>
+ <p>The <em>line</em> and <em>line_trace</em> macros are deprecated, see
+ below.</p>
+ </section>
+
+ <section>
+ <title>TEST SUITE LINE NUMBERS</title>
+ <p>In the past, ERTS did not produce line numbers when generating
+    stacktraces, so test_server was unable to provide them when reporting
+    test failures. Instead, it had two different mechanisms to do this: either by
+ using the <c>line</c> macro or by using the <c>test_server_line</c> parse
+ transform. Both are deprecated and should not be used in new tests
+ anymore.</p>
</section>
</erlref>
diff --git a/lib/test_server/include/test_server.hrl b/lib/test_server/include/test_server.hrl
index 36e7e1f83d..f206374116 100644
--- a/lib/test_server/include/test_server.hrl
+++ b/lib/test_server/include/test_server.hrl
@@ -21,7 +21,7 @@
-line_trace(true).
-define(line,
io:format(lists:concat([?MODULE,",",integer_to_list(?LINE),": ~p"]),
- [erlang:now()]),).
+ [erlang:monotonic_time()-erlang:system_info(start_time)]),).
-else.
-define(line,).
-endif.
diff --git a/lib/test_server/src/erl2html2.erl b/lib/test_server/src/erl2html2.erl
index b9b45cda25..b0b5c40965 100644
--- a/lib/test_server/src/erl2html2.erl
+++ b/lib/test_server/src/erl2html2.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -22,11 +22,11 @@
%%%------------------------------------------------------------------
-module(erl2html2).
--export([convert/2, convert/3]).
+-export([convert/3, convert/4]).
-convert([], _Dest) -> % Fake clause.
+convert([], _Dest, _InclPath) -> % Fake clause.
ok;
-convert(File, Dest) ->
+convert(File, Dest, InclPath) ->
%% The generated code uses the BGCOLOR attribute in the
%% BODY tag, which wasn't valid until HTML 3.2. Also,
%% good HTML should either override all colour attributes
@@ -48,12 +48,12 @@ convert(File, Dest) ->
"</head>\n\n"
"<body bgcolor=\"white\" text=\"black\""
" link=\"blue\" vlink=\"purple\" alink=\"red\">\n"],
- convert(File, Dest, Header).
+ convert(File, Dest, InclPath, Header).
-convert(File, Dest, Header) ->
+convert(File, Dest, InclPath, Header) ->
%% statistics(runtime),
- case parse_file(File) of
+ case parse_file(File, InclPath) of
{ok,Functions} ->
%% {_, Time1} = statistics(runtime),
%% io:format("Parsed file in ~.2f Seconds.~n",[Time1/1000]),
@@ -92,8 +92,8 @@ convert(File, Dest, Header) ->
%%% Use expanded preprocessor directives if possible (epp). Only if
%%% this fails, fall back on using non-expanded code (epp_dodger).
-parse_file(File) ->
- case epp:open(File, [], []) of
+parse_file(File, InclPath) ->
+ case epp:open(File, InclPath, []) of
{ok,Epp} ->
try parse_preprocessed_file(Epp,File,false) of
Forms ->
@@ -109,25 +109,26 @@ parse_file(File) ->
Error
end.
-parse_preprocessed_file(Epp,File,InCorrectFile) ->
+parse_preprocessed_file(Epp, File, InCorrectFile) ->
case epp:parse_erl_form(Epp) of
{ok,Form} ->
case Form of
{attribute,_,file,{File,_}} ->
- parse_preprocessed_file(Epp,File,true);
+ parse_preprocessed_file(Epp, File, true);
{attribute,_,file,{_OtherFile,_}} ->
- parse_preprocessed_file(Epp,File,false);
- {function,L,F,A,[_|C]} when InCorrectFile ->
- Clauses = [{clause,CL} || {clause,CL,_,_,_} <- C],
- [{atom_to_list(F),A,L} | Clauses] ++
- parse_preprocessed_file(Epp,File,true);
+ parse_preprocessed_file(Epp, File, false);
+ {function,L,F,A,Cs} when InCorrectFile ->
+ {CLs,LastCL} = find_clause_lines(Cs, []),
+		%% tl(CLs) because we know the start line already
+ [{atom_to_list(F),A,get_line(L),LastCL} | tl(CLs)] ++
+ parse_preprocessed_file(Epp, File, true);
_ ->
- parse_preprocessed_file(Epp,File,InCorrectFile)
+ parse_preprocessed_file(Epp, File, InCorrectFile)
end;
{error,Reason={_L,epp,{undefined,_Macro,none}}} ->
throw({error,Reason,InCorrectFile});
{error,_Reason} ->
- parse_preprocessed_file(Epp,File,InCorrectFile);
+ parse_preprocessed_file(Epp, File, InCorrectFile);
{eof,_Location} ->
[]
end.
@@ -145,13 +146,16 @@ parse_non_preprocessed_file(File) ->
parse_non_preprocessed_file(Epp, File, Location) ->
case epp_dodger:parse_form(Epp, Location) of
{ok,Tree,Location1} ->
- case erl_syntax:revert(Tree) of
- {function,L,F,A,[_|C]} ->
- Clauses = [{clause,CL} || {clause,CL,_,_,_} <- C],
- [{atom_to_list(F),A,L} | Clauses] ++
+ try erl_syntax:revert(Tree) of
+ {function,L,F,A,Cs} ->
+ {CLs,LastCL} = find_clause_lines(Cs, []),
+ %% tl(CLs) because we already know the start line
+ [{atom_to_list(F),A,get_line(L),LastCL} | tl(CLs)] ++
parse_non_preprocessed_file(Epp, File, Location1);
_ ->
parse_non_preprocessed_file(Epp, File, Location1)
+ catch
+ _:_ -> parse_non_preprocessed_file(Epp, File, Location1)
end;
{error,_E,Location1} ->
parse_non_preprocessed_file(Epp, File, Location1);
@@ -159,23 +163,56 @@ parse_non_preprocessed_file(Epp, File, Location) ->
[]
end.
+get_line(Anno) ->
+ erl_anno:line(Anno).
+
+%%%-----------------------------------------------------------------
+%%% Find the line number of the last expression in the function
+find_clause_lines([{clause,CL,_Params,_Op,Exprs}], CLs) -> % last clause
+ try tuple_to_list(lists:last(Exprs)) of
+ [_Type,ExprLine | _] when is_integer(ExprLine) ->
+ {lists:reverse([{clause,get_line(CL)}|CLs]), get_line(ExprLine)};
+ [tree,_ | Exprs1] ->
+ find_clause_lines([{clause,CL,undefined,undefined,Exprs1}], CLs);
+ [macro,{_var,ExprLine,_MACRO} | _] when is_integer(ExprLine) ->
+ {lists:reverse([{clause,get_line(CL)}|CLs]), get_line(ExprLine)};
+ _ ->
+ {lists:reverse([{clause,get_line(CL)}|CLs]), get_line(CL)}
+ catch
+ _:_ ->
+ {lists:reverse([{clause,get_line(CL)}|CLs]), get_line(CL)}
+ end;
+
+find_clause_lines([{clause,CL,_Params,_Op,_Exprs} | Cs], CLs) ->
+ find_clause_lines(Cs, [{clause,get_line(CL)}|CLs]).
+
%%%-----------------------------------------------------------------
%%% Add a link target for each line and one for each function definition.
-build_html(SFd,DFd,Encoding,Functions) ->
- build_html(SFd,DFd,Encoding,file:read_line(SFd),1,Functions,false).
+build_html(SFd,DFd,Encoding,FuncsAndCs) ->
+ build_html(SFd,DFd,Encoding,file:read_line(SFd),1,FuncsAndCs,
+ false,undefined).
-build_html(SFd,DFd,Encoding,{ok,Str},L,[{F,A,L}|Functions],_IsFuncDef) ->
+%% line of last expression in function found
+build_html(SFd,DFd,Enc,{ok,Str},LastL,FuncsAndCs,_IsFuncDef,{F,LastL}) ->
+ LastLineLink = test_server_ctrl:uri_encode(F++"-last_expr",utf8),
+ file:write(DFd,["<a name=\"",
+ to_raw_list(LastLineLink,Enc),"\"/>"]),
+ build_html(SFd,DFd,Enc,{ok,Str},LastL,FuncsAndCs,true,undefined);
+%% function start line found
+build_html(SFd,DFd,Enc,{ok,Str},L0,[{F,A,L0,LastL}|FuncsAndCs],
+ _IsFuncDef,_FAndLastL) ->
FALink = test_server_ctrl:uri_encode(F++"-"++integer_to_list(A),utf8),
- file:write(DFd,["<a name=\"",to_raw_list(FALink,Encoding),"\"/>"]),
- build_html(SFd,DFd,Encoding,{ok,Str},L,Functions,true);
-build_html(SFd,DFd,Encoding,{ok,Str},L,[{clause,L}|Functions],_IsFuncDef) ->
- build_html(SFd,DFd,Encoding,{ok,Str},L,Functions,true);
-build_html(SFd,DFd,Encoding,{ok,Str},L,Functions,IsFuncDef) ->
+ file:write(DFd,["<a name=\"",to_raw_list(FALink,Enc),"\"/>"]),
+ build_html(SFd,DFd,Enc,{ok,Str},L0,FuncsAndCs,true,{F,LastL});
+build_html(SFd,DFd,Enc,{ok,Str},L,[{clause,L}|FuncsAndCs],
+ _IsFuncDef,FAndLastL) ->
+ build_html(SFd,DFd,Enc,{ok,Str},L,FuncsAndCs,true,FAndLastL);
+build_html(SFd,DFd,Enc,{ok,Str},L,FuncsAndCs,IsFuncDef,FAndLastL) ->
LStr = line_number(L),
Str1 = line(Str,IsFuncDef),
file:write(DFd,[LStr,Str1]),
- build_html(SFd,DFd,Encoding,file:read_line(SFd),L+1,Functions,false);
-build_html(_SFd,_DFd,_Encoding,eof,L,_Functions,_IsFuncDef) ->
+ build_html(SFd,DFd,Enc,file:read_line(SFd),L+1,FuncsAndCs,false,FAndLastL);
+build_html(_SFd,_DFd,_Enc,eof,L,_FuncsAndCs,_IsFuncDef,_FAndLastL) ->
L.
line_number(L) ->
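
erl2html2:convert/3,4 above now threads an include path down to epp:open/3, so -include and -include_lib directives in the suites can be resolved while collecting function and clause line numbers. A minimal sketch of that epp usage, outside the patch, with an illustrative helper name:

    %% Return {Name, Arity, Line} for every function in File, resolving
    %% preprocessor directives against the given include path.
    functions_in(File, InclPath) ->
        {ok, Epp} = epp:open(File, InclPath, []),
        Funs = collect(Epp, []),
        epp:close(Epp),
        lists:reverse(Funs).

    collect(Epp, Acc) ->
        case epp:parse_erl_form(Epp) of
            {ok, {function, Anno, Name, Arity, _Clauses}} ->
                collect(Epp, [{Name, Arity, erl_anno:line(Anno)} | Acc]);
            {ok, _OtherForm} ->
                collect(Epp, Acc);
            {error, _Reason} ->
                collect(Epp, Acc);
            {eof, _Location} ->
                Acc
        end.
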
diff --git a/lib/test_server/src/test_server.app.src b/lib/test_server/src/test_server.app.src
index 173f7075db..bdd9d28444 100644
--- a/lib/test_server/src/test_server.app.src
+++ b/lib/test_server/src/test_server.app.src
@@ -32,7 +32,7 @@
test_server_break_process]},
{applications, [kernel,stdlib]},
{env, []},
- {runtime_dependencies, ["tools-2.6.14","stdlib-2.0","runtime_tools-1.8.14",
- "observer-2.0","kernel-3.0","inets-5.10",
- "syntax_tools-1.6.16","erts-6.0"]}]}.
+ {runtime_dependencies, ["tools-2.8","stdlib-2.5","runtime_tools-1.8.16",
+ "observer-2.1","kernel-4.0","inets-6.0",
+ "syntax_tools-1.7","erts-7.0"]}]}.
diff --git a/lib/test_server/src/test_server.erl b/lib/test_server/src/test_server.erl
index 9192a76a17..785e687b92 100644
--- a/lib/test_server/src/test_server.erl
+++ b/lib/test_server/src/test_server.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2014. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -130,7 +130,8 @@ cover_compile(CoverInfo=#cover{app=App,excl=all,incl=Include,cross=Cross}) ->
io:fwrite("done\n\n",[]),
{ok,CoverInfo#cover{mods=Include}}
end;
-cover_compile(CoverInfo=#cover{app=App,excl=Exclude,incl=Include,cross=Cross}) ->
+cover_compile(CoverInfo=#cover{app=App,excl=Exclude,
+ incl=Include,cross=Cross}) ->
CrossMods = lists:flatmap(fun({_,M}) -> M end,Cross),
case code:lib_dir(App) of
{error,bad_name} ->
@@ -177,68 +178,35 @@ module_names(Beams) ->
do_cover_compile(Modules) ->
cover:start(),
- pmap1(fun(M) -> do_cover_compile1(M) end,lists:usort(Modules)),
+ Sticky = prepare_cover_compile(Modules,[]),
+ R = cover:compile_beam(Modules),
+ [warn_compile(Error) || Error <- R,element(1,Error)=/=ok],
+ [code:stick_mod(M) || M <- Sticky],
ok.
-do_cover_compile1(M) ->
+warn_compile({error,{Reason,Module}}) ->
+ io:fwrite("\nWARNING: Could not cover compile ~ts: ~p\n",
+ [Module,{error,Reason}]).
+
+%% Make sure all modules are loaded, and unstick them if sticky
+prepare_cover_compile([M|Ms],Sticky) ->
case {code:is_sticky(M),code:is_loaded(M)} of
{true,_} ->
code:unstick_mod(M),
- case cover:compile_beam(M) of
- {ok,_} ->
- ok;
- Error ->
- io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
- [M,Error])
- end,
- code:stick_mod(M);
+ prepare_cover_compile(Ms,[M|Sticky]);
{false,false} ->
case code:load_file(M) of
{module,_} ->
- do_cover_compile1(M);
+ prepare_cover_compile([M|Ms],Sticky);
Error ->
- io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error])
+ io:fwrite("\nWARNING: Could not load ~w: ~p\n",[M,Error]),
+ prepare_cover_compile(Ms,Sticky)
end;
{false,_} ->
- case cover:compile_beam(M) of
- {ok,_} ->
- ok;
- Error ->
- io:fwrite("\nWARNING: Could not cover compile ~w: ~p\n",
- [M,Error])
- end
- end.
-
-pmap1(Fun,List) ->
- NTot = length(List),
- NProcs = erlang:system_info(schedulers) * 2,
- NPerProc = (NTot div NProcs) + 1,
-
- {[],Pids} =
- lists:foldr(
- fun(_,{L,Ps}) ->
- {L1,L2} = if length(L)>=NPerProc -> lists:split(NPerProc,L);
- true -> {L,[]} % last chunk
- end,
- {P,_Ref} =
- spawn_monitor(fun() ->
- exit(lists:map(Fun,L1))
- end),
- {L2,[P|Ps]}
- end,
- {List,[]},
- lists:seq(1,NProcs)),
- collect(Pids,[]).
-
-collect([],Acc) ->
- lists:append(Acc);
-collect([Pid|Pids],Acc) ->
- receive
- {'DOWN', _Ref, process, Pid, Result} ->
- %% collect(lists:delete(Pid,Pids),[Result|Acc])
- collect(Pids,[Result|Acc])
- end.
-
+ prepare_cover_compile(Ms,Sticky)
+ end;
+prepare_cover_compile([],Sticky) ->
+ Sticky.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) ->
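
The rewritten do_cover_compile/1 above drops the local pmap and instead unsticks and loads the modules up front, then hands the whole list to a single cover:compile_beam/1 call. A standalone sketch of that flow, outside the patch, assuming cover:compile_beam/1 returns one result tuple per module when given a list:

    %% Cover-compile Modules in one call, preserving stickiness.
    cover_compile_all(Modules) ->
        cover:start(),
        Sticky = [M || M <- Modules, code:is_sticky(M)],
        [code:unstick_mod(M) || M <- Sticky],
        [code:ensure_loaded(M) || M <- Modules],
        Results = cover:compile_beam(Modules),
        [io:format("WARNING: could not cover compile ~p: ~p~n", [M, Reason])
         || {error, {Reason, M}} <- Results],
        [code:stick_mod(M) || M <- Sticky],
        ok.
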
@@ -268,45 +236,40 @@ collect([Pid|Pids],Acc) ->
%% after the test is completed.
cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) ->
io:fwrite(user, "Cover analysing... ", []),
- DetailsFun =
+ {ATFOk,ATFFail} =
case Analyse of
details ->
case cover:export(filename:join(Dir,"all.coverdata")) of
ok ->
- fun(M) ->
- OutFile = filename:join(Dir,
- atom_to_list(M) ++
- ".COVER.html"),
- case cover:analyse_to_file(M,OutFile,[html]) of
- {ok,_} ->
- {file,OutFile};
- Error ->
- Error
- end
- end;
+ {result,Ok1,Fail1} =
+ cover:analyse_to_file(Modules,[{outdir,Dir},html]),
+ {lists:map(fun(OutFile) ->
+ M = list_to_atom(
+ filename:basename(
+ filename:rootname(OutFile,
+ ".COVER.html")
+ )
+ ),
+ {M,{file,OutFile}}
+ end, Ok1),
+ lists:map(fun({Reason,M}) ->
+ {M,{error,Reason}}
+ end, Fail1)};
Error ->
- fun(_) -> Error end
+ {[],lists:map(fun(M) -> {M,Error} end, Modules)}
end;
overview ->
case cover:export(filename:join(Dir,"all.coverdata")) of
ok ->
- fun(_) -> undefined end;
+ {[],lists:map(fun(M) -> {M,undefined} end, Modules)};
Error ->
- fun(_) -> Error end
+ {[],lists:map(fun(M) -> {M,Error} end, Modules)}
end
end,
- R = pmap2(
- fun(M) ->
- case cover:analyse(M,module) of
- {ok,{M,{Cov,NotCov}}} ->
- {M,{Cov,NotCov,DetailsFun(M)}};
- Err ->
- io:fwrite(user,
- "\nWARNING: Analysis failed for ~w. Reason: ~p\n",
- [M,Err]),
- {M,Err}
- end
- end, Modules),
+ {result,AOk,AFail} = cover:analyse(Modules,module),
+ R0 = merge_analysis_results(AOk,ATFOk++ATFFail,[]) ++
+ [{M,{error,Reason}} || {Reason,M} <- AFail],
+ R = lists:sort(R0),
io:fwrite(user, "done\n\n", []),
case Stop of
@@ -319,19 +282,15 @@ cover_analyse(Dir,#cover{level=Analyse,mods=Modules,stop=Stop}) ->
end,
R.
-pmap2(Fun,List) ->
- Collector = self(),
- Pids = lists:map(fun(E) ->
- spawn(fun() ->
- Collector ! {res,self(),Fun(E)}
- end)
- end, List),
- lists:map(fun(Pid) ->
- receive
- {res,Pid,Res} ->
- Res
- end
- end, Pids).
+merge_analysis_results([{M,{Cov,NotCov}}|T],ATF,Acc) ->
+ case lists:keytake(M,1,ATF) of
+ {value,{_,R},ATF1} ->
+ merge_analysis_results(T,ATF1,[{M,{Cov,NotCov,R}}|Acc]);
+ false ->
+ merge_analysis_results(T,ATF,Acc)
+ end;
+merge_analysis_results([],_,Acc) ->
+ Acc.
do_cover_for_node(Node,CoverFunc) ->
do_cover_for_node(Node,CoverFunc,true).
@@ -445,15 +404,6 @@ run_test_case_apply({CaseNum,Mod,Func,Args,Name,
}).
run_test_case_apply(Mod, Func, Args, Name, RunInit, TimetrapData) ->
- {ok,Cwd} = file:get_cwd(),
- Args2Print = case Args of
- [Args1] when is_list(Args1) ->
- lists:keydelete(tc_group_result, 1, Args1);
- _ ->
- Args
- end,
- print(minor, "Test case started with:\n~w:~w(~tp)\n", [Mod,Func,Args2Print]),
- print(minor, "Current directory is ~tp\n", [Cwd]),
print_timestamp(minor,"Started at "),
print(minor, "", [], internal_raw),
TCCallback = get(test_server_testcase_callback),
@@ -728,7 +678,7 @@ handle_tc_exit(Reason, #st{status=tc,config=Config0,mf={Mod,Func},pid=Pid}=St)
Msg = {E,AbortReason},
{Msg,Loc0,Msg};
Other ->
- {Other,unknown,Other}
+ {{'EXIT',Other},unknown,Other}
end,
Timeout = end_conf_timeout(Reason, St),
Config = [{tc_status,{failed,F}}|Config0],
@@ -742,7 +692,7 @@ handle_tc_exit(Reason, #st{config=Config,mf={Mod,Func0},pid=Pid,
{testcase_aborted=E,AbortReason,Loc0} ->
{{E,AbortReason},Loc0};
Other ->
- {Other,St#st.last_known_loc}
+ {{'EXIT',Other},St#st.last_known_loc}
end,
Func = case Status of
init_per_testcase=F -> {F,Func0};
@@ -779,16 +729,16 @@ do_call_end_conf(Starter,Mod,Func,Data,Conf,TVal) ->
EndConfApply =
fun() ->
timetrap(TVal),
- case catch apply(Mod,end_per_testcase,[Func,Conf]) of
- {'EXIT',Why} ->
+ try apply(Mod,end_per_testcase,[Func,Conf]) of
+ _ -> ok
+ catch
+ _:Why ->
timer:sleep(1),
group_leader() ! {printout,12,
"WARNING! "
"~w:end_per_testcase(~w, ~p)"
" crashed!\n\tReason: ~p\n",
- [Mod,Func,Conf,Why]};
- _ ->
- ok
+ [Mod,Func,Conf,Why]}
end,
Supervisor ! {self(),end_conf}
end,
@@ -817,11 +767,11 @@ spawn_fw_call(Mod,{init_per_testcase,Func},CurrConf,Pid,
Skip = {skip,{failed,{Mod,init_per_testcase,Why}}},
%% if init_per_testcase fails, the test case
%% should be skipped
- case catch do_end_tc_call(Mod,Func, {Pid,Skip,[CurrConf]}, Why) of
- {'EXIT',FwEndTCErr} ->
- exit({fw_notify_done,end_tc,FwEndTCErr});
- _ ->
- ok
+ try do_end_tc_call(Mod,Func, {Pid,Skip,[CurrConf]}, Why) of
+ _ -> ok
+ catch
+ _:FwEndTCErr ->
+ exit({fw_notify_done,end_tc,FwEndTCErr})
end,
%% finished, report back
SendTo ! {self(),fw_notify_done,
@@ -849,12 +799,12 @@ spawn_fw_call(Mod,{end_per_testcase,Func},EndConf,Pid,
" failed!\n\tReason: timetrap timeout"
" after ~w ms!\n", [Mod,Func,EndConf,TVal]},
FailLoc = proplists:get_value(tc_fail_loc, EndConf),
- case catch do_end_tc_call(Mod,Func,
+ try do_end_tc_call(Mod,Func,
{Pid,Report,[EndConf]}, Why) of
- {'EXIT',FwEndTCErr} ->
- exit({fw_notify_done,end_tc,FwEndTCErr});
- _ ->
- ok
+ _ -> ok
+ catch
+ _:FwEndTCErr ->
+ exit({fw_notify_done,end_tc,FwEndTCErr})
end,
Warn = "<font color=\"red\">"
"WARNING: end_per_testcase timed out!</font>",
@@ -890,21 +840,21 @@ spawn_fw_call(Mod,Func,CurrConf,Pid,Error,Loc,SendTo) ->
end,
FwCall =
fun() ->
- case catch fw_error_notify(Mod,Func1,[],
- Error,Loc) of
- {'EXIT',FwErrorNotifyErr} ->
+ try fw_error_notify(Mod,Func1,[],
+ Error,Loc) of
+ _ -> ok
+ catch
+ _:FwErrorNotifyErr ->
exit({fw_notify_done,error_notification,
- FwErrorNotifyErr});
- _ ->
- ok
+ FwErrorNotifyErr})
end,
Conf = [{tc_status,{failed,Error}}|CurrConf],
- case catch do_end_tc_call(Mod,Func1,
- {Pid,Error,[Conf]},Error) of
- {'EXIT',FwEndTCErr} ->
- exit({fw_notify_done,end_tc,FwEndTCErr});
- _ ->
- ok
+ try do_end_tc_call(Mod,Func1,
+ {Pid,Error,[Conf]},Error) of
+ _ -> ok
+ catch
+ _:FwEndTCErr ->
+ exit({fw_notify_done,end_tc,FwEndTCErr})
end,
%% finished, report back
SendTo ! {self(),fw_notify_done,{died,Error,Loc,[],undefined}}
@@ -984,12 +934,15 @@ run_test_case_eval(Mod, Func, Args0, Name, Ref, RunInit,
NewResult = do_end_tc_call(Mod,Func, {{error,Reason},[Conf]},
{fail,Reason}),
{{0,NewResult},Where,[]};
- Skip = {skip,_Reason} ->
- NewResult = do_end_tc_call(Mod,Func, {Skip,Args0}, Skip),
+ Skip = {SkipType,_Reason} when SkipType == skip;
+ SkipType == skipped ->
+ NewResult = do_end_tc_call(Mod,Func,
+ {Skip,Args0}, Skip),
{{0,NewResult},Where,[]};
AutoSkip = {auto_skip,_Reason} ->
%% special case where a conf case "pretends" to be skipped
- NewResult = do_end_tc_call(Mod,Func, {AutoSkip,Args0}, AutoSkip),
+ NewResult =
+ do_end_tc_call(Mod,Func, {AutoSkip,Args0}, AutoSkip),
{{0,NewResult},Where,[]}
end,
exit({Ref,Time,Value,Loc,Opts}).
@@ -1000,10 +953,12 @@ run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) ->
set_tc_state(init_per_testcase, hd(Args)),
ensure_timetrap(Args),
case init_per_testcase(Mod, Func, Args) of
- Skip = {skip,Reason} ->
+ Skip = {SkipType,Reason} when SkipType == skip;
+ SkipType == skipped ->
Line = get_loc(),
Conf = [{tc_status,{skipped,Reason}}|hd(Args)],
- NewRes = do_end_tc_call(Mod,Func, {Skip,[Conf]}, Skip),
+ NewRes = do_end_tc_call(Mod,Func,
+ {Skip,[Conf]}, Skip),
{{0,NewRes},Line,[]};
{skip_and_save,Reason,SaveCfg} ->
Line = get_loc(),
@@ -1021,11 +976,12 @@ run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) ->
{{0,NewRes},[{Mod,Func}],[]};
{ok,NewConf} ->
%% call user callback function if defined
- NewConf1 = user_callback(TCCallback, Mod, Func, init, NewConf),
+ NewConf1 =
+ user_callback(TCCallback, Mod, Func, init, NewConf),
%% save current state in controller loop
set_tc_state(tc, NewConf1),
%% execute the test case
- {{T,Return},Loc} = {ts_tc(Mod, Func, [NewConf1]),get_loc()},
+ {{T,Return},Loc} = {ts_tc(Mod,Func,[NewConf1]), get_loc()},
{EndConf,TSReturn,FWReturn} =
case Return of
{E,TCError} when E=='EXIT' ; E==failed ->
@@ -1041,30 +997,39 @@ run_test_case_eval1(Mod, Func, Args, Name, RunInit, TCCallback) ->
{[{tc_status,{skipped,Why}},
{save_config,SaveCfg}|NewConf1],
Skip,Skip};
- {skip,Why} ->
- {[{tc_status,{skipped,Why}}|NewConf1],Return,Return};
+ {SkipType,Why} when SkipType == skip;
+ SkipType == skipped ->
+ {[{tc_status,{skipped,Why}}|NewConf1],Return,
+ Return};
_ ->
{[{tc_status,ok}|NewConf1],Return,ok}
end,
%% call user callback function if defined
- EndConf1 = user_callback(TCCallback, Mod, Func, 'end', EndConf),
+ EndConf1 =
+ user_callback(TCCallback, Mod, Func, 'end', EndConf),
%% update current state in controller loop
{FWReturn1,TSReturn1,EndConf2} =
case end_per_testcase(Mod, Func, EndConf1) of
SaveCfg1={save_config,_} ->
- {FWReturn,TSReturn,[SaveCfg1|lists:keydelete(save_config,1,
- EndConf1)]};
+ {FWReturn,TSReturn,
+ [SaveCfg1|lists:keydelete(save_config,1,
+ EndConf1)]};
{fail,ReasonToFail} ->
%% user has failed the testcase
- fw_error_notify(Mod, Func, EndConf1, ReasonToFail),
- {{error,ReasonToFail},{failed,ReasonToFail},EndConf1};
- {failed,{_,end_per_testcase,_}} = Failure when FWReturn == ok ->
+ fw_error_notify(Mod, Func, EndConf1,
+ ReasonToFail),
+ {{error,ReasonToFail},
+ {failed,ReasonToFail},
+ EndConf1};
+ {failed,{_,end_per_testcase,_}} = Failure when
+ FWReturn == ok ->
%% unexpected termination in end_per_testcase
%% report this as the result to the framework
{Failure,TSReturn,EndConf1};
_ ->
- %% test case result should be reported to framework
- %% no matter the status of end_per_testcase
+ %% test case result should be reported to
+ %% framework no matter the status of
+ %% end_per_testcase
{FWReturn,TSReturn,EndConf1}
end,
%% clear current state in controller loop
@@ -1131,7 +1096,8 @@ process_return_val([Return], M,F,A, Loc, Final) when is_list(Return) ->
ReturnTags = [skip,skip_and_save,save_config,comment,return_group_result],
%% check if all elements in the list are valid end conf return value tuples
case lists:all(fun(Val) when is_tuple(Val) ->
- lists:any(fun(T) -> T == element(1, Val) end, ReturnTags);
+ lists:any(fun(T) -> T == element(1, Val) end,
+ ReturnTags);
(ok) ->
true;
(_) ->
@@ -1165,14 +1131,19 @@ process_return_val1([Failed={E,TCError}|_], M,F,A=[Args], Loc, _, SaveOpts)
NewReturn ->
{NewReturn,SaveOpts}
end;
-process_return_val1([SaveCfg={save_config,_}|Opts], M,F,[Args], Loc, Final, SaveOpts) ->
+process_return_val1([SaveCfg={save_config,_}|Opts], M,F,[Args],
+ Loc, Final, SaveOpts) ->
process_return_val1(Opts, M,F,[[SaveCfg|Args]], Loc, Final, SaveOpts);
-process_return_val1([{skip_and_save,Why,SaveCfg}|Opts], M,F,[Args], Loc, _, SaveOpts) ->
- process_return_val1(Opts, M,F,[[{save_config,SaveCfg}|Args]], Loc, {skip,Why}, SaveOpts);
-process_return_val1([GR={return_group_result,_}|Opts], M,F,A, Loc, Final, SaveOpts) ->
+process_return_val1([{skip_and_save,Why,SaveCfg}|Opts], M,F,[Args],
+ Loc, _, SaveOpts) ->
+ process_return_val1(Opts, M,F,[[{save_config,SaveCfg}|Args]],
+ Loc, {skip,Why}, SaveOpts);
+process_return_val1([GR={return_group_result,_}|Opts], M,F,A,
+ Loc, Final, SaveOpts) ->
process_return_val1(Opts, M,F,A, Loc, Final, [GR|SaveOpts]);
-process_return_val1([RetVal={Tag,_}|Opts], M,F,A, Loc, _, SaveOpts) when Tag==skip;
- Tag==comment ->
+process_return_val1([RetVal={Tag,_}|Opts], M,F,A,
+ Loc, _, SaveOpts) when Tag==skip;
+ Tag==comment ->
process_return_val1(Opts, M,F,A, Loc, RetVal, SaveOpts);
process_return_val1([_|Opts], M,F,A, Loc, Final, SaveOpts) ->
process_return_val1(Opts, M,F,A, Loc, Final, SaveOpts);
@@ -1186,7 +1157,8 @@ process_return_val1([], M,F,A, _Loc, Final, SaveOpts) ->
user_callback(undefined, _, _, _, Args) ->
Args;
-user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd, [Args]) when is_list(Args) ->
+user_callback({CBMod,CBFunc}, Mod, Func, InitOrEnd,
+ [Args]) when is_list(Args) ->
case catch apply(CBMod, CBFunc, [InitOrEnd,Mod,Func,Args]) of
Args1 when is_list(Args1) ->
[Args1];
@@ -1341,12 +1313,30 @@ get_loc(Pid) ->
Stk = [rewrite_loc_item(Loc) || Loc <- Stk0],
case get(test_server_loc) of
[{Suite,Case}] ->
- %% location info unknown, check if {Suite,Case,Line}
- %% is available in stacktrace. and if so, use stacktrace
- %% instead of current test_server_loc
+ %% Location info unknown, check if {Suite,Case,Line}
+ %% is available in stacktrace and if so, use stacktrace
+ %% instead of current test_server_loc.
+ %% If location is the last expression in a test case
+ %% function, the info is not available due to tail call
+ %% elimination. We need to check if the test case has been
+ %% called by ts_tc/3 and, if so, insert the test case info
+ %% at that position.
case [match || {S,C,_L} <- Stk, S == Suite, C == Case] of
- [match|_] -> put(test_server_loc, Stk);
- _ -> ok
+ [match|_] ->
+ put(test_server_loc, Stk);
+ _ ->
+ {PreTC,PostTC} =
+ lists:splitwith(fun({test_server,ts_tc,_}) ->
+ false;
+ (_) ->
+ true
+ end, Stk),
+ if PostTC == [] ->
+ ok;
+ true ->
+ put(test_server_loc,
+ PreTC++[{Suite,Case,last_expr} | PostTC])
+ end
end;
_ ->
put(test_server_loc, Stk)
@@ -1373,8 +1363,8 @@ fw_error_notify(Mod, Func, Args, Error, Loc) ->
%% Just like io:format, except that depending on the Detail value, the output
%% is directed to console, major and/or minor log files.
-print(Detail,Format,Args) ->
- test_server_ctrl:print(Detail, Format, Args).
+%% print(Detail,Format,Args) ->
+%% test_server_ctrl:print(Detail, Format, Args).
print(Detail,Format,Args,Printer) ->
test_server_ctrl:print(Detail, Format, Args, Printer).
@@ -1408,9 +1398,12 @@ lookup_config(Key,Config) ->
undefined
end.
-%% timer:tc/3
+%%
+%% IMPORTANT: get_loc/1 uses the name of this function when analysing
+%% stack traces. If the name changes, get_loc/1 must be updated!
+%%
ts_tc(M, F, A) ->
- Before = erlang:now(),
+ Before = erlang:monotonic_time(),
Result = try
apply(M, F, A)
catch
@@ -1430,12 +1423,8 @@ ts_tc(M, F, A) ->
{'EXIT',Reason}
end
end,
- After = erlang:now(),
- Elapsed =
- (element(1,After)*1000000000000
- +element(2,After)*1000000+element(3,After)) -
- (element(1,Before)*1000000000000
- +element(2,Before)*1000000+element(3,Before)),
+ After = erlang:monotonic_time(),
+ Elapsed = erlang:convert_time_unit(After-Before, native, micro_seconds),
{Elapsed, Result}.
set_loc(Stk) ->
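
ts_tc/3 and the timetrap code above switch from erlang:now/0 tuple arithmetic to monotonic time. A minimal sketch of the measuring pattern, outside the patch:

    %% Time a call; returns {ElapsedMicroseconds, Result}.
    time_call(M, F, A) ->
        Before = erlang:monotonic_time(),
        Result = apply(M, F, A),
        After = erlang:monotonic_time(),
        {erlang:convert_time_unit(After - Before, native, micro_seconds), Result}.
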
@@ -1778,7 +1767,8 @@ timetrap(Timeout0, TimeToReport0, TCPid, MultAndScale = {Multiplier,Scale}) ->
put(test_server_timetraps,[{Handle,TCPid,{TimeToReport,Scale}}]);
List ->
List1 = lists:delete({infinity,TCPid,{infinity,false}}, List),
- put(test_server_timetraps,[{Handle,TCPid,{TimeToReport,Scale}}|List1])
+ put(test_server_timetraps,[{Handle,TCPid,
+ {TimeToReport,Scale}}|List1])
end,
Handle.
@@ -1837,7 +1827,9 @@ time_ms(Ms, _, _) when is_integer(Ms) -> Ms;
time_ms(infinity, _, _) -> infinity;
time_ms(Fun, TCPid, MultAndScale) when is_function(Fun) ->
time_ms_apply(Fun, TCPid, MultAndScale);
-time_ms({M,F,A}=MFA, TCPid, MultAndScale) when is_atom(M), is_atom(F), is_list(A) ->
+time_ms({M,F,A}=MFA, TCPid, MultAndScale) when is_atom(M),
+ is_atom(F),
+ is_list(A) ->
time_ms_apply(MFA, TCPid, MultAndScale);
time_ms(Other, _, _) -> exit({invalid_time_format,Other}).
@@ -1851,7 +1843,7 @@ time_ms_check(Other) ->
time_ms_apply(Func, TCPid, MultAndScale) ->
{_,GL} = process_info(TCPid, group_leader),
WhoAmI = self(), % either TC or IO server
- T0 = now(),
+ T0 = erlang:monotonic_time(),
UserTTSup =
spawn(fun() ->
user_timetrap_supervisor(Func, WhoAmI, TCPid,
@@ -1884,7 +1876,8 @@ user_timetrap_supervisor(Func, Spawner, TCPid, GL, T0, MultAndScale) ->
receive
{UserTT,Result} ->
demonitor(MonRef, [flush]),
- Elapsed = trunc(timer:now_diff(now(), T0) / 1000),
+ T1 = erlang:monotonic_time(),
+ Elapsed = erlang:convert_time_unit(T1-T0, native, milli_seconds),
try time_ms_check(Result) of
TimeVal ->
%% this is the new timetrap value to set (return value
@@ -1952,7 +1945,7 @@ update_user_timetraps(TCPid, StartTime) ->
proplists:delete(TCPid, UserTTs)),
proceed;
{OtherUserTTSup,OtherStartTime} ->
- case timer:now_diff(OtherStartTime, StartTime) of
+ case OtherStartTime - StartTime of
Diff when Diff >= 0 ->
ignore;
_ ->
@@ -2407,9 +2400,8 @@ is_release_available(Release) ->
%%
run_on_shielded_node(Fun, CArgs) when is_function(Fun), is_list(CArgs) ->
- {A,B,C} = now(),
- Name = "shielded_node-" ++ integer_to_list(A) ++ "-" ++ integer_to_list(B)
- ++ "-" ++ integer_to_list(C),
+ Nr = erlang:unique_integer([positive]),
+ Name = "shielded_node-" ++ integer_to_list(Nr),
Node = case start_node(Name, slave, [{args, "-hidden " ++ CArgs}]) of
{ok, N} -> N;
Err -> fail({failed_to_start_shielded_node, Err})
@@ -2468,9 +2460,8 @@ is_cover(Name) ->
%% A filename of the form <Stem><Number> is generated, and the
%% function checks that that file doesn't already exist.
temp_name(Stem) ->
- {A,B,C} = erlang:now(),
- RandomNum = A bxor B bxor C,
- RandomName = Stem ++ integer_to_list(RandomNum),
+ Num = erlang:unique_integer([positive]),
+ RandomName = Stem ++ integer_to_list(Num),
{ok,Files} = file:list_dir(filename:dirname(Stem)),
case lists:member(RandomName,Files) of
true ->
@@ -2500,11 +2491,7 @@ appup_test(App) ->
%% Checks whether the module is natively compiled or not.
is_native(Mod) ->
- case catch Mod:module_info(native_addresses) of
- [_|_] -> true;
- _Other -> false
- end.
-
+ (catch Mod:module_info(native)) =:= true.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% comment(String) -> ok
diff --git a/lib/test_server/src/test_server_ctrl.erl b/lib/test_server/src/test_server_ctrl.erl
index af8921fe75..d0c8a1ebe8 100644
--- a/lib/test_server/src/test_server_ctrl.erl
+++ b/lib/test_server/src/test_server_ctrl.erl
@@ -99,7 +99,7 @@
-define(last_link, "last_link").
-define(last_test, "last_test").
-define(html_ext, ".html").
--define(now, erlang:now()).
+-define(now, os:timestamp()).
-define(void_fun, fun() -> ok end).
-define(mod_result(X), if X == skip -> skipped;
@@ -1204,19 +1204,14 @@ init_tester(Mod, Func, Args, Dir, Name, {_,_,MinLev}=Levels,
report_severe_error(Reason) ->
test_server_sup:framework_call(report, [severe_error,Reason]).
-%% timer:tc/3
-ts_tc(M, F, A) ->
- Before = ?now,
- Val = (catch apply(M, F, A)),
- After = ?now,
- Elapsed = elapsed_time(Before, After),
- {Elapsed,Val}.
-
-elapsed_time(Before, After) ->
- (element(1,After)*1000000000000 +
- element(2,After)*1000000 + element(3,After)) -
- (element(1,Before)*1000000000000 +
- element(2,Before)*1000000 + element(3,Before)).
+ts_tc(M,F,A) ->
+ Before = erlang:monotonic_time(),
+ Result = (catch apply(M, F, A)),
+ After = erlang:monotonic_time(),
+ Elapsed = erlang:convert_time_unit(After-Before,
+ native,
+ micro_seconds),
+ {Elapsed, Result}.
start_extra_tools(ExtraTools) ->
start_extra_tools(ExtraTools, []).
@@ -1808,20 +1803,37 @@ start_minor_log_file1(Mod, Func, LogDir, AbsName, MFA) ->
put(test_server_minor_footer, Footer),
io:put_chars(Fd, Header),
+ io:put_chars(Fd, "<a name=\"top\"></a>"),
+ io:put_chars(Fd, "<pre>\n"),
+
SrcListing = downcase(atom_to_list(Mod)) ++ ?src_listing_ext,
- case {filelib:is_file(filename:join(LogDir, SrcListing)),
- lists:member(no_src, get(test_server_logopts))} of
- {true,false} ->
- print(Lev, "<a href=\"~ts#~ts\">source code for ~w:~w/1</a>\n",
- [uri_encode(SrcListing),
- uri_encode(atom_to_list(Func)++"-1",utf8),
- Mod,Func]);
+
+ case get_fw_mod(?MODULE) of
+ Mod when Func == error_in_suite ->
+ ok;
_ ->
- ok
+ {Info,Arity} =
+ if Func == init_per_suite; Func == end_per_suite ->
+ {"Config function: ", 1};
+ Func == init_per_group; Func == end_per_group ->
+ {"Config function: ", 2};
+ true ->
+ {"Test case: ", 1}
+ end,
+
+ case {filelib:is_file(filename:join(LogDir, SrcListing)),
+ lists:member(no_src, get(test_server_logopts))} of
+ {true,false} ->
+ print(Lev, Info ++ "<a href=\"~ts#~ts\">~w:~w/~w</a> "
+ "(click for source code)\n",
+ [uri_encode(SrcListing),
+ uri_encode(atom_to_list(Func)++"-1",utf8),
+ Mod,Func,Arity]);
+ _ ->
+ print(Lev, Info ++ "~w:~w/~w\n", [Mod,Func,Arity])
+ end
end,
- io:put_chars(Fd, "<pre>\n"),
-
AbsName.
stop_minor_log_file() ->
@@ -1927,15 +1939,20 @@ html_possibly_convert(Src, SrcInfo, Dest) ->
{ok,DestInfo} when DestInfo#file_info.mtime >= SrcInfo#file_info.mtime ->
ok; % dest file up to date
_ ->
+ InclPath = case application:get_env(test_server, include) of
+ {ok,Incls} -> Incls;
+ _ -> []
+ end,
+
OutDir = get(test_server_log_dir_base),
case test_server_sup:framework_call(get_html_wrapper,
["Module "++Src,false,
OutDir,undefined,
encoding(Src)], "") of
Empty when (Empty == "") ; (element(2,Empty) == "") ->
- erl2html2:convert(Src, Dest);
+ erl2html2:convert(Src, Dest, InclPath);
{_,Header,_} ->
- erl2html2:convert(Src, Dest, Header)
+ erl2html2:convert(Src, Dest, InclPath, Header)
end
end.
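
The include path picked up above comes from the test_server application environment, so a framework that needs -include files resolved in the source listings can set it before the run (the path is an example only):

    application:set_env(test_server, include, ["/path/to/suite/include"]).
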
@@ -2008,7 +2025,7 @@ add_init_and_end_per_suite([{conf,_,_,{Mod,_}}=Case|Cases], LastMod,
PreCases ++ [Case|add_init_and_end_per_suite(Cases, NextMod,
NextRef, FwMod)];
add_init_and_end_per_suite([SkipCase|Cases], LastMod, LastRef, FwMod)
- when element(1,SkipCase) == skip_case ->
+ when element(1,SkipCase) == skip_case; element(1,SkipCase) == auto_skip_case->
[SkipCase|add_init_and_end_per_suite(Cases, LastMod, LastRef, FwMod)];
add_init_and_end_per_suite([{conf,_,_,_}=Case|Cases], LastMod, LastRef, FwMod) ->
[Case|add_init_and_end_per_suite(Cases, LastMod, LastRef, FwMod)];
@@ -2473,7 +2490,7 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
file:set_cwd(filename:dirname(get(test_server_dir))),
After = ?now,
Before = get(test_server_parallel_start_time),
- Elapsed = elapsed_time(Before, After)/1000000,
+ Elapsed = timer:now_diff(After, Before)/1000000,
put(test_server_total_time, Elapsed),
{false,tl(Mode0),undefined,Elapsed,
update_status(Ref, OkSkipFail, Status)};
@@ -2482,7 +2499,7 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
%% parallel group (io buffering is active)
OkSkipFail = wait_for_cases(Ref),
queue_test_case_io(Ref, self(), 0, Mod, Func),
- Elapsed = elapsed_time(conf_start(Ref, Mode0),?now)/1000000,
+ Elapsed = timer:now_diff(?now, conf_start(Ref, Mode0))/1000000,
case CurrIOHandler of
{Ref,_} ->
%% current_io_handler was set by start conf of this
@@ -2499,12 +2516,12 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
%% this is an end conf for a non-parallel group that's not
%% nested under a parallel group, so no need to buffer io
{false,tl(Mode0),undefined,
- elapsed_time(conf_start(Ref, Mode0),?now)/1000000, Status};
+ timer:now_diff(?now, conf_start(Ref, Mode0))/1000000, Status};
{Ref,_} ->
%% this is an end conf for a non-parallel group nested under
%% a parallel group (io buffering is active)
queue_test_case_io(Ref, self(), 0, Mod, Func),
- Elapsed = elapsed_time(conf_start(Ref, Mode0),?now)/1000000,
+ Elapsed = timer:now_diff(?now, conf_start(Ref, Mode0))/1000000,
case CurrIOHandler of
{Ref,_} ->
%% current_io_handler was set by start conf of this
@@ -2559,7 +2576,7 @@ run_test_cases_loop([{conf,Ref,Props,{Mod,Func}}|_Cases]=Cs0,
%% 1. check the TS_RANDOM_SEED env variable
%% 2. check random_seed in process state
%% 3. use value provided with shuffle option
- %% 4. use now() values for seed
+ %% 4. use timestamp() values for seed
case os:getenv("TS_RANDOM_SEED") of
Undef when Undef == false ; Undef == "undefined" ->
case get(test_server_random_seed) of
@@ -3071,13 +3088,11 @@ print_conf_time(ConfTime) ->
print(major, "=group_time ~.3fs", [ConfTime]),
print(minor, "~n=== Total execution time of group: ~.3fs~n", [ConfTime]).
-print_props(_, []) ->
+print_props([]) ->
ok;
-print_props(true, Props) ->
+print_props(Props) ->
print(major, "=group_props ~p", [Props]),
- print(minor, "Group properties: ~p~n", [Props]);
-print_props(_, _) ->
- ok.
+ print(minor, "Group properties: ~p~n", [Props]).
%% repeat N times: {repeat,N}
%% repeat N times or until all successful: {repeat_until_all_ok,N}
@@ -3682,7 +3697,6 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit,
print(major, "=case ~w:~w", [Mod, Func]),
MinorName = start_minor_log_file(Mod, Func, self() /= Main),
- print(minor, "<a name=\"top\"></a>", [], internal_raw),
MinorBase = filename:basename(MinorName),
print(major, "=logfile ~ts", [filename:basename(MinorName)]),
@@ -3696,8 +3710,8 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit,
RunDir = filename:dirname(MinorName),
Ext =
if Num == 0 ->
- {_,S,Us} = now(),
- lists:flatten(io_lib:format(".~w.~w", [S,Us]));
+ Nr = erlang:unique_integer([positive]),
+ lists:flatten(io_lib:format(".~w", [Nr]));
true ->
lists:flatten(io_lib:format(".~w", [Num]))
end,
@@ -3715,7 +3729,20 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit,
[tc_start,{{Mod,{Func,GrName}},
MinorName}]),
- print_props((RunInit==skip_init), get_props(Mode)),
+ {ok,Cwd} = file:get_cwd(),
+ Args2Print = if is_list(UpdatedArgs) ->
+ lists:keydelete(tc_group_result, 1, UpdatedArgs);
+ true ->
+ UpdatedArgs
+ end,
+ if RunInit == skip_init ->
+ print_props(get_props(Mode));
+ true ->
+ ok
+ end,
+ print(minor, "Config value:\n\n ~tp\n", [Args2Print]),
+ print(minor, "Current directory is ~tp\n", [Cwd]),
+
GrNameStr = case GrName of
undefined -> "";
Name -> cast_to_list(Name)
@@ -3924,8 +3951,8 @@ progress(skip, CaseNum, Mod, Func, GrName, Loc, Reason, Time,
"<td>~ts~ts</td></tr>\n",
[Time,Color,ReasonStr2,Comment1]),
FormatLoc = test_server_sup:format_loc(Loc),
- print(minor, "=== location ~ts", [FormatLoc]),
- print(minor, "=== reason = ~ts", [ReasonStr1]),
+ print(minor, "=== Location: ~ts", [FormatLoc]),
+ print(minor, "=== Reason: ~ts", [ReasonStr1]),
Ret;
progress(failed, CaseNum, Mod, Func, GrName, Loc, timetrap_timeout, T,
@@ -3950,8 +3977,8 @@ progress(failed, CaseNum, Mod, Func, GrName, Loc, timetrap_timeout, T,
"<td>~ts</td></tr>\n",
[T/1000,Comment]),
FormatLoc = test_server_sup:format_loc(Loc),
- print(minor, "=== location ~ts", [FormatLoc]),
- print(minor, "=== reason = timetrap timeout", []),
+ print(minor, "=== Location: ~ts", [FormatLoc]),
+ print(minor, "=== Reason: timetrap timeout", []),
failed;
progress(failed, CaseNum, Mod, Func, GrName, Loc, {testcase_aborted,Reason}, _T,
@@ -3976,13 +4003,13 @@ progress(failed, CaseNum, Mod, Func, GrName, Loc, {testcase_aborted,Reason}, _T,
"<td>~ts</td></tr>\n",
[Comment]),
FormatLoc = test_server_sup:format_loc(Loc),
- print(minor, "=== location ~ts", [FormatLoc]),
- print(minor, "=== reason = {testcase_aborted,~p}", [Reason]),
+ print(minor, "=== Location: ~ts", [FormatLoc]),
+ print(minor, "=== Reason: {testcase_aborted,~p}", [Reason]),
failed;
progress(failed, CaseNum, Mod, Func, GrName, unknown, Reason, Time,
Comment0, {St0,St1}) ->
- print(major, "=result failed: ~p, ~w", [Reason,unknown]),
+ print(major, "=result failed: ~p, ~w", [Reason,unknown_location]),
print(1, "*** FAILED ~ts ***",
[get_info_str(Mod,Func, CaseNum, get(test_server_cases))]),
test_server_sup:framework_call(report, [tc_done,{Mod,{Func,GrName},
@@ -4011,14 +4038,21 @@ progress(failed, CaseNum, Mod, Func, GrName, unknown, Reason, Time,
"<td><font color=\"red\">FAILED</font></td>"
"<td>~ts</td></tr>\n",
[TimeStr,Comment]),
- print(minor, "=== location ~w", [unknown]),
+ print(minor, "=== Location: ~w", [unknown]),
{FStr,FormattedReason} = format_exception(Reason),
- print(minor, "=== reason = " ++ FStr, [FormattedReason]),
+ print(minor, "=== Reason: " ++ FStr, [FormattedReason]),
failed;
progress(failed, CaseNum, Mod, Func, GrName, Loc, Reason, Time,
Comment0, {St0,St1}) ->
- print(major, "=result failed: ~p, ~p", [Reason,Loc]),
+ {LocMaj,LocMin} = if Func == error_in_suite ->
+ case get_fw_mod(undefined) of
+ Mod -> {unknown_location,unknown};
+ _ -> {Loc,Loc}
+ end;
+ true -> {Loc,Loc}
+ end,
+ print(major, "=result failed: ~p, ~p", [Reason,LocMaj]),
print(1, "*** FAILED ~ts ***",
[get_info_str(Mod,Func, CaseNum, get(test_server_cases))]),
test_server_sup:framework_call(report, [tc_done,{Mod,{Func,GrName},
@@ -4031,16 +4065,16 @@ progress(failed, CaseNum, Mod, Func, GrName, Loc, Reason, Time,
"" -> "";
_ -> xhtml("<br>","<br />") ++ to_string(Comment0)
end,
- FormatLastLoc = test_server_sup:format_loc(get_last_loc(Loc)),
+ FormatLastLoc = test_server_sup:format_loc(get_last_loc(LocMaj)),
print(html,
"<td>" ++ St0 ++ "~ts" ++ St1 ++ "</td>"
"<td><font color=\"red\">FAILED</font></td>"
"<td><font color=\"red\">~ts</font>~ts</td></tr>\n",
[TimeStr,FormatLastLoc,Comment]),
- FormatLoc = test_server_sup:format_loc(Loc),
- print(minor, "=== location ~ts", [FormatLoc]),
+ FormatLoc = test_server_sup:format_loc(LocMin),
+ print(minor, "=== Location: ~ts", [FormatLoc]),
{FStr,FormattedReason} = format_exception(Reason),
- print(minor, "=== reason = " ++ FStr, [FormattedReason]),
+ print(minor, "=== Reason: " ++ FStr, [FormattedReason]),
failed;
progress(ok, _CaseNum, Mod, Func, GrName, _Loc, RetVal, Time,
@@ -4069,7 +4103,7 @@ progress(ok, _CaseNum, Mod, Func, GrName, _Loc, RetVal, Time,
"<td><font color=\"green\">Ok</font></td>"
"~ts</tr>\n",
[Time,Comment]),
- print(minor, "=== returned value = ~p", [RetVal]),
+ print(minor, "=== Returned value: ~p", [RetVal]),
ok.
%%--------------------------------------------------------------------
@@ -4657,10 +4691,10 @@ collect_cases({make,InitMFA,CaseList,FinMFA}, St0, Mode) ->
collect_cases({Module, Cases}, St, Mode) when is_list(Cases) ->
case (catch collect_case(Cases, St#cc{mod=Module}, [], Mode)) of
- {ok, NewCases, NewSt} ->
- {ok, NewCases, NewSt};
+ Result = {ok,_,_} ->
+ Result;
Other ->
- {error, Other}
+ {error,Other}
end;
collect_cases({_Mod,_Case}=Spec, St, Mode) ->
@@ -4678,9 +4712,9 @@ collect_case({Mod,{conf,_,_,_,_}=Conf}, St, Mode) ->
collect_case(MFA, St, Mode) ->
case in_skip_list(MFA, St#cc.skip) of
- {true,Comment} ->
+ {true,Comment} when Comment /= make_failed ->
{ok,[{skip_case,{MFA,Comment},Mode}],St};
- false ->
+ _ ->
case MFA of
{Mod,Case} -> collect_case_invoke(Mod, Case, MFA, St, Mode);
{_Mod,_Case,_Args} -> {ok,[MFA],St}
@@ -4742,17 +4776,25 @@ collect_case_subcases(Mod, Case, SubCases, St0, Mode) ->
collect_files(Dir, Pattern, St, Mode) ->
{ok,Cwd} = file:get_cwd(),
Dir1 = filename:join(Cwd, Dir),
- Wc = filename:join([Dir1,Pattern++code:objfile_extension()]),
+ Wc = filename:join([Dir1,Pattern++"{.erl,"++code:objfile_extension()++"}"]),
case catch filelib:wildcard(Wc) of
{'EXIT', Reason} ->
io:format("Could not collect files: ~p~n", [Reason]),
{error,{collect_fail,Dir,Pattern}};
- Mods0 ->
- Mods = [{path_to_module(Mod),all} || Mod <- lists:sort(Mods0)],
- collect_cases(Mods, St, Mode)
+ Files ->
+ %% convert to module names and remove duplicates
+ Mods = lists:foldl(fun(File, Acc) ->
+ Mod = fullname_to_mod(File),
+ case lists:member(Mod, Acc) of
+ true -> Acc;
+ false -> [Mod | Acc]
+ end
+ end, [], Files),
+ Tests = [{Mod,all} || Mod <- lists:sort(Mods)],
+ collect_cases(Tests, St, Mode)
end.
-path_to_module(Path) when is_list(Path) ->
+fullname_to_mod(Path) when is_list(Path) ->
%% If this is called with a binary, then we are probably in +fnu
%% mode and have found a beam file with name encoded as latin1. We
%% will let this crash since it can not work to load such a module
diff --git a/lib/test_server/src/test_server_node.erl b/lib/test_server/src/test_server_node.erl
index acd47788db..9d87eca07e 100644
--- a/lib/test_server/src/test_server_node.erl
+++ b/lib/test_server/src/test_server_node.erl
@@ -618,9 +618,8 @@ do_quote_progname([Prog,Arg|Args]) ->
end.
random_element(L) ->
- {A,B,C} = now(),
- E = lists:sum([A,B,C]) rem length(L),
- lists:nth(E+1, L).
+ random:seed(os:timestamp()),
+ lists:nth(random:uniform(length(L)), L).
find_release(latest) ->
"/usr/local/otp/releases/latest/bin/erl";
diff --git a/lib/test_server/src/test_server_sup.erl b/lib/test_server/src/test_server_sup.erl
index 96e369a138..7d92bc902a 100644
--- a/lib/test_server/src/test_server_sup.erl
+++ b/lib/test_server/src/test_server_sup.erl
@@ -61,33 +61,37 @@ timetrap(Timeout0, ReportTVal, Scale, Pid) ->
TruncTO = trunc(Timeout),
receive
after TruncTO ->
- case is_process_alive(Pid) of
- true ->
- TimeToReport = if Timeout0 == ReportTVal -> TruncTO;
- true -> ReportTVal end,
- MFLs = test_server:get_loc(Pid),
- Mon = erlang:monitor(process, Pid),
- Trap = {timetrap_timeout,TimeToReport,MFLs},
- exit(Pid, Trap),
- receive
- {'DOWN', Mon, process, Pid, _} ->
- ok
- after 10000 ->
- %% Pid is probably trapping exits, hit it harder...
- catch error_logger:warning_msg(
- "Testcase process ~w not "
- "responding to timetrap "
- "timeout:~n"
- " ~p.~n"
- "Killing testcase...~n",
- [Pid, Trap]),
- exit(Pid, kill)
- end;
- false ->
+ kill_the_process(Pid, Timeout0, TruncTO, ReportTVal)
+ end.
+
+kill_the_process(Pid, Timeout0, TruncTO, ReportTVal) ->
+ case is_process_alive(Pid) of
+ true ->
+ TimeToReport = if Timeout0 == ReportTVal -> TruncTO;
+ true -> ReportTVal end,
+ MFLs = test_server:get_loc(Pid),
+ Mon = erlang:monitor(process, Pid),
+ Trap = {timetrap_timeout,TimeToReport,MFLs},
+ exit(Pid, Trap),
+ receive
+ {'DOWN', Mon, process, Pid, _} ->
ok
- end
+ after 10000 ->
+ %% Pid is probably trapping exits, hit it harder...
+ catch error_logger:warning_msg(
+ "Testcase process ~w not "
+ "responding to timetrap "
+ "timeout:~n"
+ " ~p.~n"
+ "Killing testcase...~n",
+ [Pid, Trap]),
+ exit(Pid, kill)
+ end;
+ false ->
+ ok
end.
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% timetrap_cancel(Handle) -> ok
%% Handle = term()
@@ -121,14 +125,8 @@ messages_get(Msgs) ->
end.
timecall(M, F, A) ->
- Befor = erlang:now(),
- Val = apply(M, F, A),
- After = erlang:now(),
- Elapsed =
- (element(1,After)*1000000+element(2,After)+element(3,After)/1000000)-
- (element(1,Befor)*1000000+element(2,Befor)+element(3,Befor)/1000000),
- {Elapsed, Val}.
-
+ {Elapsed, Val} = timer:tc(M, F, A),
+ {Elapsed / 1000000, Val}.
call_crash(Time,Crash,M,F,A) ->
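
timecall/3 above is reduced to a thin wrapper around timer:tc/3, which returns {Microseconds, Value}; dividing by 1000000 keeps the old floating-point-seconds result. Illustrative use (the called function is an example only):

    {Seconds, 6} = timecall(lists, sum, [[1, 2, 3]]),
    true = is_float(Seconds).
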
@@ -812,10 +810,19 @@ format_loc1({Mod,Func,Line}) ->
case {lists:member(no_src, get(test_server_logopts)),
lists:reverse(ModStr)} of
{false,[$E,$T,$I,$U,$S,$_|_]} ->
- io_lib:format("{~w,~w,<a href=\"~ts~ts#~w\">~w</a>}",
+ Link = if is_integer(Line) ->
+ integer_to_list(Line);
+ Line == last_expr ->
+ list_to_atom(atom_to_list(Func)++"-last_expr");
+ is_atom(Line) ->
+ atom_to_list(Line);
+ true ->
+ Line
+ end,
+ io_lib:format("{~w,~w,<a href=\"~ts~ts#~s\">~w</a>}",
[Mod,Func,
test_server_ctrl:uri_encode(downcase(ModStr)),
- ?src_listing_ext,Line,Line]);
+ ?src_listing_ext,Link,Line]);
_ ->
io_lib:format("{~w,~w,~w}",[Mod,Func,Line])
end.
@@ -874,9 +881,8 @@ unique_name() ->
util_loop(State) ->
receive
{From,unique_name} ->
- {_,S,Us} = now(),
- Ms = trunc(Us/1000),
- Name = lists:flatten(io_lib:format("~w.~w", [S,Ms])),
+ Nr = erlang:unique_integer([positive]),
+ Name = integer_to_list(Nr),
if Name == State#util_state.latest_name ->
timer:sleep(1),
self() ! {From,unique_name},
diff --git a/lib/test_server/src/ts.erl b/lib/test_server/src/ts.erl
index d6d2e865e2..85f97656ff 100644
--- a/lib/test_server/src/ts.erl
+++ b/lib/test_server/src/ts.erl
@@ -24,15 +24,20 @@
-module(ts).
--export([run/0, run/1, run/2, run/3, run/4, run/5,
- tests/0, tests/1,
+-export([cl_run/1,
+ run/0, run/1, run/2, run/3, run/4, run/5,
+ run_category/1, run_category/2, run_category/3,
+ tests/0, tests/1, suites/1, categories/1,
install/0, install/1,
- bench/0, bench/1, bench/2, benchmarks/0,
- smoke_test/0, smoke_test/1,smoke_test/2, smoke_tests/0,
estone/0, estone/1,
cross_cover_analyse/1,
compile_testcases/0, compile_testcases/1,
help/0]).
+
+%% Functions kept for backwards compatibility
+-export([bench/0, bench/1, bench/2, benchmarks/0,
+ smoke_test/0, smoke_test/1,smoke_test/2, smoke_tests/0]).
+
-export([i/0, l/1, r/0, r/1, r/2, r/3]).
%%%----------------------------------------------------------------------
@@ -82,10 +87,13 @@
-define(
install_help,
[
- " ts:install() - Install TS with no Options.\n"
- " ts:install([Options]) - Install TS with Options\n"
+ " ts:install()\n",
+ " Install ts with no options.\n",
+ "\n",
+ " ts:install(Options)\n",
+ " Install ts with a list of options, see below.\n",
"\n",
- "Installation options supported:\n",
+ "Installation options supported:\n\n",
" {longnames, true} - Use fully qualified hostnames\n",
" {verbose, Level} - Sets verbosity level for TS output (0,1,2), 0 is\n"
" quiet(default).\n"
@@ -110,21 +118,64 @@ help() ->
end.
help(uninstalled) ->
- H = ["TS is not installed yet. To install use:\n\n"],
+ H = ["ts is not yet installed. To install use:\n\n"],
show_help([H,?install_help]);
help(installed) ->
- H = ["Run functions:\n",
- " ts:run() - Run all available tests.\n",
- " ts:run(Spec) - Run all tests in given test spec file.\n",
- " The spec file is actually ../*_test/Spec.spec\n",
- " ts:run([Specs]) - Run all tests in all given test spec files.\n",
- " ts:run(Spec, Mod) - Run a single test suite.\n",
- " ts:run(Spec, Mod, Case)\n",
- " - Run a single test case.\n",
- " All above run functions can have an additional Options argument\n",
- " which is a list of options.\n",
+ H = ["\n",
+ "Run functions:\n\n",
+ " ts:run()\n",
+ " Run the tests for all apps. The tests are defined by the\n",
+ " main test specification for each app: ../App_test/App.spec.\n",
+ "\n",
+ " ts:run(Apps)\n",
+ " Apps = atom() | [atom()]\n",
+ " Run the tests for an app, or set of apps. The tests are\n",
+ " defined by the main test specification for each app:\n",
+ " ../App_test/App.spec.\n",
+ "\n",
+ " ts:run(App, Suites)\n",
+ " App = atom(), Suites = atom() | [atom()]\n",
+ " Run one or more test suites for App (i.e. modules named\n",
+ " *_SUITE.erl, located in ../App_test/).\n",
+ "\n",
+ " ts:run(App, Suite, TestCases)\n",
+ " App = atom(), Suite = atom(),\n",
+ " TestCases = TCs | {testcase,TCs}, TCs = atom() | [atom()]\n",
+ " Run one or more test cases (functions) in Suite.\n",
+ "\n",
+ " ts:run(App, Suite, {group,Groups})\n",
+ " App = atom(), Suite = atom(), Groups = atom() | [atom()]\n",
+ " Run one or more test case groups in Suite.\n",
+ "\n",
+ " ts:run(App, Suite, {group,Group}, {testcase,TestCases})\n",
+ " App = atom(), Suite = atom(), Group = atom(),\n",
+ " TestCases = atom() | [atom()]\n",
+ " Run one or more test cases in a test case group in Suite.\n",
+ "\n",
+ " ts:run_category(TestCategory)\n",
+ " TestCategory = smoke | essential | bench | atom()\n",
+ " Run the specified category of tests for all apps.\n",
+ " For each app, the tests are defined by the specification:\n",
+ " ../App_test/App_TestCategory.spec.\n",
+ "\n",
+ " ts:run_category(Apps, TestCategory)\n",
+ " Apps = atom() | [atom()],\n",
+ " TestCategory = smoke | essential | bench | atom()\n",
+ " Run the specified category of tests for the given app or apps.\n",
+ "\n",
+ " Note that the test category parameter may have arbitrary value,\n",
+ " but should correspond to an existing test specification with file\n",
+ " name: ../App_test/App_TestCategory.spec.\n",
+ " Predefined categories exist for smoke tests, essential tests and\n",
+ " benchmark tests. The corresponding specs are:\n",
+ " ../*_test/Spec_smoke.spec, ../*_test/Spec_essential.spec and\n",
+ " ../*_test/Spec_bench.spec.\n",
"\n",
- "Run options supported:\n",
+ " All above run functions can take an additional last argument,\n",
+ " Options, which is a list of options (e.g. ts:run(App, Options),\n",
+ " or ts:run_category(Apps, TestCategory, Options)).\n",
+ "\n",
+ "Run options supported:\n\n",
" batch - Do not start a new xterm\n",
" {verbose, Level} - Same as the verbosity option for install\n",
" verbose - Same as {verbose, 1}\n",
@@ -143,47 +194,46 @@ help(installed) ->
" files are. The default location is\n"
" tests/test_server/.\n"
"\n",
- "Supported trace information elements\n",
+ "Supported trace information elements:\n\n",
" {tp | tpl, Mod, [] | match_spec()}\n",
" {tp | tpl, Mod, Func, [] | match_spec()}\n",
" {tp | tpl, Mod, Func, Arity, [] | match_spec()}\n",
" {ctp | ctpl, Mod}\n",
" {ctp | ctpl, Mod, Func}\n",
" {ctp | ctpl, Mod, Func, Arity}\n",
+ "\n\n",
+ "Support functions:\n\n",
+ " ts:tests()\n",
+ " Returns all apps available for testing.\n",
+ "\n",
+ " ts:tests(TestCategory)\n",
+ " Returns all apps that provide tests in the given category.\n",
+ "\n",
+ " ts:suites(App)\n",
+ " Returns all available test suites for App,\n",
+ " i.e. ../App_test/*_SUITE.erl\n",
+ "\n",
+ " ts:categories(App)\n",
+ " Returns all test categories available for App.\n",
+ "\n",
+ " ts:estone()\n",
+ " Runs estone_SUITE in the kernel application with no run options\n",
"\n",
- "Support functions:\n",
- " ts:tests() - Shows all available families of tests.\n",
- " ts:tests(Spec) - Shows all available test modules in Spec,\n",
- " i.e. ../Spec_test/*_SUITE.erl\n",
- " ts:estone() - Run estone_SUITE in kernel application with\n"
- " no run options\n",
- " ts:estone(Opts) - Run estone_SUITE in kernel application with\n"
- " the given run options\n",
- " ts:cross_cover_analyse(Level)\n"
- " - Used after ts:run with option cover or \n"
- " cover_details. Analyses modules specified with\n"
- " a 'cross' statement in the cover spec file.\n"
- " Level can be 'overview' or 'details'.\n",
- " ts:compile_testcases()~n"
- " ts:compile_testcases(Apps)~n"
- " - Compile all testcases for usage in a cross ~n"
- " compile environment."
- " \n"
- "Benchmark functions:\n"
- " ts:benchmarks() - Get all available families of benchmarks\n"
- " ts:bench() - Runs all benchmarks\n"
- " ts:bench(Spec) - Runs all benchmarks in the given spec file.\n"
- " The spec file is actually ../*_test/Spec_bench.spec\n\n"
- " ts:bench can take the same Options argument as ts:run.\n"
- "Smoke test functions:\n"
- " ts:smoke_tests() - Get all available families of smoke tests\n"
- " ts:smoke_test() - Runs all smoke tests\n"
- " ts:smoke_test(Spec)\n"
- " - Runs all smoke tests in the given spec file.\n"
- " The spec file is actually ../*_test/Spec_smoke.spec\n\n"
- " ts:smoke_test can take the same Options argument as ts:run.\n"
- "\n"
- "Installation (already done):\n"
+ " ts:estone(Opts)\n",
+ " Runs estone_SUITE in the kernel application with the given\n",
+ " run options\n",
+ "\n",
+ " ts:cross_cover_analyse(Level)\n",
+ " Use after ts:run with option cover or cover_details. Analyses\n",
+ " modules specified with a 'cross' statement in the cover spec file.\n",
+ " Level can be 'overview' or 'details'.\n",
+ "\n",
+ " ts:compile_testcases()\n",
+ " ts:compile_testcases(Apps)\n",
+ " Compiles all test cases for the given apps, for usage in a\n",
+ " cross compilation environment.\n",
+ "\n\n",
+ "Installation (already done):\n\n"
],
show_help([H,?install_help]).
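
The reworked help text above reflects the new app-oriented API. A few illustrative calls from the Erlang shell (the apps, suite and test case named here are examples only):

    1> ts:install().
    2> ts:run(kernel, [batch]).
    3> ts:run(stdlib, lists_SUITE, {testcase, append_1}).
    4> ts:run_category(smoke).
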
@@ -212,86 +262,138 @@ run_all(_Vars) ->
run_some([], _Opts) ->
ok;
-run_some([{Spec,Mod}|Specs], Opts) ->
- case run(Spec, Mod, Opts) of
+run_some(Apps, Opts) ->
+ case proplists:get_value(test_category, Opts) of
+ bench ->
+ check_and_run(fun(Vars) -> ts_benchmark:run(Apps, Opts, Vars) end);
+ _Other ->
+ run_some1(Apps, Opts)
+ end.
+
+run_some1([], _Opts) ->
+ ok;
+run_some1([{App,Mod}|Apps], Opts) ->
+ case run(App, Mod, Opts) of
ok -> ok;
- Error -> io:format("~p: ~p~n",[{Spec,Mod},Error])
+ Error -> io:format("~p: ~p~n",[{App,Mod},Error])
end,
- run_some(Specs, Opts);
-run_some([Spec|Specs], Opts) ->
- case run(Spec, Opts) of
+ run_some1(Apps, Opts);
+run_some1([App|Apps], Opts) ->
+ case run(App, Opts) of
ok -> ok;
- Error -> io:format("~p: ~p~n",[Spec,Error])
+ Error -> io:format("~p: ~p~n",[App,Error])
end,
- run_some(Specs, Opts).
+ run_some1(Apps, Opts).
+
+%% This can be used from command line. Both App and
+%% TestCategory must be specified. App may be 'all'
+%% and TestCategory may be 'main'. Examples:
+%% erl -s ts cl_run kernel smoke <options>
+%% erl -s ts cl_run kernel main <options>
+%% erl -s ts cl_run all essential <options>
+%% erl -s ts cl_run all main <options>
+%% When using the 'main' category and running with cover,
+%% one can also use the cross_cover_analysis flag.
+cl_run([App,Cat|Options0]) when is_atom(App) ->
-%% Runs one test spec (interactive).
-run(Testspec) when is_atom(Testspec) ->
- Options=check_test_get_opts(Testspec, []),
- File = atom_to_list(Testspec),
- run_test(File, [{spec,[File++".spec"]}], Options);
-
-%% This can be used from command line, e.g.
-%% erl -s ts run all_tests <config>
-%% When using the all_tests flag and running with cover, one can also
-%% use the cross_cover_analysis flag.
-run([all_tests|Config0]) ->
AllAtomsFun = fun(X) when is_atom(X) -> true;
(_) -> false
end,
- Config1 =
- case lists:all(AllAtomsFun,Config0) of
+ Options1 =
+ case lists:all(AllAtomsFun, Options0) of
true ->
%% Could be from command line
- lists:map(fun(Conf)->to_erlang_term(Conf) end,Config0)--[batch];
+ lists:map(fun(Opt) ->
+ to_erlang_term(Opt)
+ end, Options0) -- [batch];
false ->
- Config0--[batch]
+ Options0 -- [batch]
end,
%% Make sure there is exactly one occurence of 'batch'
- Config2 = [batch|Config1],
-
- R = run(tests(),Config2),
-
- case check_for_cross_cover_analysis_flag(Config2) of
+ Options2 = [batch|Options1],
+
+ Result =
+ case {App,Cat} of
+ {all,main} ->
+ run(tests(), Options2);
+ {all,Cat} ->
+ run_category(Cat, Options2);
+ {_,main} ->
+ run(App, Options2);
+ {_,Cat} ->
+ run_category(App, Cat, Options2)
+ end,
+ case check_for_cross_cover_analysis_flag(Options2) of
false ->
ok;
Level ->
cross_cover_analyse(Level)
end,
+ Result.
- R;
+%% run/1
+%% Runs tests for one app (interactive).
+run(App) when is_atom(App) ->
+ Options = check_test_get_opts(App, []),
+ File = atom_to_list(App),
+ run_test(File, [{spec,[File++".spec"]},{allow_user_terms,true}], Options);
-%% ts:run(ListOfTests)
-run(List) when is_list(List) ->
- run(List, [batch]).
-
-run(List, Opts) when is_list(List), is_list(Opts) ->
- run_some(List, Opts);
+%% This can be used from command line, e.g.
+%% erl -s ts run all <options>
+%% erl -s ts run main <options>
+run([all,main|Opts]) ->
+ cl_run([all,main|Opts]);
+run([all|Opts]) ->
+ cl_run([all,main|Opts]);
+run([main|Opts]) ->
+ cl_run([all,main|Opts]);
+%% Backwards compatible
+run([all_tests|Opts]) ->
+ cl_run([all,main|Opts]);
+
+%% run/1
+%% Runs the main tests for all available apps
+run(Apps) when is_list(Apps) ->
+ run(Apps, [batch]).
%% run/2
-%% Runs one test spec with list of suites or with options
-run(Testspec, ModsOrConfig) when is_atom(Testspec),
- is_list(ModsOrConfig) ->
- case is_list_of_suites(ModsOrConfig) of
+%% Runs the main tests for all available apps
+run(Apps, Opts) when is_list(Apps), is_list(Opts) ->
+ run_some(Apps, Opts);
+
+%% Runs tests for one app with list of suites or with options
+run(App, ModsOrOpts) when is_atom(App),
+ is_list(ModsOrOpts) ->
+ case is_list_of_suites(ModsOrOpts) of
false ->
- run(Testspec, {config_list,ModsOrConfig});
+ run(App, {opts_list,ModsOrOpts});
true ->
- run_some([{Testspec,M} || M <- ModsOrConfig],
+ run_some([{App,M} || M <- ModsOrOpts],
[batch])
end;
-run(Testspec, {config_list,Config}) ->
- Options=check_test_get_opts(Testspec, Config),
- IsSmoke=proplists:get_value(smoke,Config),
- File=atom_to_list(Testspec),
+
+run(App, {opts_list,Opts}) ->
+ Options = check_test_get_opts(App, Opts),
+ File = atom_to_list(App),
+
+ %% check if other test category than main has been specified
+ {CatSpecName,TestCat} =
+ case proplists:get_value(test_category, Opts) of
+ undefined ->
+ {"",main};
+ Cat ->
+ {"_" ++ atom_to_list(Cat),Cat}
+ end,
+
WhatToDo =
- case Testspec of
+ case App of
%% Known to exist but fails generic tests below
emulator -> test;
system -> test;
erl_interface -> test;
epmd -> test;
_ ->
- case code:lib_dir(Testspec) of
+ case code:lib_dir(App) of
{error,bad_name} ->
%% Application does not exist
skip;
@@ -313,92 +415,167 @@ run(Testspec, {config_list,Config}) ->
end
end
end,
- Spec =
- case WhatToDo of
- skip ->
- create_skip_spec(Testspec, tests(Testspec));
- test when IsSmoke ->
- File++"_smoke.spec";
- test ->
- File++".spec"
- end,
- run_test(File, [{spec,[Spec]}], Options);
-%% Runs one module in a spec (interactive)
-run(Testspec, Mod) when is_atom(Testspec), is_atom(Mod) ->
- run_test({atom_to_list(Testspec),Mod},
+ case WhatToDo of
+ skip ->
+ SkipSpec = create_skip_spec(App, suites(App)),
+ run_test(File, [{spec,[SkipSpec]}], Options);
+ test when TestCat == bench ->
+ check_and_run(fun(Vars) ->
+ ts_benchmark:run([App], Options, Vars)
+ end);
+ test ->
+ Spec = File ++ CatSpecName ++ ".spec",
+ run_test(File, [{spec,[Spec]},{allow_user_terms,true}], Options)
+ end;
+
+%% Runs one module for an app (interactive)
+run(App, Mod) when is_atom(App), is_atom(Mod) ->
+ run_test({atom_to_list(App),Mod},
[{suite,Mod}],
[interactive]).
%% run/3
-%% Run one module in a spec with Config
-run(Testspec, Mod, Config) when is_atom(Testspec),
- is_atom(Mod),
- is_list(Config) ->
- Options=check_test_get_opts(Testspec, Config),
- run_test({atom_to_list(Testspec),Mod},
+%% Run one module for an app with Opts
+run(App, Mod, Opts) when is_atom(App),
+ is_atom(Mod),
+ is_list(Opts) ->
+ Options = check_test_get_opts(App, Opts),
+ run_test({atom_to_list(App),Mod},
[{suite,Mod}], Options);
-%% Run multiple modules with Config
-run(Testspec, Mods, Config) when is_atom(Testspec),
- is_list(Mods),
- is_list(Config) ->
- run_some([{Testspec,M} || M <- Mods], Config);
+
+%% Run multiple modules with Opts
+run(App, Mods, Opts) when is_atom(App),
+ is_list(Mods),
+ is_list(Opts) ->
+ run_some([{App,M} || M <- Mods], Opts);
+
%% Runs one test case in a module.
-run(Testspec, Mod, Case) when is_atom(Testspec),
- is_atom(Mod),
- is_atom(Case) ->
- Options=check_test_get_opts(Testspec, []),
+run(App, Mod, Case) when is_atom(App),
+ is_atom(Mod),
+ is_atom(Case) ->
+ Options = check_test_get_opts(App, []),
Args = [{suite,Mod},{testcase,Case}],
- run_test(atom_to_list(Testspec), Args, Options);
+ run_test(atom_to_list(App), Args, Options);
+
%% Runs one or more groups in a module.
-run(Testspec, Mod, Grs={group,_Groups}) when is_atom(Testspec),
- is_atom(Mod) ->
- Options=check_test_get_opts(Testspec, []),
+run(App, Mod, Grs={group,_Groups}) when is_atom(App),
+ is_atom(Mod) ->
+ Options = check_test_get_opts(App, []),
Args = [{suite,Mod},Grs],
- run_test(atom_to_list(Testspec), Args, Options);
+ run_test(atom_to_list(App), Args, Options);
+
%% Runs one or more test cases in a module.
-run(Testspec, Mod, TCs={testcase,_Cases}) when is_atom(Testspec),
- is_atom(Mod) ->
- Options=check_test_get_opts(Testspec, []),
+run(App, Mod, TCs={testcase,_Cases}) when is_atom(App),
+ is_atom(Mod) ->
+ Options = check_test_get_opts(App, []),
Args = [{suite,Mod},TCs],
- run_test(atom_to_list(Testspec), Args, Options).
+ run_test(atom_to_list(App), Args, Options).
%% run/4
%% Run one test case in a module with Options.
-run(Testspec, Mod, Case, Config) when is_atom(Testspec),
- is_atom(Mod),
- is_atom(Case),
- is_list(Config) ->
- Options=check_test_get_opts(Testspec, Config),
+run(App, Mod, Case, Opts) when is_atom(App),
+ is_atom(Mod),
+ is_atom(Case),
+ is_list(Opts) ->
+ Options = check_test_get_opts(App, Opts),
Args = [{suite,Mod},{testcase,Case}],
- run_test(atom_to_list(Testspec), Args, Options);
+ run_test(atom_to_list(App), Args, Options);
+
%% Run one or more test cases in a module with Options.
-run(Testspec, Mod, {testcase,Cases}, Config) when is_atom(Testspec),
- is_atom(Mod) ->
- run(Testspec, Mod, Cases, Config);
-run(Testspec, Mod, Cases, Config) when is_atom(Testspec),
- is_atom(Mod),
- is_list(Cases),
- is_list(Config) ->
- Options=check_test_get_opts(Testspec, Config),
+run(App, Mod, {testcase,Cases}, Opts) when is_atom(App),
+ is_atom(Mod) ->
+ run(App, Mod, Cases, Opts);
+run(App, Mod, Cases, Opts) when is_atom(App),
+ is_atom(Mod),
+ is_list(Cases),
+ is_list(Opts) ->
+ Options = check_test_get_opts(App, Opts),
Args = [{suite,Mod},Cases],
- run_test(atom_to_list(Testspec), Args, Options);
+ run_test(atom_to_list(App), Args, Options);
+
+%% Run one or more test cases in a group.
+run(App, Mod, Gr={group,_Group}, {testcase,Cases}) when is_atom(App),
+ is_atom(Mod) ->
+ run(App, Mod, Gr, Cases, [batch]);
+
+
%% Run one or more groups in a module with Options.
-run(Testspec, Mod, Grs={group,_Groups}, Config) when is_atom(Testspec),
- is_atom(Mod) ->
- Options=check_test_get_opts(Testspec, Config),
+run(App, Mod, Grs={group,_Groups}, Opts) when is_atom(App),
+ is_atom(Mod),
+ is_list(Opts) ->
+ Options = check_test_get_opts(App, Opts),
Args = [{suite,Mod},Grs],
- run_test(atom_to_list(Testspec), Args, Options).
+ run_test(atom_to_list(App), Args, Options).
%% run/5
%% Run one or more test cases in a group with Options.
-run(Testspec, Mod, Group, Cases, Config) when is_atom(Testspec),
- is_atom(Mod),
- is_list(Config) ->
+run(App, Mod, Group, Cases, Opts) when is_atom(App),
+ is_atom(Mod),
+ is_list(Opts) ->
Group1 = if is_tuple(Group) -> Group; true -> {group,Group} end,
Cases1 = if is_tuple(Cases) -> Cases; true -> {testcase,Cases} end,
- Options=check_test_get_opts(Testspec, Config),
+ Options = check_test_get_opts(App, Opts),
Args = [{suite,Mod},Group1,Cases1],
- run_test(atom_to_list(Testspec), Args, Options).
+ run_test(atom_to_list(App), Args, Options).
+
+%% run_category/1
+run_category(TestCategory) when is_atom(TestCategory) ->
+ run_category(TestCategory, [batch]).
+
+%% run_category/2
+run_category(TestCategory, Opts) when is_atom(TestCategory),
+ is_list(Opts) ->
+ case ts:tests(TestCategory) of
+ [] ->
+ {error, no_tests_available};
+ Apps ->
+ Opts1 = [{test_category,TestCategory} | Opts],
+ run_some(Apps, Opts1)
+ end;
+
+run_category(Apps, TestCategory) when is_atom(TestCategory) ->
+ run_category(Apps, TestCategory, [batch]).
+
+%% run_category/3
+run_category(App, TestCategory, Opts) ->
+ Apps = if is_atom(App) -> [App];
+ is_list(App) -> App
+ end,
+ Opts1 = [{test_category,TestCategory} | Opts],
+ run_some(Apps, Opts1).
+
+%%-----------------------------------------------------------------
+%% Functions kept for backwards compatibility
+
+bench() ->
+ run_category(bench, []).
+bench(Opts) when is_list(Opts) ->
+ run_category(bench, Opts);
+bench(App) ->
+ run_category(App, bench, []).
+bench(App, Opts) when is_atom(App) ->
+ run_category(App, bench, Opts);
+bench(Apps, Opts) when is_list(Apps) ->
+ run_category(Apps, bench, Opts).
+
+benchmarks() ->
+ tests(bench).
+
+smoke_test() ->
+ run_category(smoke, []).
+smoke_test(Opts) when is_list(Opts) ->
+ run_category(smoke, Opts);
+smoke_test(App) ->
+ run_category(App, smoke, []).
+smoke_test(App, Opts) when is_atom(App) ->
+ run_category(App, smoke, Opts);
+smoke_test(Apps, Opts) when is_list(Apps) ->
+ run_category(Apps, smoke, Opts).
+
+smoke_tests() ->
+ tests(smoke).
+
+%%-----------------------------------------------------------------
is_list_of_suites(List) ->
lists:all(fun(Suite) ->
@@ -416,29 +593,29 @@ is_list_of_suites(List) ->
%% Create a spec to skip all SUITES, this is used when the application
%% to be tested is not part of the OTP release to be tested.
-create_skip_spec(Testspec, SuitesToSkip) ->
+create_skip_spec(App, SuitesToSkip) ->
{ok,Cwd} = file:get_cwd(),
- TestspecString = atom_to_list(Testspec),
- Specname = TestspecString++"_skip.spec",
+ AppString = atom_to_list(App),
+ Specname = AppString++"_skip.spec",
{ok,D} = file:open(filename:join([filename:dirname(Cwd),
- TestspecString++"_test",Specname]),
+ AppString++"_test",Specname]),
[write]),
- TestDir = "\"../"++TestspecString++"_test\"",
+ TestDir = "\"../"++AppString++"_test\"",
io:format(D,"{suites, "++TestDir++", all}.~n",[]),
io:format(D,"{skip_suites, "++TestDir++", ~w, \"Skipped as application"
" is not in path!\"}.",[SuitesToSkip]),
Specname.
-%% Check testspec to be valid and get possible Options
-%% from the config.
-check_test_get_opts(Testspec, Config) ->
- validate_test(Testspec),
- Mode = configmember(batch, {batch, interactive}, Config),
- Vars = configvars(Config),
- Trace = get_config(trace,Config),
- ConfigPath = get_config(config,Config),
- KeepTopcase = configmember(keep_topcase, {keep_topcase,[]}, Config),
- Cover = configcover(Testspec,Config),
+%% Check that the testspec for App is valid and get possible options
+%% from the list.
+check_test_get_opts(App, Opts) ->
+ validate_test(App),
+ Mode = configmember(batch, {batch, interactive}, Opts),
+ Vars = configvars(Opts),
+ Trace = get_config(trace,Opts),
+ ConfigPath = get_config(config,Opts),
+ KeepTopcase = configmember(keep_topcase, {keep_topcase,[]}, Opts),
+ Cover = configcover(App,Opts),
lists:flatten([Vars,Mode,Trace,KeepTopcase,Cover,ConfigPath]).
to_erlang_term(Atom) ->
@@ -447,7 +624,7 @@ to_erlang_term(Atom) ->
{ok, Term} = erl_parse:parse_term(Tokens),
Term.
-%% Validate that a Testspec really is a testspec,
+%% Validate that Testspec really is a testspec,
%% and exit if not.
validate_test(Testspec) ->
case lists:member(Testspec, tests()) of
@@ -460,10 +637,10 @@ validate_test(Testspec) ->
exit(self(), {error, test_not_available})
end.
-configvars(Config) ->
- case lists:keysearch(vars, 1, Config) of
+configvars(Opts) ->
+ case lists:keysearch(vars, 1, Opts) of
{value, {vars, List}} ->
- List0 = special_vars(Config),
+ List0 = special_vars(Opts),
Key = fun(T) -> element(1,T) end,
DelDupList =
lists:filter(fun(V) ->
@@ -474,17 +651,17 @@ configvars(Config) ->
end, List),
{vars, [List0|DelDupList]};
_ ->
- {vars, special_vars(Config)}
+ {vars, special_vars(Opts)}
end.
-%% Allow some shortcuts in the Options...
-special_vars(Config) ->
+%% Allow some shortcuts in the options...
+special_vars(Opts) ->
SpecVars =
- case lists:member(verbose, Config) of
+ case lists:member(verbose, Opts) of
true ->
[{verbose, 1}];
false ->
- case lists:keysearch(verbose, 1, Config) of
+ case lists:keysearch(verbose, 1, Opts) of
{value, {verbose, Lvl}} ->
[{verbose, Lvl}];
_ ->
@@ -492,13 +669,13 @@ special_vars(Config) ->
end
end,
SpecVars1 =
- case lists:keysearch(diskless, 1, Config) of
+ case lists:keysearch(diskless, 1, Opts) of
{value,{diskless, true}} ->
[{diskless, true} | SpecVars];
_ ->
SpecVars
end,
- case lists:keysearch(testcase_callback, 1, Config) of
+ case lists:keysearch(testcase_callback, 1, Opts) of
{value,{testcase_callback, CBM, CBF}} ->
[{ts_testcase_callback, {CBM,CBF}} | SpecVars1];
{value,{testcase_callback, CB}} ->
@@ -566,50 +743,31 @@ check_for_cross_cover_analysis_flag([_|Config],Level,CrossFlag) ->
check_for_cross_cover_analysis_flag([],_,_) ->
false.
-%% Returns a list of available test suites.
+%% Returns all available apps.
tests() ->
{ok, Cwd} = file:get_cwd(),
ts_lib:specs(Cwd).
-tests(Spec) ->
+%% Returns all apps that provide tests in the given test category
+tests(main) ->
{ok, Cwd} = file:get_cwd(),
- ts_lib:suites(Cwd, atom_to_list(Spec)).
-
-%% Benchmark related functions
-
-bench() ->
- bench([]).
-
-bench(Opts) when is_list(Opts) ->
- bench(benchmarks(),Opts);
-bench(Spec) ->
- bench([Spec],[]).
-
-bench(Spec, Opts) when is_atom(Spec) ->
- bench([Spec],Opts);
-bench(Specs, Opts) ->
- check_and_run(fun(Vars) -> ts_benchmark:run(Specs, Opts, Vars) end).
-
-benchmarks() ->
- ts_benchmark:benchmarks().
-
-smoke_test() ->
- smoke_test([]).
-
-smoke_test(Opts) when is_list(Opts) ->
- smoke_test(smoke_tests(),Opts);
-smoke_test(Spec) ->
- smoke_test([Spec],[]).
-
-smoke_test(Spec, Opts) when is_atom(Spec) ->
- smoke_test([Spec],Opts);
-smoke_test(Specs, Opts) ->
- run(Specs, [{smoke,true}|Opts]).
+ ts_lib:specs(Cwd);
+tests(bench) ->
+ ts_benchmark:benchmarks();
+tests(TestCategory) ->
+ {ok, Cwd} = file:get_cwd(),
+ ts_lib:specialized_specs(Cwd, atom_to_list(TestCategory)).
+
+%% Returns a list of available test suites for App.
+suites(App) ->
+ {ok, Cwd} = file:get_cwd(),
+ ts_lib:suites(Cwd, atom_to_list(App)).
-smoke_tests() ->
+%% Returns all available test categories for App
+categories(App) ->
{ok, Cwd} = file:get_cwd(),
- ts_lib:specialized_specs(Cwd,"smoke").
+ ts_lib:test_categories(Cwd, atom_to_list(App)).
%%
%% estone/0, estone/1
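
A rough sketch of how the reworked ts entry points above fit together; the
application names and options are illustrative only and not part of the patch:

    %% Run the main test specs for two applications in batch mode;
    %% run/2 with two lists falls through to run_some/2.
    ts:run([stdlib, kernel], [batch]).

    %% Run all smoke specs via the new run_category API; this adds
    %% {test_category,smoke} to the options and picks up the *_smoke.spec
    %% files located by ts_lib:specialized_specs/2.
    ts:run_category(smoke, [batch]).

    %% The old bench/2 entry point is kept for backwards compatibility and
    %% now delegates to run_category/3.
    ts:bench(stdlib, []).
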
diff --git a/lib/test_server/src/ts_install.erl b/lib/test_server/src/ts_install.erl
index bc62015ac3..594e619fbc 100644
--- a/lib/test_server/src/ts_install.erl
+++ b/lib/test_server/src/ts_install.erl
@@ -18,7 +18,6 @@
%%
-module(ts_install).
-
-export([install/2, platform_id/1]).
-include("ts.hrl").
@@ -135,15 +134,63 @@ unix_autoconf(XConf) ->
case filelib:is_file(Configure) of
true ->
OSXEnv = macosx_cflags(),
+ UnQuotedEnv = assign_vars(unquote(Env++OSXEnv)),
io:format("Running ~s~nEnv: ~p~n",
- [lists:flatten(Configure ++ Args),Env++OSXEnv]),
+ [lists:flatten(Configure ++ Args),UnQuotedEnv]),
Port = open_port({spawn, lists:flatten(["\"",Configure,"\"",Args])},
- [stream, eof, {env,Env++OSXEnv}]),
+ [stream, eof, {env,UnQuotedEnv}]),
ts_lib:print_data(Port);
false ->
{error, no_configure_script}
end.
+unquote([{Var,Val}|T]) ->
+ [{Var,unquote(Val)}|unquote(T)];
+unquote([]) ->
+ [];
+unquote("\""++Rest) ->
+ lists:reverse(tl(lists:reverse(Rest)));
+unquote(String) ->
+ String.
+
+assign_vars([]) ->
+ [];
+assign_vars([{VAR,FlagsStr} | VARs]) ->
+ [{VAR,assign_vars(FlagsStr)} | assign_vars(VARs)];
+assign_vars(FlagsStr) ->
+ Flags = [assign_all_vars(Str,[]) || Str <- string:tokens(FlagsStr, [$ ])],
+ string:strip(lists:flatten(lists:map(fun(Flag) ->
+ Flag ++ " "
+ end, Flags)), right).
+
+assign_all_vars([$$ | Rest], FlagSoFar) ->
+ {VarName,Rest1} = get_var_name(Rest, []),
+ assign_all_vars(Rest1, FlagSoFar ++ assign_var(VarName));
+assign_all_vars([Char | Rest], FlagSoFar) ->
+ assign_all_vars(Rest, FlagSoFar ++ [Char]);
+assign_all_vars([], Flag) ->
+ Flag.
+
+get_var_name([Ch | Rest] = Str, VarR) ->
+ case valid_char(Ch) of
+ true -> get_var_name(Rest, [Ch | VarR]);
+ false -> {lists:reverse(VarR),Str}
+ end;
+get_var_name([], VarR) ->
+ {lists:reverse(VarR),[]}.
+
+assign_var(VarName) ->
+ case os:getenv(VarName) of
+ false -> "";
+ Val -> Val
+ end.
+
+valid_char(Ch) when Ch >= $a, Ch =< $z -> true;
+valid_char(Ch) when Ch >= $A, Ch =< $Z -> true;
+valid_char(Ch) when Ch >= $0, Ch =< $9 -> true;
+valid_char($_) -> true;
+valid_char(_) -> false.
+
get_xcomp_flag(Flag, Flags) ->
get_xcomp_flag(Flag, Flag, Flags).
get_xcomp_flag(Flag, Tag, Flags) ->
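
A minimal sketch of what the new unquote/1 and assign_vars/1 pre-processing
does to a configure environment entry, assuming CFLAGS is set to "-O2" in the
OS environment (the values are made up for illustration):

    %% The surrounding quotes are stripped first, then $VAR references are
    %% expanded with os:getenv/1 before the entry reaches open_port/2.
    Env0 = [{"CFLAGS", "\"$CFLAGS -g\""}],
    Env1 = unquote(Env0),       %% [{"CFLAGS", "$CFLAGS -g"}]
    Env2 = assign_vars(Env1).   %% [{"CFLAGS", "-O2 -g"}]
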
diff --git a/lib/test_server/src/ts_install_cth.erl b/lib/test_server/src/ts_install_cth.erl
index 7746bbed6f..54ca69637e 100644
--- a/lib/test_server/src/ts_install_cth.erl
+++ b/lib/test_server/src/ts_install_cth.erl
@@ -238,12 +238,15 @@ generate_nodenames2(0, _Hosts, Acc) ->
Acc;
generate_nodenames2(N, Hosts, Acc) ->
Host=lists:nth((N rem (length(Hosts)))+1, Hosts),
- Name=list_to_atom(temp_nodename("nod", []) ++ "@" ++ Host),
+ Name=list_to_atom(temp_nodename("nod",N) ++ "@" ++ Host),
generate_nodenames2(N-1, Hosts, [Name|Acc]).
-temp_nodename([], Acc) ->
- lists:flatten(Acc);
-temp_nodename([Chr|Base], Acc) ->
- {A,B,C} = erlang:now(),
- New = [Chr | integer_to_list(Chr bxor A bxor B+A bxor C+B)],
- temp_nodename(Base, [New|Acc]).
+%% We cannot use erlang:unique_integer([positive])
+%% here since this code is run on older test releases as well.
+temp_nodename(Base,I) ->
+ {A,B,C} = os:timestamp(),
+ Nstr = integer_to_list(I),
+ Astr = integer_to_list(A),
+ Bstr = integer_to_list(B),
+ Cstr = integer_to_list(C),
+ Base++Nstr++Astr++Bstr++Cstr.
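
For illustration, the kind of node name the new temp_nodename/2 produces, with
a made-up timestamp:

    %% os:timestamp() -> {1421,77777,555555}            (hypothetical)
    %% temp_nodename("nod", 3) -> "nod3142177777555555"
    %% so generate_nodenames2/3 yields 'nod3142177777555555@Host'.
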
diff --git a/lib/test_server/src/ts_lib.erl b/lib/test_server/src/ts_lib.erl
index 5368960446..d27bc55b3a 100644
--- a/lib/test_server/src/ts_lib.erl
+++ b/lib/test_server/src/ts_lib.erl
@@ -27,7 +27,7 @@
erlang_type/1,
initial_capital/1,
specs/1, suites/2,
- specialized_specs/2,
+ test_categories/2, specialized_specs/2,
subst_file/3, subst/2, print_data/1,
make_non_erlang/2,
maybe_atom_to_list/1, progress/4,
@@ -96,26 +96,47 @@ specialized_specs(Dir,PostFix) ->
Specs = filelib:wildcard(filename:join([filename:dirname(Dir),
"*_test", "*_"++PostFix++".spec"])),
sort_tests([begin
- Base = filename:basename(Name),
- list_to_atom(string:substr(Base,1,string:rstr(Base,"_")-1))
+ DirPart = filename:dirname(Name),
+ AppTest = hd(lists:reverse(filename:split(DirPart))),
+ list_to_atom(string:substr(AppTest, 1, length(AppTest)-5))
end || Name <- Specs]).
specs(Dir) ->
Specs = filelib:wildcard(filename:join([filename:dirname(Dir),
"*_test", "*.{dyn,}spec"])),
- % Filter away all spec which end with {_bench,_smoke}.spec
- NoBench = fun(SpecName) ->
- case lists:reverse(SpecName) of
- "ceps.hcneb_"++_ -> false;
- "ceps.ekoms_"++_ -> false;
- _ -> true
- end
- end,
-
- sort_tests([filename_to_atom(Name) || Name <- Specs, NoBench(Name)]).
-
-suites(Dir, Spec) ->
- Glob=filename:join([filename:dirname(Dir), Spec++"_test",
+    %% Make sure to include only the main spec for each application
+ MainSpecs =
+ lists:flatmap(fun(FullName) ->
+ [Spec,TestDir|_] =
+ lists:reverse(filename:split(FullName)),
+ [_TestSuffix|TDParts] =
+ lists:reverse(string:tokens(TestDir,[$_,$.])),
+ [_SpecSuffix|SParts] =
+ lists:reverse(string:tokens(Spec,[$_,$.])),
+ if TDParts == SParts ->
+ [filename_to_atom(FullName)];
+ true ->
+ []
+ end
+ end, Specs),
+ sort_tests(MainSpecs).
+
+test_categories(Dir, App) ->
+ Specs = filelib:wildcard(filename:join([filename:dirname(Dir),
+ App++"_test", "*.spec"])),
+ lists:flatmap(fun(FullName) ->
+ [Spec,_TestDir|_] =
+ lists:reverse(filename:split(FullName)),
+ case filename:rootname(Spec -- App) of
+ "" ->
+ [];
+ [_Sep | Cat] ->
+ [list_to_atom(Cat)]
+ end
+ end, Specs).
+
+suites(Dir, App) ->
+ Glob=filename:join([filename:dirname(Dir), App++"_test",
"*_SUITE.erl"]),
Suites=filelib:wildcard(Glob),
[filename_to_atom(Name) || Name <- Suites].
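
A sketch of how the reworked spec scanning maps file names to applications and
test categories; the spec files listed are hypothetical:

    %% Given .../asn1_test/asn1.spec, asn1_bench.spec and asn1_smoke.spec:
    %%
    %% specs/1 keeps only the main spec and reports the app:      [asn1]
    %% test_categories/2 for "asn1" strips the app prefix and the
    %% ".spec" extension from the remaining specs:                [bench,smoke]
    %% specialized_specs/2 with postfix "smoke" maps the *_test
    %% directory name back to the owning app:                     [asn1]
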
diff --git a/lib/test_server/src/ts_make.erl b/lib/test_server/src/ts_make.erl
index 8727f7ebfe..9cb77ecb12 100644
--- a/lib/test_server/src/ts_make.erl
+++ b/lib/test_server/src/ts_make.erl
@@ -67,7 +67,17 @@ get_port_data(Port, Last0, Complete0) ->
end.
update_last([C|Rest], Line, true) ->
- io:put_chars(list_to_binary(Line)), %% Utf-8 list to utf-8 binary
+ try
+ %% Utf-8 list to utf-8 binary
+ %% (e.g. we assume utf-8 bytes from port)
+ io:put_chars(list_to_binary(Line))
+ catch
+ error:badarg ->
+	    %% io:put_chars/1 failed with badarg;
+	    %% this likely means we had unicode code points
+	    %% in our byte buffer (e.g. a warning from gcc with åäö)
+ io:put_chars(unicode:characters_to_binary(Line))
+ end,
io:nl(),
update_last([C|Rest], [], false);
update_last([$\r|Rest], Result, Complete) ->
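
A minimal illustration of why the fallback above is needed; the code point used
is an example only:

    %% list_to_binary/1 accepts only byte lists (integers 0..255) and raises
    %% badarg for a list containing a larger code point such as 1080 ('и'):
    list_to_binary([1080]).                  %% -> exception error: bad argument
    %% unicode:characters_to_binary/1 instead encodes it as UTF-8:
    unicode:characters_to_binary([1080]).    %% -> <<208,184>>
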
diff --git a/lib/test_server/test/erl2html2_SUITE.erl b/lib/test_server/test/erl2html2_SUITE.erl
index 37c2b74d8e..796b84dedd 100644
--- a/lib/test_server/test/erl2html2_SUITE.erl
+++ b/lib/test_server/test/erl2html2_SUITE.erl
@@ -130,15 +130,7 @@ groups() ->
%% @end
%%--------------------------------------------------------------------
all() ->
- [m1].
-
-%%--------------------------------------------------------------------
-%% @spec TestCase() -> Info
-%% Info = [tuple()]
-%% @end
-%%--------------------------------------------------------------------
-m1() ->
- [].
+ [macros_defined, macros_undefined].
%%--------------------------------------------------------------------
%% @spec TestCase(Config0) ->
@@ -149,19 +141,29 @@ m1() ->
%% Comment = term()
%% @end
%%--------------------------------------------------------------------
-m1(Config) ->
- {Src,Dst} = convert_module("m1",Config),
+macros_defined(Config) ->
+ %% let erl2html2 use epp as parser
+ DataDir = ?config(data_dir,Config),
+ InclDir = filename:join(DataDir, "include"),
+ {Src,Dst} = convert_module("m1",[InclDir],Config),
{true,L} = check_line_numbers(Src,Dst),
- ok = check_link_targets(Src,Dst,L,[{baz,0}]),
+ ok = check_link_targets(Src,Dst,L,[{baz,0}],[]),
ok.
-convert_module(Mod,Config) ->
+macros_undefined(Config) ->
+ %% let erl2html2 use epp_dodger as parser
+ {Src,Dst} = convert_module("m1",[],Config),
+ {true,L} = check_line_numbers(Src,Dst),
+ ok = check_link_targets(Src,Dst,L,[{baz,0}],[{quux,0}]),
+ ok.
+
+convert_module(Mod,InclDirs,Config) ->
DataDir = ?config(data_dir,Config),
PrivDir = ?config(priv_dir,Config),
Src = filename:join(DataDir,Mod++".erl"),
Dst = filename:join(PrivDir,Mod++".erl.html"),
io:format("<a href=\"~s\">~s</a>\n",[Src,filename:basename(Src)]),
- ok = erl2html2:convert(Src, Dst, "<html><body>"),
+ ok = erl2html2:convert(Src, Dst, InclDirs, "<html><body>"),
io:format("<a href=\"~s\">~s</a>\n",[Dst,filename:basename(Dst)]),
{Src,Dst}.
@@ -229,36 +231,46 @@ check_line_number(Last,Line,OrigLine) ->
%% function.
%% The test module has -compile(export_all), so all functions are
%% found by listing the exported ones.
-check_link_targets(Src,Dst,L,RmFncs) ->
+check_link_targets(Src,Dst,L,RmFncs,ShouldRemain) ->
Mod = list_to_atom(filename:basename(filename:rootname(Src))),
Exports = Mod:module_info(exports)--[{module_info,0},{module_info,1}|RmFncs],
- {ok,{[],L},_} = xmerl_sax_parser:file(Dst,
- [{event_fun,fun sax_event/3},
- {event_state,{Exports,0}}]),
+ LastExprFuncs = [Func || {Func,_A} <- Exports],
+ {ok,{FAs,Fs,L},_} =
+ xmerl_sax_parser:file(Dst,
+ [{event_fun,fun sax_event/3},
+ {event_state,{Exports,LastExprFuncs,0}}]),
+ true = (length(FAs) == length(ShouldRemain)),
+ [] = [FA || FA <- FAs, not lists:member(FA,ShouldRemain)],
+ [] = [F || F <- Fs, not lists:keymember(F,1,ShouldRemain)],
ok.
sax_event(Event,_Loc,State) ->
sax_event(Event,State).
-sax_event({startElement,_Uri,"a",_QN,Attrs},{Exports,PrevLine}) ->
+sax_event({startElement,_Uri,"a",_QN,Attrs},{Exports,LastExprFuncs,PrevLine}) ->
{_,_,"name",Name} = lists:keyfind("name",3,Attrs),
case catch list_to_integer(Name) of
Line when is_integer(Line) ->
case PrevLine + 1 of
Line ->
-% erlang:display({found_line,Line}),
- {Exports,Line};
+ {Exports,LastExprFuncs,Line};
Other ->
ct:fail({unexpected_line_number_target,Other})
end;
{'EXIT',_} ->
- {match,[FStr,AStr]} =
- re:run(Name,"^(.*)-([0-9]+)$",[{capture,all_but_first,list}]),
+ {match,[FStr,EndStr]} =
+ re:run(Name,"^(.*)-(last_expr|[0-9]+)$",
+ [{capture,all_but_first,list}]),
F = list_to_atom(http_uri:decode(FStr)),
- A = list_to_integer(AStr),
-% erlang:display({found_fnc,F,A}),
- A = proplists:get_value(F,Exports),
- {lists:delete({F,A},Exports),PrevLine}
+ case EndStr of
+ "last_expr" ->
+ true = lists:member(F,LastExprFuncs),
+ {Exports,lists:delete(F,LastExprFuncs),PrevLine};
+ _ ->
+ A = list_to_integer(EndStr),
+ A = proplists:get_value(F,Exports),
+ {lists:delete({F,A},Exports),LastExprFuncs,PrevLine}
+ end
end;
sax_event(_,State) ->
State.
diff --git a/lib/test_server/test/erl2html2_SUITE_data/include/header3.hrl b/lib/test_server/test/erl2html2_SUITE_data/include/header3.hrl
new file mode 100644
index 0000000000..2a20850a3a
--- /dev/null
+++ b/lib/test_server/test/erl2html2_SUITE_data/include/header3.hrl
@@ -0,0 +1 @@
+-define(EPP_SWITCH, on).
diff --git a/lib/test_server/test/erl2html2_SUITE_data/m1.erl b/lib/test_server/test/erl2html2_SUITE_data/m1.erl
index 156f1d0a51..1d405963a5 100644
--- a/lib/test_server/test/erl2html2_SUITE_data/m1.erl
+++ b/lib/test_server/test/erl2html2_SUITE_data/m1.erl
@@ -7,9 +7,15 @@
-include("header1.hrl").
-include("header2.hrl").
+-include("header3.hrl").
-define(MACRO1,value).
+%% This macro is used to select the parser in erl2html2.
+%% If EPP_SWITCH is defined, epp is used; otherwise epp_dodger.
+epp_switch() ->
+ ?EPP_SWITCH.
+
%%% Comment
foo(x) ->
%% Comment
diff --git a/lib/test_server/vsn.mk b/lib/test_server/vsn.mk
index 18d7583c35..fd9e4e6d74 100644
--- a/lib/test_server/vsn.mk
+++ b/lib/test_server/vsn.mk
@@ -1 +1 @@
-TEST_SERVER_VSN = 3.7.2
+TEST_SERVER_VSN = 3.9
diff --git a/lib/tools/doc/src/cover.xml b/lib/tools/doc/src/cover.xml
index 07ffa65e3d..914baa7977 100644
--- a/lib/tools/doc/src/cover.xml
+++ b/lib/tools/doc/src/cover.xml
@@ -5,7 +5,7 @@
<header>
<copyright>
<year>2001</year>
- <year>2013</year>
+ <year>2015</year>
<holder>Ericsson AB, All Rights Reserved</holder>
</copyright>
<legalnotice>
@@ -138,17 +138,18 @@
</desc>
</func>
<func>
- <name>compile(ModFile) -> Result</name>
- <name>compile(ModFile, Options) -> Result</name>
- <name>compile_module(ModFile) -> Result</name>
- <name>compile_module(ModFile, Options) -> Result</name>
- <fsummary>Compile a module for Cover analysis.</fsummary>
+ <name>compile(ModFiles) -> Result | [Result]</name>
+ <name>compile(ModFiles, Options) -> Result | [Result]</name>
+ <name>compile_module(ModFiles) -> Result | [Result]</name>
+ <name>compile_module(ModFiles, Options) -> Result | [Result]</name>
+ <fsummary>Compile one or more modules for Cover analysis.</fsummary>
<type>
+ <v>ModFiles = ModFile | [ModFile]</v>
<v>ModFile = Module | File</v>
<v>&nbsp;Module = atom()</v>
<v>&nbsp;File = string()</v>
<v>Options = [Option]</v>
- <v>&nbsp;Option = {i,Dir} | {d,Macro} | {d,Macro,Value}</v>
+ <v>&nbsp;Option = {i,Dir} | {d,Macro} | {d,Macro,Value} | export_all</v>
<d>See <c>compile:file/2.</c></d>
<v>Result = {ok,Module} | {error,File} | {error,not_main_node}</v>
</type>
@@ -165,6 +166,9 @@
returns <c>{ok,Module}</c>. Otherwise the function returns
<c>{error,File}</c>. Errors and warnings are printed as they
occur.</p>
+ <p>If a list of <c>ModFiles</c> is given as input, a list
+ of <c>Result</c> will be returned. The order of the returned
+ list is undefined.</p>
<p>Note that the internal database is (re-)initiated during
the compilation, meaning any previously collected coverage data
for the module will be lost.</p>
@@ -194,9 +198,10 @@
</desc>
</func>
<func>
- <name>compile_beam(ModFile) -> Result</name>
- <fsummary>Compile a module for Cover analysis, using an existing beam.</fsummary>
+ <name>compile_beam(ModFiles) -> Result | [Result]</name>
+ <fsummary>Compile one or more modules for Cover analysis, using existing beam(s).</fsummary>
<type>
+ <v>ModFiles = ModFile | [ModFile]</v>
<v>ModFile = Module | BeamFile</v>
<v>&nbsp;Module = atom()</v>
<v>&nbsp;BeamFile = string()</v>
@@ -229,6 +234,9 @@
returned.</p>
<p><c>{error,BeamFile}</c> is returned if the compiled code
can not be loaded on the node.</p>
+ <p>If a list of <c>ModFiles</c> is given as input, a list
+ of <c>Result</c> will be returned. The order of the returned
+ list is undefined.</p>
</desc>
</func>
<func>
@@ -251,16 +259,21 @@
</desc>
</func>
<func>
- <name>analyse(Module) -> {ok,Answer} | {error,Error}</name>
- <name>analyse(Module, Analysis) -> {ok,Answer} | {error,Error}</name>
- <name>analyse(Module, Level) -> {ok,Answer} | {error,Error}</name>
- <name>analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error}</name>
- <fsummary>Analyse a Cover compiled module.</fsummary>
+ <name>analyse() -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Analysis) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Level) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules, Analysis) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Analysis, Level) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse(Modules, Analysis, Level) -> OneResult | {result,Ok,Fail} | {error,not_main_node}</name>
+ <fsummary>Analyse one or more Cover compiled modules.</fsummary>
<type>
- <v>Module = atom()</v>
+ <v>Modules = Module | [Module]</v>
+ <v>Module = atom() </v>
<v>Analysis = coverage | calls</v>
<v>Level = line | clause | function | module</v>
- <v>Answer = {Module,Value} | [{Item,Value}]</v>
+ <v>OneResult = {ok,{Module,Value}} | {ok,[{Item,Value}]} | {error, Error}</v>
<v>&nbsp;Item = Line | Clause | Function</v>
<v>&nbsp;&nbsp;Line = {M,N}</v>
<v>&nbsp;&nbsp;Clause = {M,F,A,C}</v>
@@ -269,49 +282,67 @@
<v>&nbsp;&nbsp;&nbsp;N = A = C = integer()</v>
<v>&nbsp;Value = {Cov,NotCov} | Calls</v>
<v>&nbsp;&nbsp;Cov = NotCov = Calls = integer()</v>
- <v>Error = {not_cover_compiled,Module} | not_main_node</v>
+ <v>&nbsp;Error = {not_cover_compiled,Module}</v>
+ <v>Ok = [{Module,Value}] | [{Item,Value}]</v>
+ <v>Fail = [Error]</v>
</type>
<desc>
- <p>Performs analysis of a Cover compiled module <c>Module</c>, as
+ <p>Performs analysis of one or more Cover compiled modules, as
specified by <c>Analysis</c> and <c>Level</c> (see above), by
examining the contents of the internal database.</p>
<p><c>Analysis</c> defaults to <c>coverage</c> and <c>Level</c>
defaults to <c>function</c>.</p>
- <p>If <c>Module</c> is not Cover compiled, the function returns
- <c>{error,{not_cover_compiled,Module}}</c>.</p>
- <p>HINT: It is possible to issue multiple analyse_to_file commands at
- the same time. </p>
+ <p>If <c>Modules</c> is an atom (one module), the return will
+          be <c>OneResult</c>; otherwise the return will be
+ <c>{result,Ok,Fail}</c>.</p>
+ <p>If <c>Modules</c> is not given, all modules that have data
+          in the cover data table are analysed. Note that this
+ includes both cover compiled modules and imported
+ modules.</p>
+ <p>If a given module is not Cover compiled, this is indicated
+ by the error reason <c>{not_cover_compiled,Module}</c>.</p>
</desc>
</func>
<func>
- <name>analyse_to_file(Module) -> </name>
- <name>analyse_to_file(Module,Options) -> </name>
- <name>analyse_to_file(Module, OutFile) -> </name>
- <name>analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error}</name>
- <fsummary>Detailed coverage analysis of a Cover compiled module.</fsummary>
+ <name>analyse_to_file() -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse_to_file(Modules) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse_to_file(Options) -> {result,Ok,Fail} | {error,not_main_node}</name>
+ <name>analyse_to_file(Modules,Options) -> Answer | {result,Ok,Fail} | {error,not_main_node}</name>
+ <fsummary>Detailed coverage analysis of one or more Cover compiled modules.</fsummary>
<type>
+ <v>Modules = Module | [Module]</v>
<v>Module = atom()</v>
- <v>OutFile = string()</v>
+ <v>OutFile = OutDir = string()</v>
<v>Options = [Option]</v>
- <v>Option = html</v>
- <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v>
+ <v>Option = html | {outfile,OutFile} | {outdir,OutDir}</v>
+ <v>Answer = {ok,OutFile} | {error,Error}</v>
+ <v>Ok = [OutFile]</v>
+ <v>Fail = [Error]</v>
+ <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module}</v>
<v>&nbsp;File = string()</v>
<v>&nbsp;Reason = term()</v>
</type>
<desc>
- <p>Makes a copy <c>OutFile</c> of the source file for a module
- <c>Module</c>, where it for each executable line is specified
+          <p>Makes copies of the source file for the given modules,
+            specifying for each executable line
how many times it has been executed.</p>
<p>The output file <c>OutFile</c> defaults to
<c>Module.COVER.out</c>, or <c>Module.COVER.html</c> if the
option <c>html</c> was used.</p>
- <p>If <c>Module</c> is not Cover compiled, the function returns
- <c>{error,{not_cover_compiled,Module}}</c>.</p>
+ <p>If <c>Modules</c> is an atom (one module), the return will
+          be <c>Answer</c>; otherwise the return will be a
+ list, <c>{result,Ok,Fail}</c>.</p>
+ <p>If <c>Modules</c> is not given, all modules that have data
+          in the cover data table are analysed. Note that this
+ includes both cover compiled modules and imported
+ modules.</p>
+ <p>If a module is not Cover compiled, this is indicated by the
+ error reason <c>{not_cover_compiled,Module}</c>.</p>
<p>If the source file and/or the output file cannot be opened using
<c>file:open/2</c>, the function returns
<c>{error,{file,File,Reason}}</c> where <c>File</c> is the file
name and <c>Reason</c> is the error reason.</p>
- <p>If the module was cover compiled from the <c>.beam</c>
+ <p>If a module was cover compiled from the <c>.beam</c>
file, i.e. using <c>compile_beam/1</c> or
<c>compile_beam_directory/0,1</c>, it is assumed that the
source code can be found in the same directory as the
@@ -322,10 +353,8 @@
joining <c>../src</c> and the tail of the compiled path
below a trailing <c>src</c> component, then the compiled
path itself.
- If no source code is found,
- <c>{error,no_source_code_found}</c> is returned.</p>
- <p>HINT: It is possible to issue multiple analyse_to_file commands at
- the same time. </p>
+ If no source code is found, this is indicated by the error reason
+ <c>{no_source_code_found,Module}</c>.</p>
</desc>
</func>
<func>
@@ -339,7 +368,7 @@
<v>OutFile = string()</v>
<v>Options = [Option]</v>
<v>Option = html</v>
- <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | no_source_code_found | not_main_node</v>
+ <v>Error = {not_cover_compiled,Module} | {file,File,Reason} | {no_source_code_found,Module} | not_main_node</v>
<v>&nbsp;File = string()</v>
<v>&nbsp;Reason = term()</v>
</type>
diff --git a/lib/tools/doc/src/cprof.xml b/lib/tools/doc/src/cprof.xml
index 553597837e..bfddb9f5a8 100644
--- a/lib/tools/doc/src/cprof.xml
+++ b/lib/tools/doc/src/cprof.xml
@@ -66,7 +66,7 @@
<func>
<name>analyse() -> {AllCallCount, ModAnalysisList}</name>
<name>analyse(Limit) -> {AllCallCount, ModAnalysisList}</name>
- <name>analyse(Mod) -> ModAnlysis</name>
+ <name>analyse(Mod) -> ModAnalysis</name>
<name>analyse(Mod, Limit) -> ModAnalysis</name>
<fsummary>Collect and analyse call counters.</fsummary>
<type>
diff --git a/lib/tools/doc/src/notes.xml b/lib/tools/doc/src/notes.xml
index 6f9563bb68..38b57b73a9 100644
--- a/lib/tools/doc/src/notes.xml
+++ b/lib/tools/doc/src/notes.xml
@@ -30,6 +30,32 @@
</header>
<p>This document describes the changes made to the Tools application.</p>
+<section><title>Tools 2.7.2</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fix lcnt sorting and printout of histograms.</p>
+ <p>
+ Own Id: OTP-12364</p>
+ </item>
+ <item>
+ <p> Fix a Unicode bug in the <c>tags</c> module. </p>
+ <p>
+ Own Id: OTP-12567</p>
+ </item>
+ <item>
+ <p>
+ Fix tags completion in erlang.el for GNU Emacs 23+</p>
+ <p>
+ Own Id: OTP-12583</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Tools 2.7.1</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/tools/emacs/erlang.el b/lib/tools/emacs/erlang.el
index c56759ebb9..3610356355 100644
--- a/lib/tools/emacs/erlang.el
+++ b/lib/tools/emacs/erlang.el
@@ -2444,7 +2444,10 @@ This is automagically called by the user level function `indent-region'."
;; Parse the Erlang code from the beginning of the clause to
;; the beginning of the region.
(while (< (point) indent-point)
- (setq state (erlang-partial-parse (point) indent-point state)))
+ (let ((pt (point)))
+ (setq state (erlang-partial-parse pt indent-point state))
+ (if (= pt (point))
+ (error "Illegal syntax"))))
;; Indent every line in the region
(while continue
(goto-char indent-point)
@@ -2480,8 +2483,11 @@ This is automagically called by the user level function `indent-region'."
(if (>= from-end (- (point-max) indent-point))
(setq continue nil)
(while (< (point) indent-point)
- (setq state (erlang-partial-parse
- (point) indent-point state))))))))
+ (let ((pt (point)))
+ (setq state (erlang-partial-parse
+ pt indent-point state))
+ (if (= pt (point))
+ (error "Illegal syntax")))))))))
(defun erlang-indent-current-buffer ()
@@ -2528,7 +2534,10 @@ Return nil if line starts inside string, t if in a comment."
(goto-char parse-start)
(erlang-beginning-of-clause))
(while (< (point) indent-point)
- (setq state (erlang-partial-parse (point) indent-point state)))
+ (let ((pt (point)))
+ (setq state (erlang-partial-parse pt indent-point state))
+ (if (= pt (point))
+ (error "Illegal syntax"))))
(erlang-calculate-stack-indent indent-point state))))
(defun erlang-show-syntactic-information ()
@@ -2698,12 +2707,13 @@ Value is list (stack token-start token-type in-what)."
(erlang-push (list '|| token (current-column)) stack)
(forward-char 2))
- ;; Bit-syntax open paren
- ((looking-at "<<")
+ ;; Bit-syntax open. Note that map syntax allows "<<" to follow ":="
+ ;; or "=>" without intervening whitespace, so handle that case here
+ ((looking-at "\\(:=\\|=>\\)?<<")
(erlang-push (list '<< token (current-column)) stack)
- (forward-char 2))
+ (forward-char (- (match-end 0) (match-beginning 0))))
- ;; Bbit-syntax close paren
+ ;; Bit-syntax close
((looking-at ">>")
(while (memq (car (car stack)) '(|| ->))
(erlang-pop stack))
@@ -4188,7 +4198,10 @@ This function is designed to be a member of a criteria list."
;; Do not return `stop' when inside a list comprehension
;; construction. (The point must be after `||').
(while (< (point) orig-point)
- (setq state (erlang-partial-parse (point) orig-point state)))
+ (let ((pt (point)))
+ (setq state (erlang-partial-parse pt orig-point state))
+ (if (= pt (point))
+ (error "Illegal syntax"))))
(if (and (car state) (eq (car (car (car state))) '||))
nil
'stop)))
@@ -4743,6 +4756,23 @@ for a tag on the form `module:tag'."
;;; `module:tag'.
+(when (and (fboundp 'etags-tags-completion-table)
+ (fboundp 'tags-lazy-completion-table)) ; Emacs 23.1+
+ (if (fboundp 'advice-add)
+ ;; Emacs 24.4+
+ (advice-add 'etags-tags-completion-table :around
+ (lambda (oldfun)
+ (if (eq find-tag-default-function 'erlang-find-tag-for-completion)
+ (erlang-etags-tags-completion-table)
+ (funcall oldfun)))
+ (list :name 'erlang-replace-tags-table))
+ ;; Emacs 23.1-24.3
+ (defadvice etags-tags-completion-table (around erlang-replace-tags-table activate)
+ (if (eq find-tag-default-function 'erlang-find-tag-for-completion)
+ (setq ad-return-value (erlang-etags-tags-completion-table))
+ ad-do-it))))
+
+
(defun erlang-complete-tag ()
"Perform tags completion on the text around point.
Completes to the set of names listed in the current tags table.
@@ -4754,7 +4784,17 @@ about Erlang modules."
(require 'etags)
(error nil))
(cond ((and erlang-tags-installed
- (fboundp 'complete-tag)) ; Emacs 19
+ (fboundp 'etags-tags-completion-table)
+ (fboundp 'tags-lazy-completion-table)) ; Emacs 23.1+
+ ;; This depends on the advice called erlang-replace-tags-table
+ ;; above. It is not enough to let-bind
+ ;; tags-completion-table-function since that will not override
+ ;; the buffer-local value in the TAGS buffer.
+ (let ((find-tag-default-function 'erlang-find-tag-for-completion))
+ (complete-tag)))
+ ((and erlang-tags-installed
+ (fboundp 'complete-tag)
+ (fboundp 'tags-complete-tag)) ; Emacs 19
(let ((orig-tags-complete-tag (symbol-function 'tags-complete-tag)))
(fset 'tags-complete-tag
(symbol-function 'erlang-tags-complete-tag))
@@ -4769,6 +4809,15 @@ about Erlang modules."
(error "This version of Emacs can't complete tags"))))
+(defun erlang-find-tag-for-completion ()
+ (let ((start (save-excursion
+ (skip-chars-backward "[:word:][:digit:]_:'")
+ (point))))
+ (unless (eq start (point))
+ (buffer-substring-no-properties start (point)))))
+
+
+
;; Based on `tags-complete-tag', but this one uses
;; `erlang-tags-completion-table' instead of `tags-completion-table'.
;;
@@ -4816,7 +4865,12 @@ about Erlang modules."
;; the only format supported by Emacs, so far.)
(defun erlang-etags-tags-completion-table ()
(let ((table (make-vector 511 0))
- (file nil))
+ (file nil)
+ (progress-reporter
+ (when (fboundp 'make-progress-reporter)
+ (make-progress-reporter
+ (format "Making erlang tags completion table for %s..." buffer-file-name)
+ (point-min) (point-max)))))
(save-excursion
(goto-char (point-min))
;; This monster regexp matches an etags tag line.
@@ -4828,31 +4882,33 @@ about Erlang modules."
;; \6 is the line to start searching at;
;; \7 is the char to start searching at.
(while (progn
- (while (and
- (eq (following-char) ?\f)
- (looking-at "\f\n\\([^,\n]*\\),.*\n"))
- (setq file (buffer-substring
- (match-beginning 1) (match-end 1)))
- (goto-char (match-end 0)))
- (re-search-forward
- "\
+ (while (and
+ (eq (following-char) ?\f)
+ (looking-at "\f\n\\([^,\n]*\\),.*\n"))
+ (setq file (buffer-substring
+ (match-beginning 1) (match-end 1)))
+ (goto-char (match-end 0)))
+ (re-search-forward
+ "\
^\\(\\([^\177]+[^-a-zA-Z0-9_$\177]+\\)?\\([-a-zA-Z0-9_$?:]+\\)\
\[^-a-zA-Z0-9_$?:\177]*\\)\177\\(\\([^\n\001]+\\)\001\\)?\
\\([0-9]+\\)?,\\([0-9]+\\)?\n"
- nil t))
- (let ((tag (if (match-beginning 5)
- ;; There is an explicit tag name.
- (buffer-substring (match-beginning 5) (match-end 5))
- ;; No explicit tag name. Best guess.
- (buffer-substring (match-beginning 3) (match-end 3))))
- (module (and file
- (erlang-get-module-from-file-name file))))
- (intern tag table)
- (if (stringp module)
- (progn
- (intern (concat module ":" tag) table)
- ;; Only the first one will be stored in the table.
- (intern (concat module ":") table))))))
+ nil t))
+ (let ((tag (if (match-beginning 5)
+ ;; There is an explicit tag name.
+ (buffer-substring (match-beginning 5) (match-end 5))
+ ;; No explicit tag name. Best guess.
+ (buffer-substring (match-beginning 3) (match-end 3))))
+ (module (and file
+ (erlang-get-module-from-file-name file))))
+ (intern tag table)
+ (when (stringp module)
+ (intern (concat module ":" tag) table)
+ ;; Only the first ones will be stored in the table.
+ (intern (concat module ":") table)
+ (intern (concat module ":module_info") table))
+ (when progress-reporter
+ (progress-reporter-update progress-reporter (point))))))
table))
;;;
diff --git a/lib/tools/emacs/test.erl.indented b/lib/tools/emacs/test.erl.indented
index 1c1086ca58..abb05fd59b 100644
--- a/lib/tools/emacs/test.erl.indented
+++ b/lib/tools/emacs/test.erl.indented
@@ -32,6 +32,14 @@
-module(test).
-compile(export_all).
+%% Used to cause an "Unbalanced parentheses" error.
+foo(M) ->
+ M#{a :=<<"a">>
+ ,b:=1}.
+foo() ->
+ #{a =><<"a">>
+ ,b=>1}.
+
%% Module attributes should be highlighted
-export([t/1]).
diff --git a/lib/tools/emacs/test.erl.orig b/lib/tools/emacs/test.erl.orig
index a9d09000d2..3d8f29fe18 100644
--- a/lib/tools/emacs/test.erl.orig
+++ b/lib/tools/emacs/test.erl.orig
@@ -32,6 +32,14 @@
-module(test).
-compile(export_all).
+%% Used to cause an "Unbalanced parentheses" error.
+foo(M) ->
+M#{a :=<<"a">>
+,b:=1}.
+foo() ->
+#{a =><<"a">>
+,b=>1}.
+
%% Module attributes should be highlighted
-export([t/1]).
diff --git a/lib/tools/src/cover.erl b/lib/tools/src/cover.erl
index 31754015f7..71e17e0ba1 100644
--- a/lib/tools/src/cover.erl
+++ b/lib/tools/src/cover.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -77,8 +77,11 @@
compile/1, compile/2, compile_module/1, compile_module/2,
compile_directory/0, compile_directory/1, compile_directory/2,
compile_beam/1, compile_beam_directory/0, compile_beam_directory/1,
- analyse/1, analyse/2, analyse/3, analyze/1, analyze/2, analyze/3,
+ analyse/0, analyse/1, analyse/2, analyse/3,
+ analyze/0, analyze/1, analyze/2, analyze/3,
+ analyse_to_file/0,
analyse_to_file/1, analyse_to_file/2, analyse_to_file/3,
+ analyze_to_file/0,
analyze_to_file/1, analyze_to_file/2, analyze_to_file/3,
async_analyse_to_file/1,async_analyse_to_file/2,
async_analyse_to_file/3, async_analyze_to_file/1,
@@ -109,6 +112,7 @@
line = '_' % integer()
}).
-define(BUMP_REC_NAME,bump).
+-define(CHUNK_SIZE, 20000).
-record(vars, {module, % atom() Module name
@@ -132,7 +136,7 @@
-define(SERVER, cover_server).
%% Line doesn't matter.
--define(BLOCK(Expr), {block,0,[Expr]}).
+-define(BLOCK(Expr), {block,erl_anno:new(0),[Expr]}).
-define(BLOCK1(Expr),
if
element(1, Expr) =:= block ->
@@ -181,10 +185,11 @@ start(Node) when is_atom(Node) ->
start(Nodes) ->
call({start_nodes,remove_myself(Nodes,[])}).
-%% compile(ModFile) ->
-%% compile(ModFile, Options) ->
-%% compile_module(ModFile) -> Result
-%% compile_module(ModFile, Options) -> Result
+%% compile(ModFiles) ->
+%% compile(ModFiles, Options) ->
+%% compile_module(ModFiles) -> Result
+%% compile_module(ModFiles, Options) -> Result
+%% ModFiles = ModFile | [ModFile]
%% ModFile = Module | File
%% Module = atom()
%% File = string()
@@ -198,18 +203,27 @@ compile(ModFile, Options) ->
compile_module(ModFile) when is_atom(ModFile);
is_list(ModFile) ->
compile_module(ModFile, []).
-compile_module(Module, Options) when is_atom(Module), is_list(Options) ->
- compile_module(atom_to_list(Module), Options);
-compile_module(File, Options) when is_list(File), is_list(Options) ->
- WithExt = case filename:extension(File) of
- ".erl" ->
- File;
- _ ->
- File++".erl"
- end,
- AbsFile = filename:absname(WithExt),
- [R] = compile_modules([AbsFile], Options),
- R.
+compile_module(ModFile, Options) when is_atom(ModFile);
+ is_list(ModFile), is_integer(hd(ModFile)) ->
+ [R] = compile_module([ModFile], Options),
+ R;
+compile_module(ModFiles, Options) when is_list(Options) ->
+ AbsFiles =
+ [begin
+ File =
+ case ModFile of
+ _ when is_atom(ModFile) -> atom_to_list(ModFile);
+ _ when is_list(ModFile) -> ModFile
+ end,
+ WithExt = case filename:extension(File) of
+ ".erl" ->
+ File;
+ _ ->
+ File++".erl"
+ end,
+ filename:absname(WithExt)
+ end || ModFile <- ModFiles],
+ compile_modules(AbsFiles, Options).
%% compile_directory() ->
%% compile_directory(Dir) ->
@@ -240,13 +254,14 @@ compile_directory(Dir, Options) when is_list(Dir), is_list(Options) ->
compile_modules(Files,Options) ->
Options2 = filter_options(Options),
- compile_modules(Files,Options2,[]).
+ %% compile_modules(Files,Options2,[]).
+ call({compile, Files, Options2}).
-compile_modules([File|Files], Options, Result) ->
- R = call({compile, File, Options}),
- compile_modules(Files,Options,[R|Result]);
-compile_modules([],_Opts,Result) ->
- lists:reverse(Result).
+%% compile_modules([File|Files], Options, Result) ->
+%% R = call({compile, File, Options}),
+%% compile_modules(Files,Options,[R|Result]);
+%% compile_modules([],_Opts,Result) ->
+%% lists:reverse(Result).
filter_options(Options) ->
lists:filter(fun(Option) ->
@@ -264,30 +279,17 @@ filter_options(Options) ->
%% ModFile - see compile/1
%% Result - see compile/1
%% Reason = non_existing | already_cover_compiled
-compile_beam(Module) when is_atom(Module) ->
- case code:which(Module) of
- non_existing ->
+compile_beam(ModFile0) when is_atom(ModFile0);
+ is_list(ModFile0), is_integer(hd(ModFile0)) ->
+ case compile_beams([ModFile0]) of
+ [{error,{non_existing,_}}] ->
+ %% Backwards compatibility
{error,non_existing};
- ?TAG ->
- compile_beam(Module,?TAG);
- File ->
- compile_beam(Module,File)
+ [Result] ->
+ Result
end;
-compile_beam(File) when is_list(File) ->
- {WithExt,WithoutExt}
- = case filename:rootname(File,".beam") of
- File ->
- {File++".beam",File};
- Rootname ->
- {File,Rootname}
- end,
- AbsFile = filename:absname(WithExt),
- Module = list_to_atom(filename:basename(WithoutExt)),
- compile_beam(Module,AbsFile).
-
-compile_beam(Module,File) ->
- call({compile_beam,Module,File}).
-
+compile_beam(ModFiles) when is_list(ModFiles) ->
+ compile_beams(ModFiles).
%% compile_beam_directory(Dir) -> [Result] | {error,Reason}
@@ -312,19 +314,52 @@ compile_beam_directory(Dir) when is_list(Dir) ->
Error
end.
-compile_beams(Files) ->
- compile_beams(Files,[]).
-compile_beams([File|Files],Result) ->
- R = compile_beam(File),
- compile_beams(Files,[R|Result]);
-compile_beams([],Result) ->
- lists:reverse(Result).
+compile_beams(ModFiles0) ->
+ ModFiles = get_mods_and_beams(ModFiles0,[]),
+ call({compile_beams,ModFiles}).
-
-%% analyse(Module) ->
-%% analyse(Module, Analysis) ->
-%% analyse(Module, Level) ->
-%% analyse(Module, Analysis, Level) -> {ok,Answer} | {error,Error}
+get_mods_and_beams([Module|ModFiles],Acc) when is_atom(Module) ->
+ case code:which(Module) of
+ non_existing ->
+ get_mods_and_beams(ModFiles,[{error,{non_existing,Module}}|Acc]);
+ File ->
+ get_mods_and_beams([{Module,File}|ModFiles],Acc)
+ end;
+get_mods_and_beams([File|ModFiles],Acc) when is_list(File) ->
+ {WithExt,WithoutExt}
+ = case filename:rootname(File,".beam") of
+ File ->
+ {File++".beam",File};
+ Rootname ->
+ {File,Rootname}
+ end,
+ AbsFile = filename:absname(WithExt),
+ Module = list_to_atom(filename:basename(WithoutExt)),
+ get_mods_and_beams([{Module,AbsFile}|ModFiles],Acc);
+get_mods_and_beams([{Module,File}|ModFiles],Acc) ->
+ %% Check for duplicates
+ case lists:keyfind(Module,2,Acc) of
+ {ok,Module,File} ->
+ %% Duplicate, but same file so ignore
+ get_mods_and_beams(ModFiles,Acc);
+ {ok,Module,_OtherFile} ->
+	    %% Duplicate and different file - error
+ get_mods_and_beams(ModFiles,[{error,{duplicate,Module}}|Acc]);
+ _ ->
+ get_mods_and_beams(ModFiles,[{ok,Module,File}|Acc])
+ end;
+get_mods_and_beams([],Acc) ->
+ lists:reverse(Acc).
+
+
+%% analyse(Modules) ->
+%% analyse(Analysis) ->
+%% analyse(Level) ->
+%% analyse(Modules, Analysis) ->
+%% analyse(Modules, Level) ->
+%% analyse(Analysis, Level)
+%% analyse(Modules, Analysis, Level) -> {ok,Answer} | {error,Error}
+%% Modules = Module | [Module]
%% Module = atom()
%% Analysis = coverage | calls
%% Level = line | clause | function | module
@@ -337,48 +372,74 @@ compile_beams([],Result) ->
%% N = A = C = integer()
%% Value = {Cov,NotCov} | Calls
%% Cov = NotCov = Calls = integer()
-%% Error = {not_cover_compiled,Module}
+%% Error = {not_cover_compiled,Module} | not_main_node
+-define(is_analysis(__A__),
+ (__A__=:=coverage orelse __A__=:=calls)).
+-define(is_level(__L__),
+ (__L__=:=line orelse __L__=:=clause orelse
+ __L__=:=function orelse __L__=:=module)).
+analyse() ->
+ analyse('_').
+
+analyse(Analysis) when ?is_analysis(Analysis) ->
+ analyse('_', Analysis);
+analyse(Level) when ?is_level(Level) ->
+ analyse('_', Level);
analyse(Module) ->
analyse(Module, coverage).
-analyse(Module, Analysis) when Analysis=:=coverage; Analysis=:=calls ->
+
+analyse(Analysis, Level) when ?is_analysis(Analysis) andalso
+ ?is_level(Level) ->
+ analyse('_', Analysis, Level);
+analyse(Module, Analysis) when ?is_analysis(Analysis) ->
analyse(Module, Analysis, function);
-analyse(Module, Level) when Level=:=line; Level=:=clause; Level=:=function;
- Level=:=module ->
+analyse(Module, Level) when ?is_level(Level) ->
analyse(Module, coverage, Level).
-analyse(Module, Analysis, Level) when is_atom(Module),
- Analysis=:=coverage; Analysis=:=calls,
- Level=:=line; Level=:=clause;
- Level=:=function; Level=:=module ->
+
+analyse(Module, Analysis, Level) when ?is_analysis(Analysis),
+ ?is_level(Level) ->
call({{analyse, Analysis, Level}, Module}).
+analyze() -> analyse( ).
analyze(Module) -> analyse(Module).
analyze(Module, Analysis) -> analyse(Module, Analysis).
analyze(Module, Analysis, Level) -> analyse(Module, Analysis, Level).
-%% analyse_to_file(Module) ->
-%% analyse_to_file(Module, Options) ->
-%% analyse_to_file(Module, OutFile) ->
-%% analyse_to_file(Module, OutFile, Options) -> {ok,OutFile} | {error,Error}
+%% analyse_to_file() ->
+%% analyse_to_file(Modules) ->
+%% analyse_to_file(Modules, Options) ->
+%% Modules = Module | [Module]
%% Module = atom()
%% OutFile = string()
%% Options = [Option]
-%% Option = html
+%% Option = html | {outfile,filename()} | {outdir,dirname()}
%% Error = {not_cover_compiled,Module} | no_source_code_found |
%% {file,File,Reason}
%% File = string()
%% Reason = term()
-analyse_to_file(Module) when is_atom(Module) ->
- analyse_to_file(Module, outfilename(Module,[]), []).
-analyse_to_file(Module, []) when is_atom(Module) ->
- analyse_to_file(Module, outfilename(Module,[]), []);
-analyse_to_file(Module, Options) when is_atom(Module),
- is_list(Options), is_atom(hd(Options)) ->
- analyse_to_file(Module, outfilename(Module,Options), Options);
-analyse_to_file(Module, OutFile) when is_atom(Module), is_list(OutFile) ->
- analyse_to_file(Module, OutFile, []).
-analyse_to_file(Module, OutFile, Options) when is_atom(Module), is_list(OutFile) ->
- call({{analyse_to_file, OutFile, Options}, Module}).
-
+%%
+%% Kept for backwards compatibility:
+%% analyse_to_file(Modules, OutFile) ->
+%% analyse_to_file(Modules, OutFile, Options) -> {ok,OutFile} | {error,Error}
+analyse_to_file() ->
+ analyse_to_file('_').
+analyse_to_file(Arg) ->
+ case is_options(Arg) of
+ true ->
+ analyse_to_file('_',Arg);
+ false ->
+ analyse_to_file(Arg,[])
+ end.
+analyse_to_file(Module, OutFile) when is_list(OutFile), is_integer(hd(OutFile)) ->
+ %% Kept for backwards compatibility
+ analyse_to_file(Module, [{outfile,OutFile}]);
+analyse_to_file(Module, Options) when is_list(Options) ->
+ call({{analyse_to_file, Options}, Module}).
+analyse_to_file(Module, OutFile, Options) when is_list(OutFile) ->
+ %% Kept for backwards compatibility
+ analyse_to_file(Module,[{outfile,OutFile}|Options]).
+
+analyze_to_file() -> analyse_to_file().
analyze_to_file(Module) -> analyse_to_file(Module).
analyze_to_file(Module, OptOrOut) -> analyse_to_file(Module, OptOrOut).
analyze_to_file(Module, OutFile, Options) ->
@@ -391,6 +452,15 @@ async_analyse_to_file(Module, OutFileOrOpts) ->
async_analyse_to_file(Module, OutFile, Options) ->
do_spawn(?MODULE, analyse_to_file, [Module, OutFile, Options]).
+is_options([html]) ->
+ true; % this is not 100% safe - could be a module named html...
+is_options([html|Opts]) ->
+ is_options(Opts);
+is_options([{Opt,_}|_]) when Opt==outfile; Opt==outdir ->
+ true;
+is_options(_) ->
+ false.
+
do_spawn(M,F,A) ->
spawn_link(fun() ->
case apply(M,F,A) of
@@ -408,13 +478,16 @@ async_analyze_to_file(Module, OutFileOrOpts) ->
async_analyze_to_file(Module, OutFile, Options) ->
async_analyse_to_file(Module, OutFile, Options).
-outfilename(Module,Opts) ->
- case lists:member(html,Opts) of
- true ->
- atom_to_list(Module)++".COVER.html";
- false ->
- atom_to_list(Module)++".COVER.out"
- end.
+outfilename(undefined, Module, HTML) ->
+ outfilename(Module, HTML);
+outfilename(OutDir, Module, HTML) ->
+ filename:join(OutDir, outfilename(Module, HTML)).
+
+outfilename(Module, true) ->
+ atom_to_list(Module)++".COVER.html";
+outfilename(Module, false) ->
+ atom_to_list(Module)++".COVER.out".
+
%% export(File)
%% export(File,Module) -> ok | {error,Reason}
@@ -559,7 +632,7 @@ init_main(Starter) ->
,{write_concurrency, true}
]),
ets:new(?COVER_CLAUSE_TABLE, [set, public, named_table]),
- ets:new(?BINARY_TABLE, [set, named_table]),
+ ets:new(?BINARY_TABLE, [set, public, named_table]),
ets:new(?COLLECTION_TABLE, [set, public, named_table]),
ets:new(?COLLECTION_CLAUSE_TABLE, [set, public, named_table]),
net_kernel:monitor_nodes(true),
@@ -573,55 +646,19 @@ main_process_loop(State) ->
reply(From, {ok,StartedNodes}),
main_process_loop(State1);
- {From, {compile, File, Options}} ->
- case do_compile(File, Options) of
- {ok, Module} ->
- remote_load_compiled(State#main_state.nodes,[{Module,File}]),
- reply(From, {ok, Module}),
- Compiled = add_compiled(Module, File,
- State#main_state.compiled),
- Imported = remove_imported(Module,State#main_state.imported),
- NewState = State#main_state{compiled = Compiled,
- imported = Imported},
- %% This module (cover) could have been reloaded. Make
- %% sure we run the new code.
- ?MODULE:main_process_loop(NewState);
- error ->
- reply(From, {error, File}),
- main_process_loop(State)
- end;
+ {From, {compile, Files, Options}} ->
+ {R,S} = do_compile(Files, Options, State),
+ reply(From,R),
+ %% This module (cover) could have been reloaded. Make
+ %% sure we run the new code.
+ ?MODULE:main_process_loop(S);
- {From, {compile_beam, Module, BeamFile0}} ->
- Compiled0 = State#main_state.compiled,
- case get_beam_file(Module,BeamFile0,Compiled0) of
- {ok,BeamFile} ->
- UserOptions = get_compile_options(Module,BeamFile),
- {Reply,Compiled} =
- case do_compile_beam(Module,BeamFile,UserOptions) of
- {ok, Module} ->
- remote_load_compiled(State#main_state.nodes,
- [{Module,BeamFile}]),
- C = add_compiled(Module,BeamFile,Compiled0),
- {{ok,Module},C};
- error ->
- {{error, BeamFile}, Compiled0};
- {error,Reason} -> % no abstract code
- {{error, {Reason, BeamFile}}, Compiled0}
- end,
- reply(From,Reply),
- Imported = remove_imported(Module,State#main_state.imported),
- NewState = State#main_state{compiled = Compiled,
- imported = Imported},
- %% This module (cover) could have been reloaded. Make
- %% sure we run the new code.
- ?MODULE:main_process_loop(NewState);
- {error,no_beam} ->
- %% The module has first been compiled from .erl, and now
- %% someone tries to compile it from .beam
- reply(From,
- {error,{already_cover_compiled,no_beam_found,Module}}),
- main_process_loop(State)
- end;
+ {From, {compile_beams, ModsAndFiles}} ->
+ {R,S} = do_compile_beams(ModsAndFiles,State),
+ reply(From,R),
+ %% This module (cover) could have been reloaded. Make
+ %% sure we run the new code.
+ ?MODULE:main_process_loop(S);
{From, {export,OutFile,Module}} ->
spawn(fun() ->
@@ -706,6 +743,16 @@ main_process_loop(State) ->
unregister(?SERVER),
reply(From, ok);
+ {From, {{analyse, Analysis, Level}, '_'}} ->
+ R = analyse_all(Analysis, Level, State),
+ reply(From, R),
+ main_process_loop(State);
+
+ {From, {{analyse, Analysis, Level}, Modules}} when is_list(Modules) ->
+ R = analyse_list(Modules, Analysis, Level, State),
+ reply(From, R),
+ main_process_loop(State);
+
{From, {{analyse, Analysis, Level}, Module}} ->
S = try
Loaded = is_loaded(Module, State),
@@ -722,15 +769,23 @@ main_process_loop(State) ->
end,
main_process_loop(S);
- {From, {{analyse_to_file, OutFile, Opts},Module}} ->
+ {From, {{analyse_to_file, Opts},'_'}} ->
+ R = analyse_all_to_file(Opts, State),
+ reply(From,R),
+ main_process_loop(State);
+
+ {From, {{analyse_to_file, Opts},Modules}} when is_list(Modules) ->
+ R = analyse_list_to_file(Modules, Opts, State),
+ reply(From,R),
+ main_process_loop(State);
+
+ {From, {{analyse_to_file, Opts},Module}} ->
S = try
Loaded = is_loaded(Module, State),
spawn(fun() ->
- ?SPAWN_DBG(analyse_to_file,
- {Module,OutFile, Opts}),
+ ?SPAWN_DBG(analyse_to_file,{Module,Opts}),
do_parallel_analysis_to_file(
- Module, OutFile, Opts,
- Loaded, From, State)
+ Module, Opts, Loaded, From, State)
end),
State
catch throw:Reason ->
@@ -848,11 +903,15 @@ remote_process_loop(State) ->
{remote,collect,Module,CollectorPid} ->
self() ! {remote,collect,Module,CollectorPid, ?SERVER};
- {remote,collect,Module,CollectorPid,From} ->
+ {remote,collect,Modules0,CollectorPid,From} ->
+ Modules = case Modules0 of
+ '_' -> [M || {M,_} <- State#remote_state.compiled];
+ _ -> Modules0
+ end,
spawn(fun() ->
?SPAWN_DBG(remote_collect,
- {Module, CollectorPid, From}),
- do_collect(Module, CollectorPid, From)
+ {Modules, CollectorPid, From}),
+ do_collect(Modules, CollectorPid, From)
end),
remote_process_loop(State);
@@ -893,39 +952,51 @@ remote_process_loop(State) ->
end.
-do_collect(Module, CollectorPid, From) ->
- AllMods =
- case Module of
- '_' -> ets:tab2list(?COVER_CLAUSE_TABLE);
- _ -> ets:lookup(?COVER_CLAUSE_TABLE, Module)
- end,
-
- %% Sending clause by clause in order to avoid large lists
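+%% For each module, stream its non-zero bumps to the collector in chunks of
+%% ?CHUNK_SIZE, resetting them locally as they are sent.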
+do_collect(Modules, CollectorPid, From) ->
pmap(
- fun({_Mod,Clauses}) ->
- lists:map(fun(Clause) ->
- send_collected_data(Clause, CollectorPid)
- end,Clauses)
- end,AllMods),
+ fun(Module) ->
+ Pattern = {#bump{module=Module, _='_'}, '$1'},
+ MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}],
+ Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
+ send_chunks(Match, CollectorPid, [])
+ end,Modules),
CollectorPid ! done,
remote_reply(From, ok).
-send_collected_data({M,F,A,C,_L}, CollectorPid) ->
- Pattern =
- {#bump{module=M, function=F, arity=A, clause=C}, '_'},
- Bumps = ets:match_object(?COVER_TABLE, Pattern),
- %% Reset
- lists:foreach(fun({Bump,_N}) ->
- ets:insert(?COVER_TABLE, {Bump,0})
- end,
- Bumps),
- CollectorPid ! {chunk,Bumps}.
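+%% Send one chunk at a time; each chunk is reset to zero in a separate
+%% monitored process while the collector acknowledges with 'continue', so
+%% neither side builds up a large mailbox.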
+send_chunks('$end_of_table', _CollectorPid, Mons) ->
+ get_downs(Mons);
+send_chunks({Chunk,Continuation}, CollectorPid, Mons) ->
+ Mon = spawn_monitor(
+ fun() ->
+ lists:foreach(fun({Bump,_N}) ->
+ ets:insert(?COVER_TABLE, {Bump,0})
+ end,
+ Chunk) end),
+ send_chunk(CollectorPid,Chunk),
+ send_chunks(ets:select(Continuation), CollectorPid, [Mon|Mons]).
+
+send_chunk(CollectorPid,Chunk) ->
+ CollectorPid ! {chunk,Chunk,self()},
+ receive continue -> ok end.
+
+get_downs([]) ->
+ ok;
+get_downs(Mons) ->
+ receive
+ {'DOWN', Ref, _Type, Pid, _Reason} = Down ->
+ case lists:member({Pid,Ref},Mons) of
+ true ->
+ get_downs(lists:delete({Pid,Ref},Mons));
+ false ->
+ %% This should be handled somewhere else
+ self() ! Down,
+ get_downs(Mons)
+ end
+ end.
-reload_originals([{Module,_File}|Compiled]) ->
- do_reload_original(Module),
- reload_originals(Compiled);
-reload_originals([]) ->
- ok.
+reload_originals(Compiled) ->
+ Modules = [M || {M,_} <- Compiled],
+ pmap(fun do_reload_original/1, Modules).
do_reload_original(Module) ->
case code:which(Module) of
@@ -1068,15 +1139,40 @@ remote_load_compiled(_Nodes, [], [], _ModNum) ->
ok;
remote_load_compiled(Nodes, Compiled, Acc, ModNum)
when Compiled == []; ModNum == ?MAX_MODS ->
+ RemoteLoadData = get_downs_r(Acc),
lists:foreach(
fun(Node) ->
- remote_call(Node,{remote,load_compiled,Acc})
+ remote_call(Node,{remote,load_compiled,RemoteLoadData})
end,
Nodes),
remote_load_compiled(Nodes, Compiled, [], 0);
remote_load_compiled(Nodes, [MF | Rest], Acc, ModNum) ->
remote_load_compiled(
- Nodes, Rest, [get_data_for_remote_loading(MF) | Acc], ModNum + 1).
+ Nodes, Rest,
+ [spawn_job_r(fun() -> get_data_for_remote_loading(MF) end) | Acc],
+ ModNum + 1).
+
+spawn_job_r(Fun) ->
+ spawn_monitor(fun() -> exit(Fun()) end).
+
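+%% Wait for the spawn_job_r workers; a normal result is the 4-tuple produced
+%% by get_data_for_remote_loading/1, and any other exit reason is propagated
+%% so that the server does not hang.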
+get_downs_r([]) ->
+ [];
+get_downs_r(Mons) ->
+ receive
+ {'DOWN', Ref, _Type, Pid, R={_,_,_,_}} ->
+ [R|get_downs_r(lists:delete({Pid,Ref},Mons))];
+ {'DOWN', Ref, _Type, Pid, Reason} = Down ->
+ case lists:member({Pid,Ref},Mons) of
+ true ->
+ %% Something went really wrong - don't hang!
+ exit(Reason);
+ false ->
+ %% This should be handled somewhere else
+ self() ! Down,
+ get_downs_r(Mons)
+ end
+ end.
+
%% Read all data needed for loading a cover compiled module on a remote node
%% Binary is the beam code for the module and InitialTable is the initial
@@ -1113,11 +1209,11 @@ remote_reset(Module,Nodes) ->
Nodes).
%% Collect data from remote nodes - used for analyse or stop(Node)
-remote_collect(Module,Nodes,Stop) ->
+remote_collect(Modules,Nodes,Stop) ->
pmap(fun(Node) ->
?SPAWN_DBG(remote_collect,
- {Module, Nodes, Stop}),
- do_collection(Node, Module, Stop)
+ {Modules, Nodes, Stop}),
+ do_collection(Node, Modules, Stop)
end,
Nodes).
@@ -1138,8 +1234,9 @@ do_collection(Node, Module, Stop) ->
collector_proc() ->
?SPAWN_DBG(collector_proc, []),
receive
- {chunk,Chunk} ->
+ {chunk,Chunk,From} ->
insert_in_collection_table(Chunk),
+ From ! continue,
collector_proc();
done ->
ok
@@ -1259,6 +1356,19 @@ add_compiled(Module, File, [H|Compiled]) ->
add_compiled(Module, File, []) ->
[{Module,File}].
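+%% Partition Modules into cover-compiled, imported and not-cover-compiled
+%% sets; the last one is returned as errors.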
+are_loaded([Module|Modules], State, Loaded, Imported, Error) ->
+ try is_loaded(Module,State) of
+ {loaded,File} ->
+ are_loaded(Modules, State, [{Module,File}|Loaded], Imported, Error);
+ {imported,File,_} ->
+ are_loaded(Modules, State, Loaded, [{Module,File}|Imported], Error)
+ catch throw:_ ->
+ are_loaded(Modules, State, Loaded, Imported,
+ [{not_cover_compiled,Module}|Error])
+ end;
+are_loaded([], _State, Loaded, Imported, Error) ->
+ {Loaded, Imported, Error}.
+
is_loaded(Module, State) ->
case get_file(Module, State#main_state.compiled) of
{ok, File} ->
@@ -1333,18 +1443,75 @@ get_compiled_still_loaded(Nodes,Compiled0) ->
%%%--Compilation---------------------------------------------------------
-%% do_compile(File, Options) -> {ok,Module} | {error,Error}
-do_compile(File, UserOptions) ->
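+%% Cover-compile a list of modules from .beam in parallel; each entry is
+%% either {ok,Module,BeamFile} or an error term that is passed through.
+%% Successes are loaded on all remote nodes and folded into the server state.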
+do_compile_beams(ModsAndFiles, State) ->
+ Result0 = pmap(fun({ok,Module,File}) ->
+ do_compile_beam(Module,File,State);
+ (Error) ->
+ Error
+ end,
+ ModsAndFiles),
+ Compiled = [{M,F} || {ok,M,F} <- Result0],
+ remote_load_compiled(State#main_state.nodes,Compiled),
+ fix_state_and_result(Result0,State,[]).
+
+do_compile_beam(Module,BeamFile0,State) ->
+ case get_beam_file(Module,BeamFile0,State#main_state.compiled) of
+ {ok,BeamFile} ->
+ UserOptions = get_compile_options(Module,BeamFile),
+ case do_compile_beam1(Module,BeamFile,UserOptions) of
+ {ok, Module} ->
+ {ok,Module,BeamFile};
+ error ->
+ {error, BeamFile};
+ {error,Reason} -> % no abstract code
+ {error, {Reason, BeamFile}}
+ end;
+ {error,no_beam} ->
+ %% The module has first been compiled from .erl, and now
+ %% someone tries to compile it from .beam
+ {error,{already_cover_compiled,no_beam_found,Module}}
+ end.
+
+fix_state_and_result([{ok,Module,BeamFile}|Rest],State,Acc) ->
+ Compiled = add_compiled(Module,BeamFile,State#main_state.compiled),
+ Imported = remove_imported(Module,State#main_state.imported),
+ NewState = State#main_state{compiled=Compiled,imported=Imported},
+ fix_state_and_result(Rest,NewState,[{ok,Module}|Acc]);
+fix_state_and_result([Error|Rest],State,Acc) ->
+ fix_state_and_result(Rest,State,[Error|Acc]);
+fix_state_and_result([],State,Acc) ->
+ {lists:reverse(Acc),State}.
+
+
+do_compile(Files, Options, State) ->
+ Result0 = pmap(fun(File) ->
+ do_compile(File, Options)
+ end,
+ Files),
+ Compiled = [{M,F} || {ok,M,F} <- Result0],
+ remote_load_compiled(State#main_state.nodes,Compiled),
+ fix_state_and_result(Result0,State,[]).
+
+do_compile(File, Options) ->
+ case do_compile1(File, Options) of
+ {ok, Module} ->
+ {ok,Module,File};
+ error ->
+ {error,File}
+ end.
+
+%% do_compile1(File, Options) -> {ok,Module} | error
+do_compile1(File, UserOptions) ->
Options = [debug_info,binary,report_errors,report_warnings] ++ UserOptions,
case compile:file(File, Options) of
{ok, Module, Binary} ->
- do_compile_beam(Module,Binary,UserOptions);
+ do_compile_beam1(Module,Binary,UserOptions);
error ->
error
end.
%% Beam is a binary or a .beam file name
-do_compile_beam(Module,Beam,UserOptions) ->
+do_compile_beam1(Module,Beam,UserOptions) ->
%% Clear database
do_clear(Module),
@@ -1459,18 +1626,18 @@ expand({clause,Line,Pattern,Guards,Body}, Vs, N) ->
expand({op,_Line,'andalso',ExprL,ExprR}, Vs, N) ->
{ExpandedExprL,N2} = expand(ExprL, Vs, N),
{ExpandedExprR,N3} = expand(ExprR, Vs, N2),
- LineL = element(2, ExpandedExprL),
+ Anno = element(2, ExpandedExprL),
{bool_switch(ExpandedExprL,
ExpandedExprR,
- {atom,LineL,false},
+ {atom,Anno,false},
Vs, N3),
N3 + 1};
expand({op,_Line,'orelse',ExprL,ExprR}, Vs, N) ->
{ExpandedExprL,N2} = expand(ExprL, Vs, N),
{ExpandedExprR,N3} = expand(ExprR, Vs, N2),
- LineL = element(2, ExpandedExprL),
+ Anno = element(2, ExpandedExprL),
{bool_switch(ExpandedExprL,
- {atom,LineL,true},
+ {atom,Anno,true},
ExpandedExprR,
Vs, N3),
N3 + 1};
@@ -1579,7 +1746,7 @@ munge_body(Expr, Vars) ->
munge_body([Expr|Body], Vars, MungedBody, LastExprBumpLines) ->
%% Here is the place to add a call to cover:bump/6!
- Line = element(2, Expr),
+ Line = erl_anno:line(element(2, Expr)),
Lines = Vars#vars.lines,
case lists:member(Line,Lines) of
true -> % already a bump at this line
@@ -1715,17 +1882,18 @@ fix_cls([Cl | Cls], Line, Bump) ->
false ->
{clause,CL,P,G,Body} = Cl,
UniqueVarName = list_to_atom(lists:concat(["$cover$ ",Line])),
- V = {var,0,UniqueVarName},
+ A = erl_anno:new(0),
+ V = {var,A,UniqueVarName},
[Last|Rest] = lists:reverse(Body),
- Body1 = lists:reverse(Rest, [{match,0,V,Last},Bump,V]),
+ Body1 = lists:reverse(Rest, [{match,A,V,Last},Bump,V]),
[{clause,CL,P,G,Body1} | fix_cls(Cls, Line, Bump)]
end.
bumps_line(E, L) ->
try bumps_line1(E, L) catch true -> true end.
-bumps_line1({call,0,{remote,0,{atom,0,ets},{atom,0,update_counter}},
- [{atom,0,?COVER_TABLE},{tuple,0,[_,_,_,_,_,{integer,0,Line}]},_]},
+bumps_line1({call,_,{remote,_,{atom,_,ets},{atom,_,update_counter}},
+ [{atom,_,?COVER_TABLE},{tuple,_,[_,_,_,_,_,{integer,_,Line}]},_]},
Line) ->
throw(true);
bumps_line1([E | Es], Line) ->
@@ -1739,15 +1907,16 @@ bumps_line1(_, _) ->
%%% End of fix of last expression.
bump_call(Vars, Line) ->
- {call,0,{remote,0,{atom,0,ets},{atom,0,update_counter}},
- [{atom,0,?COVER_TABLE},
- {tuple,0,[{atom,0,?BUMP_REC_NAME},
- {atom,0,Vars#vars.module},
- {atom,0,Vars#vars.function},
- {integer,0,Vars#vars.arity},
- {integer,0,Vars#vars.clause},
- {integer,0,Line}]},
- {integer,0,1}]}.
+ A = erl_anno:new(0),
+ {call,A,{remote,A,{atom,A,ets},{atom,A,update_counter}},
+ [{atom,A,?COVER_TABLE},
+ {tuple,A,[{atom,A,?BUMP_REC_NAME},
+ {atom,A,Vars#vars.module},
+ {atom,A,Vars#vars.function},
+ {integer,A,Vars#vars.arity},
+ {integer,A,Vars#vars.clause},
+ {integer,A,Line}]},
+ {integer,A,1}]}.
munge_expr({match,Line,ExprL,ExprR}, Vars) ->
{MungedExprL, Vars2} = munge_expr(ExprL, Vars),
@@ -1915,10 +2084,21 @@ common_elems(L1, L2) ->
collect(Nodes) ->
%% local node
AllClauses = ets:tab2list(?COVER_CLAUSE_TABLE),
- pmap(fun move_modules/1,AllClauses),
-
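+    %% Run the local data move and the remote collection concurrently and
+    %% wait for both.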
+ Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,AllClauses) end),
+
+ %% remote nodes
+ Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end),
+ get_downs([Mon1,Mon2]).
+
+%% Collect data for a list of modules
+collect(Modules,Nodes) ->
+ MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Modules],
+ Clauses = ets:select(?COVER_CLAUSE_TABLE,MS),
+ Mon1 = spawn_monitor(fun() -> pmap(fun move_modules/1,Clauses) end),
+
%% remote nodes
- remote_collect('_',Nodes,false).
+ Mon2 = spawn_monitor(fun() -> remote_collect('_',Nodes,false) end),
+ get_downs([Mon1,Mon2]).
%% Collect data for one module
collect(Module,Clauses,Nodes) ->
@@ -1926,25 +2106,26 @@ collect(Module,Clauses,Nodes) ->
move_modules({Module,Clauses}),
%% remote nodes
- remote_collect(Module,Nodes,false).
+ remote_collect([Module],Nodes,false).
%% When analysing, the data from the local ?COVER_TABLE is moved to the
%% ?COLLECTION_TABLE, and the entries in ?COVER_TABLE are reset as they are moved.
move_modules({Module,Clauses}) ->
ets:insert(?COLLECTION_CLAUSE_TABLE,{Module,Clauses}),
- move_clauses(Clauses).
+ Pattern = {#bump{module=Module, _='_'}, '_'},
+ MatchSpec = [{Pattern,[],['$_']}],
+ Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
+ do_move_module(Match).
-move_clauses([{M,F,A,C,_L}|Clauses]) ->
- Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'},
- Bumps = ets:match_object(?COVER_TABLE,Pattern),
+do_move_module({Bumps,Continuation}) ->
lists:foreach(fun({Key,Val}) ->
ets:insert(?COVER_TABLE, {Key,0}),
insert_in_collection_table(Key,Val)
end,
Bumps),
- move_clauses(Clauses);
-move_clauses([]) ->
+ do_move_module(ets:select(Continuation));
+do_move_module('$end_of_table') ->
ok.
%% Given a .beam file, find the .erl file. Look first in same directory as
@@ -2002,6 +2183,26 @@ splice(BeamDir, SrcFile) ->
revsplit(Path) ->
lists:reverse(filename:split(Path)).
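+%% Analyse a list of modules: collect data for the loaded ones, then run
+%% do_analyse/4 in parallel over everything found in the collection table.
+%% Modules that are neither loaded nor imported end up in the error list.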
+analyse_list(Modules, Analysis, Level, State) ->
+ {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []),
+ Loaded = [M || {M,_} <- LoadedMF],
+ Imported = [M || {M,_} <- ImportedMF],
+ collect(Loaded, State#main_state.nodes),
+ MS = [{{'$1','_'},[{'==','$1',M}],['$_']} || M <- Loaded ++ Imported],
+ AllClauses = ets:select(?COLLECTION_CLAUSE_TABLE,MS),
+ Fun = fun({Module,Clauses}) ->
+ do_analyse(Module, Analysis, Level, Clauses)
+ end,
+ {result, lists:flatten(pmap(Fun, AllClauses)), Error}.
+
+analyse_all(Analysis, Level, State) ->
+ collect(State#main_state.nodes),
+ AllClauses = ets:tab2list(?COLLECTION_CLAUSE_TABLE),
+ Fun = fun({Module,Clauses}) ->
+ do_analyse(Module, Analysis, Level, Clauses)
+ end,
+ {result, lists:flatten(pmap(Fun, AllClauses)), []}.
+
do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) ->
analyse_info(Module,State#main_state.imported),
C = case Loaded of
@@ -2016,7 +2217,7 @@ do_parallel_analysis(Module, Analysis, Level, Loaded, From, State) ->
Clauses
end,
R = do_analyse(Module, Analysis, Level, C),
- reply(From, R).
+ reply(From, {ok,R}).
%% do_analyse(Module, Analysis, Level, Clauses)-> {ok,Answer} | {error,Error}
%% Clauses = [{Module,Function,Arity,Clause,Lines}]
@@ -2035,37 +2236,44 @@ do_analyse(Module, Analysis, line, _Clauses) ->
{{Module,L}, N}
end
end,
- Answer = lists:keysort(1, lists:map(Fun, Bumps)),
- {ok, Answer};
-do_analyse(_Module, Analysis, clause, Clauses) ->
- Fun = case Analysis of
- coverage ->
- fun({M,F,A,C,Ls}) ->
- Pattern = {#bump{module=M,function=F,arity=A,
- clause=C},0},
- Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
- NotCov = length(Bumps),
- {{M,F,A,C}, {Ls-NotCov, NotCov}}
- end;
- calls ->
- fun({M,F,A,C,_Ls}) ->
- Pattern = {#bump{module=M,function=F,arity=A,
- clause=C},'_'},
- Bumps = ets:match_object(?COLLECTION_TABLE, Pattern),
- {_Bump, Calls} = hd(lists:keysort(1, Bumps)),
- {{M,F,A,C}, Calls}
- end
- end,
- Answer = lists:map(Fun, Clauses),
- {ok, Answer};
+ lists:keysort(1, lists:map(Fun, Bumps));
+do_analyse(Module, Analysis, clause, _Clauses) ->
+ Pattern = {#bump{module=Module},'_'},
+ Bumps = lists:keysort(1,ets:match_object(?COLLECTION_TABLE, Pattern)),
+ analyse_clause(Analysis,Bumps);
do_analyse(Module, Analysis, function, Clauses) ->
- {ok, ClauseResult} = do_analyse(Module, Analysis, clause, Clauses),
- Result = merge_clauses(ClauseResult, merge_fun(Analysis)),
- {ok, Result};
+ ClauseResult = do_analyse(Module, Analysis, clause, Clauses),
+ merge_clauses(ClauseResult, merge_fun(Analysis));
do_analyse(Module, Analysis, module, Clauses) ->
- {ok, FunctionResult} = do_analyse(Module, Analysis, function, Clauses),
+ FunctionResult = do_analyse(Module, Analysis, function, Clauses),
Result = merge_functions(FunctionResult, merge_fun(Analysis)),
- {ok, {Module,Result}}.
+ {Module,Result}.
+
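+%% Bumps are keysorted on the #bump{} key, so all lines of one clause are
+%% consecutive and can be folded into per-clause results in a single pass.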
+analyse_clause(_,[]) ->
+ [];
+analyse_clause(coverage,
+ [{#bump{module=M,function=F,arity=A,clause=C},_}|_]=Bumps) ->
+ analyse_clause_cov(Bumps,{M,F,A,C},0,0,[]);
+analyse_clause(calls,Bumps) ->
+ analyse_clause_calls(Bumps,{x,x,x,x},[]).
+
+analyse_clause_cov([{#bump{module=M,function=F,arity=A,clause=C},N}|Bumps],
+ {M,F,A,C}=Clause,Ls,NotCov,Acc) ->
+ analyse_clause_cov(Bumps,Clause,Ls+1,if N==0->NotCov+1; true->NotCov end,Acc);
+analyse_clause_cov([{#bump{module=M1,function=F1,arity=A1,clause=C1},_}|_]=Bumps,
+ Clause,Ls,NotCov,Acc) ->
+ analyse_clause_cov(Bumps,{M1,F1,A1,C1},0,0,[{Clause,{Ls-NotCov,NotCov}}|Acc]);
+analyse_clause_cov([],Clause,Ls,NotCov,Acc) ->
+ lists:reverse(Acc,[{Clause,{Ls-NotCov,NotCov}}]).
+
+analyse_clause_calls([{#bump{module=M,function=F,arity=A,clause=C},_}|Bumps],
+ {M,F,A,C}=Clause,Acc) ->
+ analyse_clause_calls(Bumps,Clause,Acc);
+analyse_clause_calls([{#bump{module=M1,function=F1,arity=A1,clause=C1},N}|Bumps],
+ _Clause,Acc) ->
+ analyse_clause_calls(Bumps,{M1,F1,A1,C1},[{{M1,F1,A1,C1},N}|Acc]);
+analyse_clause_calls([],_Clause,Acc) ->
+ lists:reverse(Acc).
merge_fun(coverage) ->
fun({Cov1,NotCov1}, {Cov2,NotCov2}) ->
@@ -2094,7 +2302,50 @@ merge_functions([{_MFA,R}|Functions], MFun, Result) ->
merge_functions([], _MFun, Result) ->
Result.
-do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) ->
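+%% Write one analysis file per module; outdir and html are taken from Opts,
+%% and the results are split into {result,Ok,Errors}.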
+analyse_list_to_file(Modules, Opts, State) ->
+ {LoadedMF, ImportedMF, Error} = are_loaded(Modules, State, [], [], []),
+ collect([M || {M,_} <- LoadedMF], State#main_state.nodes),
+ OutDir = proplists:get_value(outdir,Opts),
+ HTML = lists:member(html,Opts),
+ Fun = fun({Module,File}) ->
+ OutFile = outfilename(OutDir,Module,HTML),
+ do_analyse_to_file(Module,File,OutFile,HTML,State)
+ end,
+ {Ok,Error1} = split_ok_error(pmap(Fun, LoadedMF++ImportedMF),[],[]),
+ {result,Ok,Error ++ Error1}.
+
+analyse_all_to_file(Opts, State) ->
+ collect(State#main_state.nodes),
+ AllModules = get_all_modules(State),
+ OutDir = proplists:get_value(outdir,Opts),
+ HTML = lists:member(html,Opts),
+ Fun = fun({Module,File}) ->
+ OutFile = outfilename(OutDir,Module,HTML),
+ do_analyse_to_file(Module,File,OutFile,HTML,State)
+ end,
+ {Ok,Error} = split_ok_error(pmap(Fun, AllModules),[],[]),
+ {result,Ok,Error}.
+
+get_all_modules(State) ->
+ get_all_modules(State#main_state.compiled ++ State#main_state.imported,[]).
+get_all_modules([{Module,File}|Rest],Acc) ->
+ get_all_modules(Rest,[{Module,File}|Acc]);
+get_all_modules([{Module,File,_}|Rest],Acc) ->
+ case lists:keymember(Module,1,Acc) of
+ true -> get_all_modules(Rest,Acc);
+ false -> get_all_modules(Rest,[{Module,File}|Acc])
+ end;
+get_all_modules([],Acc) ->
+ Acc.
+
+split_ok_error([{ok,R}|Result],Ok,Error) ->
+ split_ok_error(Result,[R|Ok],Error);
+split_ok_error([{error,R}|Result],Ok,Error) ->
+ split_ok_error(Result,Ok,[R|Error]);
+split_ok_error([],Ok,Error) ->
+ {Ok,Error}.
+
+do_parallel_analysis_to_file(Module, Opts, Loaded, From, State) ->
File = case Loaded of
{loaded, File0} ->
[{Module,Clauses}] =
@@ -2105,24 +2356,32 @@ do_parallel_analysis_to_file(Module, OutFile, Opts, Loaded, From, State) ->
{imported, File0, _} ->
File0
end,
+ HTML = lists:member(html,Opts),
+ OutFile =
+ case proplists:get_value(outfile,Opts) of
+ undefined ->
+ outfilename(proplists:get_value(outdir,Opts),Module,HTML);
+ F ->
+ F
+ end,
+ reply(From, do_analyse_to_file(Module,File,OutFile,HTML,State)).
+
+do_analyse_to_file(Module,File,OutFile,HTML,State) ->
case find_source(Module, File) of
{beam,_BeamFile} ->
- reply(From, {error,no_source_code_found});
+ {error,{no_source_code_found,Module}};
ErlFile ->
analyse_info(Module,State#main_state.imported),
- HTML = lists:member(html,Opts),
- R = do_analyse_to_file(Module,OutFile,
- ErlFile,HTML),
- reply(From, R)
+ do_analyse_to_file1(Module,OutFile,ErlFile,HTML)
end.
-%% do_analyse_to_file(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error}
+%% do_analyse_to_file1(Module,OutFile,ErlFile) -> {ok,OutFile} | {error,Error}
%% Module = atom()
%% OutFile = ErlFile = string()
-do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
- case file:open(ErlFile, [read]) of
+do_analyse_to_file1(Module, OutFile, ErlFile, HTML) ->
+ case file:open(ErlFile, [read,raw,read_ahead]) of
{ok, InFd} ->
- case file:open(OutFile, [write]) of
+ case file:open(OutFile, [write,raw,delayed_write]) of
{ok, OutFd} ->
if HTML ->
Encoding = encoding(ErlFile),
@@ -2160,9 +2419,14 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
"**************************************"
"\n\n"]),
- print_lines(Module, InFd, OutFd, 1, HTML),
+ Pattern = {#bump{module=Module,line='$1',_='_'},'$2'},
+ MS = [{Pattern,[],[{{'$1','$2'}}]}],
+ CovLines = lists:keysort(1,ets:select(?COLLECTION_TABLE, MS)),
+ print_lines(Module, CovLines, InFd, OutFd, 1, HTML),
- if HTML -> io:format(OutFd,"</pre>\n</body>\n</html>\n",[]);
+ if
+ HTML ->
+ file:write(OutFd, "</pre>\n</body>\n</html>\n");
true -> ok
end,
@@ -2179,21 +2443,19 @@ do_analyse_to_file(Module, OutFile, ErlFile, HTML) ->
{error, {file, ErlFile, Reason}}
end.
-print_lines(Module, InFd, OutFd, L, HTML) ->
- case io:get_line(InFd, '') of
+
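+%% CovLines is a list of {LineNo,Calls} sorted on LineNo, so only its head
+%% has to be compared with the current source line number.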
+print_lines(Module, CovLines, InFd, OutFd, L, HTML) ->
+ case file:read_line(InFd) of
eof ->
ignore;
- "%"++_=Line -> %Comment line - not executed.
- io:put_chars(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]),
- print_lines(Module, InFd, OutFd, L+1, HTML);
- RawLine ->
+ {ok,"%"++_=Line} -> %Comment line - not executed.
+ file:write(OutFd, [tab(),escape_lt_and_gt(Line, HTML)]),
+ print_lines(Module, CovLines, InFd, OutFd, L+1, HTML);
+ {ok,RawLine} ->
Line = escape_lt_and_gt(RawLine,HTML),
- Pattern = {#bump{module=Module,line=L},'$1'},
- case ets:match(?COLLECTION_TABLE, Pattern) of
- [] ->
- io:put_chars(OutFd, [tab(),Line]);
- Ns ->
- N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns),
+ case CovLines of
+ [{L,N}|CovLines1] ->
+ %% N = lists:foldl(fun([Ni], Nacc) -> Nacc+Ni end, 0, Ns),
if
N=:=0, HTML=:=true ->
LineNoNL = Line -- "\n",
@@ -2201,19 +2463,22 @@ print_lines(Module, InFd, OutFd, L, HTML) ->
%%Str = string:right("0", 6, 32),
RedLine = ["<font color=red>",Str,fill1(),
LineNoNL,"</font>\n"],
- io:put_chars(OutFd, RedLine);
+ file:write(OutFd, RedLine);
N<1000000 ->
Str = string:right(integer_to_list(N), 6, 32),
- io:put_chars(OutFd, [Str,fill1(),Line]);
+ file:write(OutFd, [Str,fill1(),Line]);
N<10000000 ->
Str = integer_to_list(N),
- io:put_chars(OutFd, [Str,fill2(),Line]);
+ file:write(OutFd, [Str,fill2(),Line]);
true ->
Str = integer_to_list(N),
- io:put_chars(OutFd, [Str,fill3(),Line])
- end
- end,
- print_lines(Module, InFd, OutFd, L+1, HTML)
+ file:write(OutFd, [Str,fill3(),Line])
+ end,
+ print_lines(Module, CovLines1, InFd, OutFd, L+1, HTML);
+ _ ->
+ file:write(OutFd, [tab(),Line]),
+ print_lines(Module, CovLines, InFd, OutFd, L+1, HTML)
+ end
end.
tab() -> " | ".
@@ -2223,7 +2488,7 @@ fill3() -> "| ".
%%%--Export--------------------------------------------------------------
do_export(Module, OutFile, From, State) ->
- case file:open(OutFile,[write,binary,raw]) of
+ case file:open(OutFile,[write,binary,raw,delayed_write]) of
{ok,Fd} ->
Reply =
case Module of
@@ -2362,21 +2627,21 @@ do_reset_collection_table(Module) ->
ets:match_delete(?COLLECTION_TABLE, {#bump{module=Module},'_'}).
%% do_reset(Module) -> ok
-%% The reset is done on a per-clause basis to avoid building
+%% The reset is done in chunks of ?CHUNK_SIZE bumps to avoid building
%% long lists in the case of very large modules
do_reset(Module) ->
- [{Module,Clauses}] = ets:lookup(?COVER_CLAUSE_TABLE, Module),
- do_reset2(Clauses).
+ Pattern = {#bump{module=Module, _='_'}, '$1'},
+ MatchSpec = [{Pattern,[{'=/=','$1',0}],['$_']}],
+ Match = ets:select(?COVER_TABLE,MatchSpec,?CHUNK_SIZE),
+ do_reset2(Match).
-do_reset2([{M,F,A,C,_L}|Clauses]) ->
- Pattern = {#bump{module=M, function=F, arity=A, clause=C}, '_'},
- Bumps = ets:match_object(?COVER_TABLE, Pattern),
+do_reset2({Bumps,Continuation}) ->
lists:foreach(fun({Bump,_N}) ->
ets:insert(?COVER_TABLE, {Bump,0})
end,
Bumps),
- do_reset2(Clauses);
-do_reset2([]) ->
+ do_reset2(ets:select(Continuation));
+do_reset2('$end_of_table') ->
ok.
do_clear(Module) ->
@@ -2419,31 +2684,43 @@ escape_lt_and_gt1([],Acc) ->
escape_lt_and_gt1([H|T],Acc) ->
escape_lt_and_gt1(T,[H|Acc]).
-pmap(Fun, List) ->
- pmap(Fun, List, 20).
-pmap(Fun, List, Limit) ->
- pmap(Fun, List, [], Limit, 0, []).
-pmap(Fun, [E | Rest], Pids, Limit, Cnt, Acc) when Cnt < Limit ->
- Collector = self(),
- Pid = spawn_link(fun() ->
- ?SPAWN_DBG(pmap,E),
- Collector ! {res,self(),Fun(E)}
- end),
- erlang:monitor(process, Pid),
- pmap(Fun, Rest, Pids ++ [Pid], Limit, Cnt + 1, Acc);
-pmap(Fun, List, [Pid | Pids], Limit, Cnt, Acc) ->
- receive
- {'DOWN', _Ref, process, X, _} when is_pid(X) ->
- pmap(Fun, List, [Pid | Pids], Limit, Cnt - 1, Acc);
- {res, Pid, Res} ->
- pmap(Fun, List, Pids, Limit, Cnt, [Res | Acc])
- end;
-pmap(_Fun, [], [], _Limit, 0, Acc) ->
- lists:reverse(Acc);
-pmap(Fun, [], [], Limit, Cnt, Acc) ->
+%%%--Internal functions for parallelization------------------------------
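+%% pmap/2 runs Fun over List using about two worker processes per scheduler;
+%% each worker takes a chunk of NPerProc elements and exits with
+%% {pmap_done,Results}, which pmap_collect/2 picks up from the 'DOWN' messages.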
+pmap(Fun,List) ->
+ NTot = length(List),
+ NProcs = erlang:system_info(schedulers) * 2,
+ NPerProc = (NTot div NProcs) + 1,
+ Mons = pmap_spawn(Fun,NPerProc,List,[]),
+ pmap_collect(Mons,[]).
+
+pmap_spawn(_,_,[],Mons) ->
+ Mons;
+pmap_spawn(Fun,NPerProc,List,Mons) ->
+ {L1,L2} = if length(List)>=NPerProc -> lists:split(NPerProc,List);
+ true -> {List,[]} % last chunk
+ end,
+ Mon =
+ spawn_monitor(
+ fun() ->
+ exit({pmap_done,lists:map(Fun,L1)})
+ end),
+ pmap_spawn(Fun,NPerProc,L2,[Mon|Mons]).
+
+pmap_collect([],Acc) ->
+ lists:append(Acc);
+pmap_collect(Mons,Acc) ->
receive
- {'DOWN', _Ref, process, X, _} when is_pid(X) ->
- pmap(Fun, [], [], Limit, Cnt - 1, Acc)
+ {'DOWN', Ref, process, Pid, {pmap_done,Result}} ->
+ pmap_collect(lists:delete({Pid,Ref},Mons),[Result|Acc]);
+ {'DOWN', Ref, process, Pid, Reason} = Down ->
+ case lists:member({Pid,Ref},Mons) of
+ true ->
+ %% Something went really wrong - don't hang!
+ exit(Reason);
+ false ->
+ %% This should be handled somewhere else
+ self() ! Down,
+ pmap_collect(Mons,Acc)
+ end
end.
%%%-----------------------------------------------------------------
diff --git a/lib/tools/src/cover_web.erl b/lib/tools/src/cover_web.erl
index 69f2f3b1aa..75bb45c659 100644
--- a/lib/tools/src/cover_web.erl
+++ b/lib/tools/src/cover_web.erl
@@ -734,7 +734,7 @@ generate_filename(Prefix) ->
filename:join(Cwd,Prefix ++ "_" ++ ts() ++ ".coverdata").
ts() ->
- {{Y,M,D},{H,Min,S}} = calendar:now_to_local_time(now()),
+ {{Y,M,D},{H,Min,S}} = calendar:now_to_local_time(erlang:timestamp()),
io_lib:format("~4.4.0w~2.2.0w~2.2.0w-~2.2.0w~2.2.0w~2.2.0w",
[Y,M,D,H,Min,S]).
diff --git a/lib/tools/src/eprof.erl b/lib/tools/src/eprof.erl
index bfbbefb473..28cf493a5f 100644
--- a/lib/tools/src/eprof.erl
+++ b/lib/tools/src/eprof.erl
@@ -187,7 +187,7 @@ handle_call({profile_start, Rootset, Pattern, {M,F,A}, Opts}, From, #state{fd =
case set_process_trace(true, [Pid|Rootset], Topts) of
true ->
ok = set_pattern_trace(true, Pattern),
- T0 = now(),
+ T0 = erlang:timestamp(),
ok = execute_profiling(Pid),
{noreply, #state{
profiling = true,
@@ -211,7 +211,7 @@ handle_call({profile_start, Rootset, Pattern, undefined, Opts}, From, #state{ fd
case set_process_trace(true, Rootset, Topts) of
true ->
- T0 = now(),
+ T0 = erlang:timestamp(),
ok = set_pattern_trace(true, Pattern),
{reply, profiling, #state{
profiling = true,
@@ -485,20 +485,22 @@ string_bp_mfa([{Mfa, {Count, Time}}|Mfas], Tus, {MfaW, CountW, PercW, TimeW, TpC
erlang:max(TpCW, length(Stpc))
}, [[Smfa, Scount, Sperc, Stime, Stpc] | Strings]).
-print_bp_mfa(Mfas, {_Tn, Tus}, Fd, Opts) ->
+print_bp_mfa(Mfas, {Tn, Tus}, Fd, Opts) ->
Fmfas = filter_mfa(sort_mfa(Mfas, proplists:get_value(sort, Opts)), proplists:get_value(filter, Opts)),
{{MfaW, CountW, PercW, TimeW, TpCW}, Strs} = string_bp_mfa(Fmfas, Tus),
- Ws = {
- erlang:max(length("FUNCTION"), MfaW),
- erlang:max(length("CALLS"), CountW),
- erlang:max(length(" %"), PercW),
- erlang:max(length("TIME"), TimeW),
- erlang:max(length("uS / CALLS"), TpCW)
- },
- format(Fd, Ws, ["FUNCTION", "CALLS", " %", "TIME", "uS / CALLS"]),
- format(Fd, Ws, ["--------", "-----", "---", "----", "----------"]),
-
+ TnStr = s(Tn),
+ TusStr = s(Tus),
+ TuspcStr = s("~.2f", [divide(Tus,Tn)]),
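+    %% Make each column at least as wide as its header, its widest data row
+    %% and the totals row.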
+ Ws = {erlang:max(length("FUNCTION"), MfaW),
+ lists:max([length("CALLS"), CountW, length(TnStr)]),
+ erlang:max(length(" %"), PercW),
+ lists:max([length("TIME"), TimeW, length(TusStr)]),
+ lists:max([length("uS / CALLS"), TpCW, length(TuspcStr)])},
+ format(Fd, Ws, ["FUNCTION", "CALLS", " %", "TIME", "uS / CALLS"]),
+ format(Fd, Ws, ["--------", "-----", "-------", "----", "----------"]),
lists:foreach(fun (String) -> format(Fd, Ws, String) end, Strs),
+ format(Fd, Ws, [lists:duplicate(N,$-)||N <- tuple_to_list(Ws)]),
+ format(Fd, Ws, ["Total:", TnStr, "100.00%", TusStr, TuspcStr]),
ok.
s({M,F,A}) -> s("~w:~w/~w",[M,F,A]);
diff --git a/lib/tools/src/lcnt.erl b/lib/tools/src/lcnt.erl
index f1251fddab..d5ba8aa52f 100644
--- a/lib/tools/src/lcnt.erl
+++ b/lib/tools/src/lcnt.erl
@@ -305,7 +305,7 @@ handle_call({inspect, Lockname, InOpts}, _From, #state{ duration=Duration, locks
{true, true} -> locks_ids(Filtered);
_ -> []
end,
- Combos = combine_classes(Filtered, proplists:get_value(combine, Opts)),
+ Combos = combine_classes(Filtered, proplists:get_value(combine, Opts)),
case proplists:get_value(locations, Opts) of
true ->
lists:foreach(fun
@@ -329,9 +329,8 @@ handle_call({inspect, Lockname, InOpts}, _From, #state{ duration=Duration, locks
end
end, Combos);
_ ->
- Print1 = locks2print(Combos, Duration),
- Print2 = filter_print(Print1, Opts),
- print_lock_information(Print2, proplists:get_value(print, Opts))
+ Print = filter_print(locks2print(Combos, Duration), Opts),
+ print_lock_information(Print, proplists:get_value(print, Opts))
end,
{reply, ok, State};
@@ -357,8 +356,7 @@ handle_call({histogram, Lockname, InOpts}, _From, #state{ duration=Duration, loc
{thresholds, [{tries, -1}, {colls, -1}, {time, -1}]}], Opts),
Prints = locks2print([L], Duration),
print_lock_information(Prints, proplists:get_value(print, Opts1)),
- print_full_histogram(SumStats#stats.hist),
- io:format("~n")
+ print_full_histogram(SumStats#stats.hist)
end, Combos),
{reply, ok, State};
@@ -509,20 +507,23 @@ filter_locks(Locks, Lockname) ->
% 4. max length of locks
filter_print(PLs, Opts) ->
- TLs = threshold_locks(PLs, proplists:get_value(thresholds, Opts, [])),
- SLs = sort_locks(TLs, proplists:get_value(sort, Opts, time)),
- CLs = cut_locks(SLs, proplists:get_value(max_locks, Opts, none)),
- reverse_locks(CLs, not proplists:get_value(reverse,Opts, false)).
-
-sort_locks(Locks, name) -> lists:keysort(#print.name, Locks);
-sort_locks(Locks, id) -> lists:keysort(#print.id, Locks);
-sort_locks(Locks, type) -> lists:keysort(#print.type, Locks);
-sort_locks(Locks, tries) -> lists:keysort(#print.tries, Locks);
-sort_locks(Locks, colls) -> lists:keysort(#print.colls, Locks);
-sort_locks(Locks, ratio) -> lists:keysort(#print.cr, Locks);
-sort_locks(Locks, time) -> lists:keysort(#print.time, Locks);
+ TLs = threshold_locks(PLs, proplists:get_value(thresholds, Opts, [])),
+ SLs = sort_locks(TLs, proplists:get_value(sort, Opts, time)),
+ CLs = cut_locks(SLs, proplists:get_value(max_locks, Opts, none)),
+ reverse_locks(CLs, proplists:get_value(reverse, Opts, false)).
+
+sort_locks(Locks, name) -> reverse_sort_locks(#print.name, Locks);
+sort_locks(Locks, id) -> reverse_sort_locks(#print.id, Locks);
+sort_locks(Locks, type) -> reverse_sort_locks(#print.type, Locks);
+sort_locks(Locks, tries) -> reverse_sort_locks(#print.tries, Locks);
+sort_locks(Locks, colls) -> reverse_sort_locks(#print.colls, Locks);
+sort_locks(Locks, ratio) -> reverse_sort_locks(#print.cr, Locks);
+sort_locks(Locks, time) -> reverse_sort_locks(#print.time, Locks);
sort_locks(Locks, _) -> sort_locks(Locks, time).
+reverse_sort_locks(Ix, Locks) ->
+ lists:reverse(lists:keysort(Ix, Locks)).
+
% cut locks not above certain thresholds
threshold_locks(Locks, Thresholds) ->
Tries = proplists:get_value(tries, Thresholds, -1),
@@ -647,15 +648,19 @@ format_histogram(Tup) when is_tuple(Tup) ->
_ -> string_histogram([case V of 0 -> 0; _ -> V/Max end || V <- Vs])
end.
-string_histogram([0|Vs]) ->
- [$\s|string_histogram(Vs)];
-string_histogram([V|Vs]) when V > 0.66 ->
- [$X|string_histogram(Vs)];
-string_histogram([V|Vs]) when V > 0.33 ->
- [$x|string_histogram(Vs)];
-string_histogram([_|Vs]) ->
- [$.|string_histogram(Vs)];
-string_histogram([]) -> [].
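+%% Render the histogram and delimit it with '|' at both ends so that its
+%% full width is visible in the printout.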
+string_histogram(Vs) ->
+ [$||histogram_values_to_string(Vs,$|)].
+
+histogram_values_to_string([0|Vs],End) ->
+ [$\s|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([V|Vs],End) when V > 0.66 ->
+ [$X|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([V|Vs],End) when V > 0.33 ->
+ [$x|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([_|Vs],End) ->
+ [$.|histogram_values_to_string(Vs,End)];
+histogram_values_to_string([],End) ->
+ [End].
%% state making
@@ -778,7 +783,7 @@ auto_print_width(Locks, Print) ->
({print,print}, Out) -> [print|Out];
({Str, Len}, Out) -> [erlang:min(erlang:max(length(s(Str))+1,Len),80)|Out]
end, [], lists:zip(tuple_to_list(L), tuple_to_list(Max)))))
- end, #print{ id = 4, type = 5, entry = 5, name = 6, tries = 8, colls = 13, cr = 16, time = 11, dtr = 14, hist=20 },
+ end, #print{ id=4, type=5, entry=5, name=6, tries=8, colls=13, cr=16, time=11, dtr=14, hist=20 },
Locks),
% Setup the offsets for later pruning
Offsets = [
@@ -820,7 +825,7 @@ print_header(Opts) ->
cr = "collisions [%]",
time = "time [us]",
dtr = "duration [%]",
- hist = "histogram"
+ hist = "histogram [log2(us)]"
},
Divider = #print{
name = lists:duplicate(1 + length(Header#print.name), 45),
@@ -863,9 +868,9 @@ format_lock(L, [Opt|Opts]) ->
{time, W} -> [{space, W, s(L#print.time) } | format_lock(L, Opts)];
duration -> [{space, 20, s(L#print.dtr) } | format_lock(L, Opts)];
{duration, W} -> [{space, W, s(L#print.dtr) } | format_lock(L, Opts)];
- histogram -> [{space, 0, s(L#print.hist) } | format_lock(L, Opts)];
- {histogram, W} -> [{space, W, s(L#print.hist) } | format_lock(L, Opts)];
- _ -> format_lock(L, Opts)
+ histogram -> [{space, 20, s(L#print.hist) } | format_lock(L, Opts)];
+ {histogram, W} -> [{left, W - length(s(L#print.hist)) - 1, s(L#print.hist)} | format_lock(L, Opts)];
+ _ -> format_lock(L, Opts)
end.
print_state_information(#state{locks = Locks} = State) ->
@@ -926,6 +931,7 @@ s(T) -> term2string(T).
strings(Strings) -> strings(Strings, []).
strings([], Out) -> Out;
strings([{space, N, S} | Ss], Out) -> strings(Ss, Out ++ term2string(term2string("~~~ws", [N]), [S]));
+strings([{left, N, S} | Ss], Out) -> strings(Ss, Out ++ term2string(term2string(" ~~s~~~ws", [N]), [S,""]));
strings([{format, Format, S} | Ss], Out) -> strings(Ss, Out ++ term2string(Format, [S]));
strings([S|Ss], Out) -> strings(Ss, Out ++ term2string("~ts", [S])).
diff --git a/lib/tools/src/tags.erl b/lib/tools/src/tags.erl
index e3cc51cdb2..e25db2eb1b 100644
--- a/lib/tools/src/tags.erl
+++ b/lib/tools/src/tags.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -297,15 +297,16 @@ word_char(_) -> false.
%% Check the options `outfile' and `outdir'.
open_out(Options) ->
+ Opts = [write, {encoding, unicode}],
case lists:keysearch(outfile, 1, Options) of
{value, {outfile, File}} ->
- file:open(File, [write]);
+ file:open(File, Opts);
_ ->
case lists:keysearch(outdir, 1, Options) of
{value, {outdir, Dir}} ->
- file:open(filename:join(Dir, "TAGS"), [write]);
+ file:open(filename:join(Dir, "TAGS"), Opts);
_ ->
- file:open("TAGS", [write])
+ file:open("TAGS", Opts)
end
end.
diff --git a/lib/tools/src/tools.app.src b/lib/tools/src/tools.app.src
index ec5b6f3a82..8458941761 100644
--- a/lib/tools/src/tools.app.src
+++ b/lib/tools/src/tools.app.src
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2012. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -40,8 +40,8 @@
{env, [{file_util_search_methods,[{"", ""}, {"ebin", "esrc"}, {"ebin", "src"}]}
]
},
- {runtime_dependencies, ["webtool-0.8.10","stdlib-2.0","runtime_tools-1.8.14",
- "kernel-3.0","inets-5.10","erts-6.0",
+ {runtime_dependencies, ["webtool-0.8.10","stdlib-2.5","runtime_tools-1.8.14",
+ "kernel-3.0","inets-5.10","erts-7.0",
"compiler-5.0"]}
]
}.
diff --git a/lib/tools/src/xref.hrl b/lib/tools/src/xref.hrl
index fa8c5c746d..5e79c477f3 100644
--- a/lib/tools/src/xref.hrl
+++ b/lib/tools/src/xref.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -22,6 +22,8 @@
-define(VAR_EXPR, '$F_EXPR').
-define(MOD_EXPR, '$M_EXPR').
+-define(XREF_END_LINE, (1 bsl 23)).
+
%%% Filenames are stored as directory and basename. A lot of heap can
%%% be saved by keeping only one (or few) copy of the directory name.
diff --git a/lib/tools/src/xref_compiler.erl b/lib/tools/src/xref_compiler.erl
index c4b5c04c12..c914a76bf0 100644
--- a/lib/tools/src/xref_compiler.erl
+++ b/lib/tools/src/xref_compiler.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -924,7 +924,7 @@ format_parse_error(["invalid_operator", Op], Line) ->
format_parse_error(Error, Line) ->
io_lib:format("Parse error~s: ~ts~n", [Line, lists:flatten(Error)]).
-format_line(-1) ->
+format_line(?XREF_END_LINE) ->
" at end of string";
format_line(0) ->
"";
diff --git a/lib/tools/src/xref_reader.erl b/lib/tools/src/xref_reader.erl
index 142d28ebe6..723fb729cd 100644
--- a/lib/tools/src/xref_reader.erl
+++ b/lib/tools/src/xref_reader.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -77,17 +77,18 @@ forms([], S) ->
form({attribute, Line, xref, Calls}, S) -> % experimental
#xrefr{module = M, function = Fun,
lattrs = L, xattrs = X, battrs = B} = S,
- attr(Calls, Line, M, Fun, L, X, B, S);
+ attr(Calls, erl_anno:line(Line), M, Fun, L, X, B, S);
form({attribute, _Line, _Attr, _Val}, S) ->
S;
-form({function, 0, module_info, 0, _Clauses}, S) ->
+form({function, _, module_info, 0, _Clauses}, S) ->
S;
-form({function, 0, module_info, 1, _Clauses}, S) ->
+form({function, _, module_info, 1, _Clauses}, S) ->
S;
-form({function, Line, Name, Arity, Clauses}, S) ->
+form({function, Anno, Name, Arity, Clauses}, S) ->
MFA0 = {S#xrefr.module, Name, Arity},
MFA = adjust_arity(S, MFA0),
S1 = S#xrefr{function = MFA},
+ Line = erl_anno:line(Anno),
S2 = S1#xrefr{def_at = [{MFA,Line} | S#xrefr.def_at]},
S3 = clauses(Clauses, S2),
S3#xrefr{function = []}.
@@ -305,10 +306,14 @@ fun_args(apply2, [FunArg, Args]) -> {FunArg, Args};
fun_args(1, [FunArg | Args]) -> {FunArg, Args};
fun_args(2, [_Node, FunArg | Args]) -> {FunArg, Args}.
-list2term([A | As]) ->
- {cons, 0, A, list2term(As)};
-list2term([]) ->
- {nil, 0}.
+list2term(L) ->
+ A = erl_anno:new(0),
+ list2term(L, A).
+
+list2term([A | As], Anno) ->
+ {cons, Anno, A, list2term(As)};
+list2term([], Anno) ->
+ {nil, Anno}.
term2list({cons, _Line, H, T}, L, S) ->
term2list(T, [H | L], S);
@@ -335,10 +340,11 @@ handle_call(Locality, Module, Name, Arity, Line, S) ->
handle_call(Locality, To, Line, S, false)
end.
-handle_call(Locality, To0, Line, S, IsUnres) ->
+handle_call(Locality, To0, Anno, S, IsUnres) ->
From = S#xrefr.function,
To = adjust_arity(S, To0),
Call = {From, To},
+ Line = erl_anno:line(Anno),
CallAt = {Call, Line},
S1 = if
IsUnres ->
diff --git a/lib/tools/src/xref_scanner.erl b/lib/tools/src/xref_scanner.erl
index 990f8aa87b..4c93033d7c 100644
--- a/lib/tools/src/xref_scanner.erl
+++ b/lib/tools/src/xref_scanner.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2009. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -19,6 +19,8 @@
-module(xref_scanner).
+-include("xref.hrl").
+
-export([scan/1]).
scan(Chars) ->
@@ -77,7 +79,7 @@ lex([V={var,N,Var} | L]) ->
lex([T | Ts]) ->
[T | lex(Ts)];
lex([]) ->
- [{'$end', -1}].
+ [{'$end', erl_anno:new(?XREF_END_LINE)}].
is_type('Rel') -> true;
is_type('App') -> true;
diff --git a/lib/tools/test/cover_SUITE.erl b/lib/tools/test/cover_SUITE.erl
index 80807b1d38..368fa6c3d1 100644
--- a/lib/tools/test/cover_SUITE.erl
+++ b/lib/tools/test/cover_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -33,6 +33,8 @@
-export([do_coverage/1]).
+-export([distribution_performance/1]).
+
-include_lib("test_server/include/test_server.hrl").
%%----------------------------------------------------------------------
@@ -170,10 +172,15 @@ compile(Config) when is_list(Config) ->
?line {ok, CWD} = file:get_cwd(),
?line Result2 = cover:compile_directory(CWD),
?line SortedResult = lists:sort(Result2),
- ?line [{error,_DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult,
+ ?line [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult,
?line [{ok,e}] = cover:compile_directory("d1"),
?line {error,enoent} = cover:compile_directory("d2"),
+ [] = cover:compile([]),
+ Result21 = cover:compile([a,b,"cc.erl",d,"f"]),
+ SortedResult21 = lists:sort(Result21),
+ [{error,DFile},{ok,a},{ok,b},{ok,cc},{ok,f}] = SortedResult21,
+
?line {ok,a} = cover:compile(a),
?line {ok,b} = compile:file(b),
?line code:purge(b),
@@ -213,8 +220,14 @@ compile(Config) when is_list(Config) ->
?line {error,non_existing} = cover:compile_beam(z),
?line [{ok,y}] = cover:compile_beam_directory("d"),
?line Result3 = lists:sort(cover:compile_beam_directory()),
- ?line [{error,{no_abstract_code,_XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3,
+ ?line [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = Result3,
?line {error,enoent} = cover:compile_beam_directory("d2"),
+
+ [] = cover:compile_beam([]),
+ Result31 = cover:compile_beam([crypt,"v.beam",w,"x"]),
+ SortedResult31 = lists:sort(Result31),
+ [{error,{no_abstract_code,XBeam}},{ok,crypt},{ok,v},{ok,w}] = SortedResult31,
+
?line decompile([v,w,y]),
?line Files = lsfiles(),
?line remove(files(Files, ".beam")).
@@ -239,20 +252,22 @@ analyse(Config) when is_list(Config) ->
?line done = a:start(5),
- ?line {ok, {a,{17,2}}} = cover:analyse(a, coverage, module),
- ?line {ok, [{{a,start,1},{6,0}},
- {{a,stop,1},{0,1}},
- {{a,pong,1},{1,0}},
- {{a,loop,3},{5,1}},
- {{a,trycatch,1},{4,0}},
- {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a, coverage, function),
- ?line {ok, [{{a,start,1,1},{6,0}},
- {{a,stop,1,1},{0,1}},
- {{a,pong,1,1},{1,0}},
+ {ok, {a,{17,2}}=ACovMod} = cover:analyse(a, coverage, module),
+ {ok, [{{a,exit_kalle,0},{1,0}},
+ {{a,loop,3},{5,1}},
+ {{a,pong,1},{1,0}},
+ {{a,start,1},{6,0}},
+ {{a,stop,1},{0,1}},
+ {{a,trycatch,1},{4,0}}]=ACovFunc} =
+ cover:analyse(a, coverage, function),
+ {ok, [{{a,exit_kalle,0,1},{1,0}},
{{a,loop,3,1},{3,1}},
{{a,loop,3,2},{2,0}},
- {{a,trycatch,1,1},{4,0}},
- {{a,exit_kalle,0,1},{1,0}}]} = cover:analyse(a, coverage, clause),
+ {{a,pong,1,1},{1,0}},
+ {{a,start,1,1},{6,0}},
+ {{a,stop,1,1},{0,1}},
+ {{a,trycatch,1,1},{4,0}}]=ACovClause} =
+ cover:analyse(a, coverage, clause),
?line {ok, [{{a,9},{1,0}},
{{a,10},{1,0}},
{{a,11},{1,0}},
@@ -271,22 +286,22 @@ analyse(Config) when is_list(Config) ->
{{a,47},{1,0}},
{{a,49},{1,0}},
{{a,51},{1,0}},
- {{a,55},{1,0}}]} = cover:analyse(a, coverage, line),
-
- ?line {ok, {a,15}} = cover:analyse(a, calls, module),
- ?line {ok, [{{a,start,1},1},
- {{a,stop,1},0},
- {{a,pong,1},5},
- {{a,loop,3},6},
- {{a,trycatch,1},2},
- {{a,exit_kalle,0},1}]} = cover:analyse(a, calls, function),
- ?line {ok, [{{a,start,1,1},1},
- {{a,stop,1,1},0},
- {{a,pong,1,1},5},
- {{a,loop,3,1},5},
- {{a,loop,3,2},1},
- {{a,trycatch,1,1},2},
- {{a,exit_kalle,0,1},1}]} = cover:analyse(a, calls, clause),
+ {{a,55},{1,0}}]=ACovLine} = cover:analyse(a, coverage, line),
+
+ {ok, {a,15}=ACallsMod} = cover:analyse(a, calls, module),
+ {ok, [{{a,exit_kalle,0},1},
+ {{a,loop,3},6},
+ {{a,pong,1},5},
+ {{a,start,1},1},
+ {{a,stop,1},0},
+ {{a,trycatch,1},2}]=ACallsFunc} = cover:analyse(a, calls, function),
+ {ok, [{{a,exit_kalle,0,1},1},
+ {{a,loop,3,1},5},
+ {{a,loop,3,2},1},
+ {{a,pong,1,1},5},
+ {{a,start,1,1},1},
+ {{a,stop,1,1},0},
+ {{a,trycatch,1,1},2}]=ACallsClause} = cover:analyse(a, calls, clause),
?line {ok, [{{a,9},1},
{{a,10},1},
{{a,11},1},
@@ -305,27 +320,85 @@ analyse(Config) when is_list(Config) ->
{{a,47},1},
{{a,49},1},
{{a,51},2},
- {{a,55},1}]} = cover:analyse(a, calls, line),
-
- ?line {ok, [{{a,start,1},{6,0}},
- {{a,stop,1},{0,1}},
- {{a,pong,1},{1,0}},
- {{a,loop,3},{5,1}},
- {{a,trycatch,1},{4,0}},
- {{a,exit_kalle,0},{1,0}}]} = cover:analyse(a),
- ?line {ok, {a,{17,2}}} = cover:analyse(a, module),
- ?line {ok, [{{a,start,1},1},
- {{a,stop,1},0},
- {{a,pong,1},5},
- {{a,loop,3},6},
- {{a,trycatch,1},2},
- {{a,exit_kalle,0},1}]} = cover:analyse(a, calls),
+ {{a,55},1}]=ACallsLine} = cover:analyse(a, calls, line),
+
+ {ok,ACovFunc} = cover:analyse(a),
+ {ok,ACovMod} = cover:analyse(a, module),
+ {ok,ACallsFunc} = cover:analyse(a, calls),
?line {ok, "a.COVER.out"} = cover:analyse_to_file(a),
?line {ok, "e.COVER.out"} = cover:analyse_to_file(e),
?line {ok, "a.COVER.html"} = cover:analyse_to_file(a,[html]),
?line {ok, "e.COVER.html"} = cover:analyse_to_file(e,[html]),
+ %% Analyse all modules
+ Modules = cover:modules(),
+ N = length(Modules),
+
+ {result,CovFunc,[]} = cover:analyse(), % default = coverage, function
+ ACovFunc = [A || {{a,_,_},_}=A<-CovFunc],
+
+ {result,CovMod,[]} = cover:analyse(coverage,module),
+ ACovMod = lists:keyfind(a,1,CovMod),
+
+ {result,CovClause,[]} = cover:analyse(coverage,clause),
+ ACovClause = [A || {{a,_,_,_},_}=A<-CovClause],
+
+ {result,CovLine,[]} = cover:analyse(coverage,line),
+ ACovLine = [A || {{a,_},_}=A<-CovLine],
+
+ {result,CallsFunc,[]} = cover:analyse(calls,function),
+ ACallsFunc = [A || {{a,_,_},_}=A<-CallsFunc],
+
+ {result,CallsMod,[]} = cover:analyse(calls,module),
+ ACallsMod = lists:keyfind(a,1,CallsMod),
+
+ {result,CallsClause,[]} = cover:analyse(calls,clause),
+ ACallsClause = [A || {{a,_,_,_},_}=A<-CallsClause],
+
+ {result,CallsLine,[]} = cover:analyse(calls,line),
+ ACallsLine = [A || {{a,_},_}=A<-CallsLine],
+
+ {result,AllToFile,[]} = cover:analyse_to_file(),
+ N = length(AllToFile),
+ true = lists:member("a.COVER.out",AllToFile),
+ {result,AllToFileHtml,[]} = cover:analyse_to_file([html]),
+ N = length(AllToFileHtml),
+ true = lists:member("a.COVER.html",AllToFileHtml),
+
+ %% Analyse list of modules
+ %% Listing all modules so we can compare result with above result
+ %% from analysing all.
+
+ {result,CovFunc1,[]} = cover:analyse(Modules), % default = coverage, function
+ true = lists:sort(CovFunc) == lists:sort(CovFunc1),
+
+ {result,CovMod1,[]} = cover:analyse(Modules,coverage,module),
+ true = lists:sort(CovMod) == lists:sort(CovMod1),
+
+ {result,CovClause1,[]} = cover:analyse(Modules,coverage,clause),
+ true = lists:sort(CovClause) == lists:sort(CovClause1),
+
+ {result,CovLine1,[]} = cover:analyse(Modules,coverage,line),
+ true = lists:sort(CovLine) == lists:sort(CovLine1),
+
+ {result,CallsFunc1,[]} = cover:analyse(Modules,calls,function),
+ true = lists:sort(CallsFunc) == lists:sort(CallsFunc1),
+
+ {result,CallsMod1,[]} = cover:analyse(Modules,calls,module),
+ true = lists:sort(CallsMod) == lists:sort(CallsMod1),
+
+ {result,CallsClause1,[]} = cover:analyse(Modules,calls,clause),
+ true = lists:sort(CallsClause) == lists:sort(CallsClause1),
+
+ {result,CallsLine1,[]} = cover:analyse(Modules,calls,line),
+ true = lists:sort(CallsLine) == lists:sort(CallsLine1),
+
+ {result,AllToFile1,[]} = cover:analyse_to_file(Modules),
+ true = lists:sort(AllToFile) == lists:sort(AllToFile1),
+ {result,AllToFileHtml1,[]} = cover:analyse_to_file(Modules,[html]),
+ true = lists:sort(AllToFileHtml) == lists:sort(AllToFileHtml1),
+
%% analyse_to_file of file which is compiled from beam
?line {ok,f} = compile:file(f,[debug_info]),
?line code:purge(f),
@@ -348,14 +421,17 @@ analyse(Config) when is_list(Config) ->
{module,z} = code:load_file(z),
{ok,z} = cover:compile_beam(z),
ok = file:delete("z.erl"),
- {error,no_source_code_found} = cover:analyse_to_file(z),
+ {error,{no_source_code_found,z}} = cover:analyse_to_file(z),
+ {result,[],[{no_source_code_found,z}]} = cover:analyse_to_file([z]),
code:purge(z),
code:delete(z),
?line {error,{not_cover_compiled,b}} = cover:analyse(b),
?line {error,{not_cover_compiled,g}} = cover:analyse(g),
+ {result,[],[{not_cover_compiled,b}]} = cover:analyse([b]),
?line {error,{not_cover_compiled,b}} = cover:analyse_to_file(b),
- ?line {error,{not_cover_compiled,g}} = cover:analyse_to_file(g).
+ {error,{not_cover_compiled,g}} = cover:analyse_to_file(g),
+ {result,[],[{not_cover_compiled,g}]} = cover:analyse_to_file([g]).
misc(suite) -> [];
misc(Config) when is_list(Config) ->
@@ -680,6 +756,119 @@ stop_node_after_disconnect(Config) ->
?t:stop_node(N1),
ok.
+distribution_performance(Config) ->
+ PrivDir = ?config(priv_dir,Config),
+ Dir = filename:join(PrivDir,"distribution_performance"),
+ AllFiles = filename:join(Dir,"*"),
+ ok = filelib:ensure_dir(AllFiles),
+ code:add_patha(Dir),
+ M = 9, % Generate M modules
+ F = 210, % with F functions
+ C = 10, % and each function of C clauses
+ Mods = generate_modules(M,F,C,Dir),
+
+% ?t:break(""),
+
+ NodeName = cover_SUITE_distribution_performance,
+ {ok,N1} = ?t:start_node(NodeName,peer,[{start_cover,false}]),
+ %% CFun = fun() ->
+ %% [{ok,_} = cover:compile_beam(Mod) || Mod <- Mods]
+ %% end,
+ CFun = fun() -> cover:compile_beam(Mods) end,
+ {CT,CA} = timer:tc(CFun),
+% erlang:display(CA),
+ erlang:display({compile,CT}),
+
+ {SNT,_} = timer:tc(fun() -> {ok,[N1]} = cover:start(nodes()) end),
+ erlang:display({start_node,SNT}),
+
+ [1 = rpc:call(N1,Mod,f1,[1]) || Mod <- Mods],
+
+% Fun = fun() -> [cover:analyse(Mod,calls,function) || Mod<-Mods] end,
+% Fun = fun() -> analyse_all(Mods,calls,function) end,
+% Fun = fun() -> cover:analyse('_',calls,function) end,
+ Fun = fun() -> cover:analyse(Mods,calls,function) end,
+
+% Fun = fun() -> [begin cover:analyse_to_file(Mod,[html]) end || Mod<-Mods] end,
+% Fun = fun() -> analyse_all_to_file(Mods,[html]) end,
+% Fun = fun() -> cover:analyse_to_file(Mods,[html]) end,
+% Fun = fun() -> cover:analyse_to_file([html]) end,
+
+% Fun = fun() -> cover:reset() end,
+
+ {AT,A} = timer:tc(Fun),
+ erlang:display({analyse,AT}),
+% erlang:display(lists:sort([X || X={_MFA,N} <- lists:append([L || {ok,L}<-A]), N=/=0])),
+
+ %% fprof:apply(Fun, [],[{procs,[whereis(cover_server)]}]),
+ %% fprof:profile(),
+ %% fprof:analyse(dest,[]),
+
+ {SNT2,_} = timer:tc(fun() -> ?t:stop_node(N1) end),
+ erlang:display({stop_node,SNT2}),
+
+ code:del_path(Dir),
+ Files = filelib:wildcard(AllFiles),
+ [ok = file:delete(File) || File <- Files],
+ ok = file:del_dir(Dir),
+ ok.
+
+%% Run analysis in parallel
+analyse_all(Mods,Analysis,Level) ->
+ Pids = [begin
+ Pid = spawn(fun() ->
+ {ok,A} = cover:analyse(Mod,Analysis,Level),
+ exit(A)
+ end),
+ erlang:monitor(process,Pid),
+ Pid
+ end || Mod <- Mods],
+ get_downs(Pids,[]).
+
+analyse_all_to_file(Mods,Opts) ->
+ Pids = [begin
+ Pid = cover:async_analyse_to_file(Mod,Opts),
+ erlang:monitor(process,Pid),
+ Pid
+ end || Mod <- Mods],
+ get_downs(Pids,[]).
+
+get_downs([],Acc) ->
+ Acc;
+get_downs(Pids,Acc) ->
+ receive
+ {'DOWN', _Ref, _Type, Pid, A} ->
+ get_downs(lists:delete(Pid,Pids),[A|Acc])
+ end.
+
+generate_modules(0,_,_,_) ->
+ [];
+generate_modules(M,F,C,Dir) ->
+ ModStr = "m" ++ integer_to_list(M),
+ Mod = list_to_atom(ModStr),
+ Src = ["-module(",ModStr,").\n"
+ "-compile(export_all).\n" |
+ generate_functions(F,C)],
+ Erl = filename:join(Dir,ModStr++".erl"),
+ ok = file:write_file(Erl,Src),
+ {ok,Mod} = compile:file(Erl,[{outdir,Dir},debug_info,report]),
+ [Mod | generate_modules(M-1,F,C,Dir)].
+
+generate_functions(0,_) ->
+ [];
+generate_functions(F,C) ->
+ Func = "f" ++ integer_to_list(F),
+ [generate_clauses(C,Func) | generate_functions(F-1,C)].
+
+generate_clauses(0,_) ->
+ [];
+generate_clauses(C,Func) ->
+ CStr = integer_to_list(C),
+ Sep = if C==1 -> "."; true -> ";" end,
+ [Func,"(",CStr,") -> ",CStr,Sep,"\n" |
+ generate_clauses(C-1,Func)].
+
+
export_import(suite) -> [];
export_import(Config) when is_list(Config) ->
?line DataDir = ?config(data_dir, Config),
@@ -788,10 +977,11 @@ otp_5031(Config) when is_list(Config) ->
Dog = ?t:timetrap(?t:seconds(10)),
- ?line {ok,N1} = ?t:start_node(cover_SUITE_distribution1,slave,[]),
+ {ok,N1} = ?t:start_node(cover_SUITE_otp_5031,slave,[]),
?line {ok,[N1]} = cover:start(N1),
?line {error,not_main_node} = rpc:call(N1,cover,modules,[]),
?line cover:stop(),
+ ?t:stop_node(N1),
?t:timetrap_cancel(Dog),
ok.
@@ -1005,6 +1195,7 @@ otp_7095(Config) when is_list(Config) ->
ok.
+
otp_8270(doc) ->
["OTP-8270. Bug."];
otp_8270(suite) -> [];
@@ -1020,7 +1211,7 @@ otp_8270(Config) when is_list(Config) ->
?line {ok,N3} = ?t:start_node(cover_n3,slave,As),
timer:sleep(500),
- cover:start(nodes()),
+ {ok,[_,_,_]} = cover:start(nodes()),
Test = <<
"-module(m).\n"
@@ -1058,6 +1249,7 @@ otp_8270(Config) when is_list(Config) ->
?line {N2,true} = {N2,is_list(N2_info)},
?line {N3,true} = {N3,is_list(N3_info)},
+ exit(Pid1,kill),
?line ?t:stop_node(N1),
?line ?t:stop_node(N2),
?line ?t:stop_node(N3),
@@ -1572,7 +1764,9 @@ is_unloaded(What) ->
end.
check_f_calls(F1,F2) ->
- {ok,[{{f,f1,0},F1},{{f,f2,0},F2}|_]} = cover:analyse(f,calls,function).
+ {ok,A} = cover:analyse(f,calls,function),
+ {_,F1} = lists:keyfind({f,f1,0},1,A),
+ {_,F2} = lists:keyfind({f,f2,0},1,A).
cover_which_nodes(Expected) ->
case cover:which_nodes() of
diff --git a/lib/tools/test/lcnt_SUITE.erl b/lib/tools/test/lcnt_SUITE.erl
index 010dffe138..de68486b1b 100644
--- a/lib/tools/test/lcnt_SUITE.erl
+++ b/lib/tools/test/lcnt_SUITE.erl
@@ -97,12 +97,12 @@ t_conflicts_file([File|Files]) ->
{ok, _} = lcnt:start(),
ok = lcnt:load(File),
ok = lcnt:conflicts(),
- THs = [-1, 0, 100, 1000],
+ THs = [-1, 5],
Print = [name , id , type , entry , tries , colls , ratio , time , duration],
Opts = [
[{sort, Sort}, {reverse, Rev}, {max_locks, ML}, {combine, Combine}, {thresholds, [TH]}, {print, [Print]}] ||
- Sort <- [name , id , type , tries , colls , ratio , time , entry],
- ML <- [none, 1 , 32, 4096],
+ Sort <- [name , type , tries , colls , ratio , time],
+ ML <- [none, 32],
Combine <- [true, false],
TH <- [{tries, Tries} || Tries <- THs] ++ [{colls, Colls} || Colls <- THs] ++ [{time, Time} || Time <- THs],
Rev <- [true, false]
@@ -131,12 +131,12 @@ t_locations_file([File|Files]) ->
{ok, _} = lcnt:start(),
ok = lcnt:load(File),
ok = lcnt:locations(),
- THs = [-1, 0, 100, 1000],
+ THs = [-1, 0, 100],
Print = [name , id , type , entry , tries , colls , ratio , time , duration],
Opts = [
[{full_id, Id}, {sort, Sort}, {max_locks, ML}, {combine, Combine}, {thresholds, [TH]}, {print, Print}] ||
Sort <- [name , id , type , tries , colls , ratio , time , entry],
- ML <- [none, 1 , 64],
+ ML <- [none, 64],
Combine <- [true, false],
TH <- [{tries, Tries} || Tries <- THs] ++ [{colls, Colls} || Colls <- THs] ++ [{time, Time} || Time <- THs],
Id <- [true, false]
diff --git a/lib/tools/vsn.mk b/lib/tools/vsn.mk
index d9651c30e3..68c3f6e29c 100644
--- a/lib/tools/vsn.mk
+++ b/lib/tools/vsn.mk
@@ -1 +1 @@
-TOOLS_VSN = 2.7.1
+TOOLS_VSN = 2.8
diff --git a/lib/typer/src/typer.erl b/lib/typer/src/typer.erl
index cbad05081e..f8070e04c1 100644
--- a/lib/typer/src/typer.erl
+++ b/lib/typer/src/typer.erl
@@ -2,7 +2,7 @@
%%-----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -931,7 +931,9 @@ analyze_one_function({Var, FunBody} = Function, Acc) ->
A = cerl:fname_arity(Var),
TmpDialyzerObj = {{Acc#tmpAcc.module, F, A}, Function},
NewDialyzerObj = Acc#tmpAcc.dialyzerObj ++ [TmpDialyzerObj],
- [_, LineNo, {file, FileName}] = cerl:get_ann(FunBody),
+ Anno = cerl:get_ann(FunBody),
+ LineNo = get_line(Anno),
+ FileName = get_file(Anno),
BaseName = filename:basename(FileName),
FuncInfo = {LineNo, F, A},
OriginalName = Acc#tmpAcc.file,
@@ -951,6 +953,14 @@ analyze_one_function({Var, FunBody} = Function, Acc) ->
incFuncAcc = IncFuncAcc,
dialyzerObj = NewDialyzerObj}.
+get_line([Line|_]) when is_integer(Line) -> Line;
+get_line([_|T]) -> get_line(T);
+get_line([]) -> none.
+
+get_file([{file,File}|_]) -> File;
+get_file([_|T]) -> get_file(T);
+get_file([]) -> "no_file". % should not happen
+
-spec get_dialyzer_plt(analysis()) -> plt().
get_dialyzer_plt(#analysis{plt = PltFile0}) ->
diff --git a/lib/typer/vsn.mk b/lib/typer/vsn.mk
index ce658e257b..74c0ccfc59 100644
--- a/lib/typer/vsn.mk
+++ b/lib/typer/vsn.mk
@@ -1 +1 @@
-TYPER_VSN = 0.9.8
+TYPER_VSN = 0.9.9
diff --git a/lib/webtool/doc/src/Makefile b/lib/webtool/doc/src/Makefile
index 32269e9424..08292fcca8 100644
--- a/lib/webtool/doc/src/Makefile
+++ b/lib/webtool/doc/src/Makefile
@@ -25,6 +25,10 @@ include ../../vsn.mk
VSN=$(WEBTOOL_VSN)
APPLICATION=webtool
+DOC_EXTRA_FRONT_PAGE_INFO=Important note: \
+The Webtool application is obsolete and will be removed \
+in the next major OTP release
+
# ----------------------------------------------------
# Release directory specification
# ----------------------------------------------------
diff --git a/lib/webtool/vsn.mk b/lib/webtool/vsn.mk
index a79c273d9f..4a701ae6e0 100644
--- a/lib/webtool/vsn.mk
+++ b/lib/webtool/vsn.mk
@@ -1 +1 @@
-WEBTOOL_VSN=0.8.10
+WEBTOOL_VSN=0.9
diff --git a/lib/wx/api_gen/wx_gen_cpp.erl b/lib/wx/api_gen/wx_gen_cpp.erl
index 107d064f4a..8e32aeddc8 100644
--- a/lib/wx/api_gen/wx_gen_cpp.erl
+++ b/lib/wx/api_gen/wx_gen_cpp.erl
@@ -197,8 +197,8 @@ gen_funcs(Defs) ->
w(" if(recurse_level > 1 && refd->type != 4) {~n"),
w(" delayed_delete->Append(Ecmd.Save());~n"),
w(" } else {~n"),
- w(" ((WxeApp *) wxTheApp)->clearPtr(This);~n"),
- w(" delete_object(This, refd); }~n"),
+ w(" delete_object(This, refd);~n"),
+ w(" ((WxeApp *) wxTheApp)->clearPtr(This);}~n"),
w(" } } break;~n"),
w(" case WXE_REGISTER_OBJECT: {~n"
" registerPid(bp, Ecmd.caller, memenv);~n"
@@ -975,6 +975,13 @@ build_ret(Name = "ev->m_scanCode",_,#type{base=bool,single=true,by_val=true}) ->
w(" rt.addBool(~s);~n",[Name]),
w("#else~n rt.addBool(false);~n",[]),
w("#endif~n",[]);
+build_ret(Name = "ev->m_metaDown",_,#type{base=bool,single=true,by_val=true}) ->
+ %% Hardcoded workaround for MAC on 2.9 and later
+ w("#if wxCHECK_VERSION(2,9,0) && defined(_MACOSX)~n", []),
+ w(" rt.addBool(ev->m_rawControlDown);~n",[]),
+ w("#else~n rt.addBool(~s);~n",[Name]),
+ w("#endif~n",[]);
+
build_ret(Name,_,#type{base=bool,single=true,by_val=true}) ->
w(" rt.addBool(~s);~n",[Name]);
build_ret(Name,{arg, both},#type{base=int,single=true,mod=M}) ->
diff --git a/lib/wx/api_gen/wxapi.conf b/lib/wx/api_gen/wxapi.conf
index 2e961cce98..bbf9add59e 100644
--- a/lib/wx/api_gen/wxapi.conf
+++ b/lib/wx/api_gen/wxapi.conf
@@ -32,7 +32,10 @@
wxALWAYS_NATIVE_DOUBLE_BUFFER,
wxGAUGE_EMULATE_INDETERMINATE_MODE,
wxTR_DEFAULT_STYLE,
- wxSL_LABELS
+ wxSL_LABELS,
+ wxCURSOR_DEFAULT,
+ wxCURSOR_ARROWWAIT,
+ wxCURSOR_MAX
]}.
{gvars,
@@ -1355,7 +1358,8 @@
wxEVT_SCROLL_THUMBRELEASE,wxEVT_SCROLL_CHANGED]}],
['GetOrientation','GetPosition']}.
{class, wxScrollWinEvent,wxEvent,
- [{event,
+ [{acc, [{m_commandInt, "GetPosition()"}, {m_extraLong, "GetOrientation()"}]},
+ {event,
[wxEVT_SCROLLWIN_TOP,wxEVT_SCROLLWIN_BOTTOM,wxEVT_SCROLLWIN_LINEUP,
wxEVT_SCROLLWIN_LINEDOWN,wxEVT_SCROLLWIN_PAGEUP,
wxEVT_SCROLLWIN_PAGEDOWN,wxEVT_SCROLLWIN_THUMBTRACK,
@@ -1380,7 +1384,9 @@
'ShiftDown'
]}.
-{class, wxSetCursorEvent, wxEvent, [{event,[wxEVT_SET_CURSOR]}],
+{class, wxSetCursorEvent, wxEvent,
+ [{acc, [{m_x, "GetX()"}, {m_y, "GetY()"}, {m_cursor, "GetCursor()"}]},
+ {event,[wxEVT_SET_CURSOR]}],
['GetCursor','GetX','GetY','HasCursor','SetCursor']}.
{class, wxKeyEvent, wxEvent,
@@ -1395,7 +1401,7 @@
{class, wxSizeEvent, wxEvent, [{event,[wxEVT_SIZE]}],
['GetSize']}.
-{class, wxMoveEvent, wxEvent, [{event,[wxEVT_MOVE]}],
+{class, wxMoveEvent, wxEvent, [{acc, [{m_pos, "GetPosition()"}, {m_rect, "GetRect()"}]}, {event,[wxEVT_MOVE]}],
['GetPosition']}.
{class, wxPaintEvent, wxEvent, [{event,[wxEVT_PAINT]}],[]}.
%%{class, wxNcPaintEvent, wxEvent, [{event,[wxEVT_NC_PAINT]}],[]}.
@@ -1404,28 +1410,28 @@
{event, [wxEVT_ERASE_BACKGROUND]}],
['GetDC']}.
{class, wxFocusEvent, wxEvent,
- [{event,[wxEVT_SET_FOCUS,wxEVT_KILL_FOCUS]}],
+ [{acc, [{m_win, "GetWindow()"}]},
+ {event,[wxEVT_SET_FOCUS,wxEVT_KILL_FOCUS]}],
['GetWindow']}.
{class,wxChildFocusEvent,wxCommandEvent,
[{event,[wxEVT_CHILD_FOCUS]}],
['GetWindow']}.
-%% {class, wxActivateEvent, wxEvent, [{event,
-%% [wxEVT_ACTIVATE,wxEVT_ACTIVATE_APP,wxEVT_HIBERNATE]}],[]}.
-
-%%{class, wxInitDialogEvent, wxEvent, [{event, []}],[]}.
-
-{class, wxMenuEvent, wxEvent,
- [{event, [wxEVT_MENU_OPEN,wxEVT_MENU_CLOSE,wxEVT_MENU_HIGHLIGHT]}],
+{class, wxMenuEvent, wxEvent,
+ [{acc, [{m_menuId, "GetMenuId()"}, {m_menu, "GetMenu()"}]},
+ {event, [wxEVT_MENU_OPEN,wxEVT_MENU_CLOSE,wxEVT_MENU_HIGHLIGHT]}],
['GetMenu','GetMenuId','IsPopup']}.
{class, wxCloseEvent, wxEvent,
[{event, [wxEVT_CLOSE_WINDOW,wxEVT_END_SESSION,wxEVT_QUERY_END_SESSION]}],
['CanVeto','GetLoggingOff','SetCanVeto','SetLoggingOff','Veto']}.
-{class, wxShowEvent, wxEvent, [{event,[wxEVT_SHOW]}],['SetShow','GetShow']}.
-{class, wxIconizeEvent, wxEvent, [{event,[wxEVT_ICONIZE]}],['Iconized']}.
+{class, wxShowEvent, wxEvent, [{acc, [{m_show, "GetShow()"}]},{event,[wxEVT_SHOW]}],['SetShow','GetShow']}.
+{class, wxIconizeEvent, wxEvent, [{acc, [{m_iconized, "Iconized()"}]},{event,[wxEVT_ICONIZE]}],['Iconized']}.
{class, wxMaximizeEvent, wxEvent, [{event,[wxEVT_MAXIMIZE]}],[]}.
-{class, wxJoystickEvent, wxEvent,
- [{event,[wxEVT_JOY_BUTTON_DOWN,wxEVT_JOY_BUTTON_UP,
+{class, wxJoystickEvent, wxEvent,
+ [{acc, [{m_pos, "GetPosition()"},{m_zPosition, "GetZPosition()"},
+ {m_buttonChange, "GetButtonChange()"}, {m_buttonState, "GetButtonState()"},
+ {m_joyStick, "GetJoystick()"}]},
+ {event,[wxEVT_JOY_BUTTON_DOWN,wxEVT_JOY_BUTTON_UP,
wxEVT_JOY_MOVE,wxEVT_JOY_ZMOVE]}],
['ButtonDown','ButtonIsDown','ButtonUp','GetButtonChange','GetButtonState',
'GetJoystick','GetPosition','GetZPosition','IsButton','IsMove','IsZMove']}.
@@ -1463,7 +1469,8 @@
'SetOrigin',
'SetPosition']}.
-{class, wxContextMenuEvent, wxCommandEvent, [{event,[wxEVT_CONTEXT_MENU]}],
+{class, wxContextMenuEvent, wxCommandEvent,
+ [{acc, [{m_pos, "GetPosition()"}]}, {event,[wxEVT_CONTEXT_MENU]}],
['GetPosition','SetPosition']}.
{enum, wxIdleMode, "wxIDLE_"}.
{class, wxIdleEvent, wxEvent, [{event,[wxEVT_IDLE]}],
@@ -1522,7 +1529,8 @@
]}.
{class, wxCalendarEvent, wxDateEvent,
- [{event,[wxEVT_CALENDAR_SEL_CHANGED, wxEVT_CALENDAR_DAY_CHANGED,
+ [{acc, [{m_date, "GetDate()"}, {m_wday, "GetWeekDay()"}]},
+ {event,[wxEVT_CALENDAR_SEL_CHANGED, wxEVT_CALENDAR_DAY_CHANGED,
wxEVT_CALENDAR_MONTH_CHANGED, wxEVT_CALENDAR_YEAR_CHANGED,
wxEVT_CALENDAR_DOUBLECLICKED, wxEVT_CALENDAR_WEEKDAY_CLICKED]}],
[
@@ -1727,8 +1735,9 @@
['GetKeyCode','GetItem','GetKeyEvent','GetLabel','GetOldItem','GetPoint',
'IsEditCancelled','SetToolTip']}.
-{class, wxNotebookEvent, wxNotifyEvent,
- [{event, [wxEVT_COMMAND_NOTEBOOK_PAGE_CHANGED,
+{class, wxNotebookEvent, wxNotifyEvent,
+ [{acc, [{m_nSel, "GetSelection()"}, {m_nOldSel, "GetOldSelection()"}]},
+ {event, [wxEVT_COMMAND_NOTEBOOK_PAGE_CHANGED,
wxEVT_COMMAND_NOTEBOOK_PAGE_CHANGING]}],
['GetOldSelection','GetSelection','SetOldSelection','SetSelection']}.
diff --git a/lib/wx/c_src/gen/wxe_events.cpp b/lib/wx/c_src/gen/wxe_events.cpp
index 255b36c2fa..e042b4d890 100644
--- a/lib/wx/c_src/gen/wxe_events.cpp
+++ b/lib/wx/c_src/gen/wxe_events.cpp
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2008-2014. All Rights Reserved.
+ * Copyright Ericsson AB 2008-2015. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -375,10 +375,13 @@ case 165: {// wxScrollEvent or wxSpinEvent
break;
}
case 166: {// wxScrollWinEvent
+ wxScrollWinEvent * ev = (wxScrollWinEvent *) event;
evClass = (char*)"wxScrollWinEvent";
rt.addAtom((char*)"wxScrollWin");
rt.addAtom(Etype->eName);
- rt.addTupleCount(2);
+ rt.addInt(ev->GetPosition());
+ rt.addInt(ev->GetOrientation());
+ rt.addTupleCount(4);
break;
}
case 167: {// wxMouseEvent
@@ -394,7 +397,11 @@ case 167: {// wxMouseEvent
rt.addBool(ev->m_controlDown);
rt.addBool(ev->m_shiftDown);
rt.addBool(ev->m_altDown);
+#if wxCHECK_VERSION(2,9,0) && defined(_MACOSX)
+ rt.addBool(ev->m_rawControlDown);
+#else
rt.addBool(ev->m_metaDown);
+#endif
rt.addInt(ev->m_wheelRotation);
rt.addInt(ev->m_wheelDelta);
rt.addInt(ev->m_linesPerAction);
@@ -402,10 +409,16 @@ case 167: {// wxMouseEvent
break;
}
case 168: {// wxSetCursorEvent
+ wxSetCursorEvent * ev = (wxSetCursorEvent *) event;
+ wxCursor * GetCursor = new wxCursor(ev->GetCursor());
+ app->newPtr((void *) GetCursor,3, memenv);
evClass = (char*)"wxSetCursorEvent";
rt.addAtom((char*)"wxSetCursor");
rt.addAtom(Etype->eName);
- rt.addTupleCount(2);
+ rt.addInt(ev->GetX());
+ rt.addInt(ev->GetY());
+ rt.addRef(getRef((void *)GetCursor,memenv), "wxCursor");
+ rt.addTupleCount(5);
break;
}
case 169: {// wxKeyEvent
@@ -419,7 +432,11 @@ case 169: {// wxKeyEvent
rt.addBool(ev->m_controlDown);
rt.addBool(ev->m_shiftDown);
rt.addBool(ev->m_altDown);
+#if wxCHECK_VERSION(2,9,0) && defined(_MACOSX)
+ rt.addBool(ev->m_rawControlDown);
+#else
rt.addBool(ev->m_metaDown);
+#endif
#if !wxCHECK_VERSION(2,9,0)
rt.addBool(ev->m_scanCode);
#else
@@ -442,10 +459,13 @@ case 170: {// wxSizeEvent
break;
}
case 171: {// wxMoveEvent
+ wxMoveEvent * ev = (wxMoveEvent *) event;
evClass = (char*)"wxMoveEvent";
rt.addAtom((char*)"wxMove");
rt.addAtom(Etype->eName);
- rt.addTupleCount(2);
+ rt.add(ev->GetPosition());
+ rt.add(ev->GetRect());
+ rt.addTupleCount(4);
break;
}
case 172: {// wxPaintEvent
@@ -466,10 +486,13 @@ case 173: {// wxEraseEvent
break;
}
case 174: {// wxFocusEvent
+ wxFocusEvent * ev = (wxFocusEvent *) event;
+ wxWindow * GetWindow = ev->GetWindow();
evClass = (char*)"wxFocusEvent";
rt.addAtom((char*)"wxFocus");
rt.addAtom(Etype->eName);
- rt.addTupleCount(2);
+ rt.addRef(getRef((void *)GetWindow,memenv), "wxWindow");
+ rt.addTupleCount(3);
break;
}
case 175: {// wxChildFocusEvent
@@ -480,10 +503,14 @@ case 175: {// wxChildFocusEvent
break;
}
case 176: {// wxMenuEvent
+ wxMenuEvent * ev = (wxMenuEvent *) event;
+ wxMenu * GetMenu = ev->GetMenu();
evClass = (char*)"wxMenuEvent";
rt.addAtom((char*)"wxMenu");
rt.addAtom(Etype->eName);
- rt.addTupleCount(2);
+ rt.addInt(ev->GetMenuId());
+ rt.addRef(getRef((void *)GetMenu,memenv), "wxMenu");
+ rt.addTupleCount(4);
break;
}
case 177: {// wxCloseEvent
@@ -494,17 +521,21 @@ case 177: {// wxCloseEvent
break;
}
case 178: {// wxShowEvent
+ wxShowEvent * ev = (wxShowEvent *) event;
evClass = (char*)"wxShowEvent";
rt.addAtom((char*)"wxShow");
rt.addAtom(Etype->eName);
- rt.addTupleCount(2);
+ rt.addBool(ev->GetShow());
+ rt.addTupleCount(3);
break;
}
case 179: {// wxIconizeEvent
+ wxIconizeEvent * ev = (wxIconizeEvent *) event;
evClass = (char*)"wxIconizeEvent";
rt.addAtom((char*)"wxIconize");
rt.addAtom(Etype->eName);
- rt.addTupleCount(2);
+ rt.addBool(ev->Iconized());
+ rt.addTupleCount(3);
break;
}
case 180: {// wxMaximizeEvent
@@ -515,10 +546,16 @@ case 180: {// wxMaximizeEvent
break;
}
case 181: {// wxJoystickEvent
+ wxJoystickEvent * ev = (wxJoystickEvent *) event;
evClass = (char*)"wxJoystickEvent";
rt.addAtom((char*)"wxJoystick");
rt.addAtom(Etype->eName);
- rt.addTupleCount(2);
+ rt.add(ev->GetPosition());
+ rt.addInt(ev->GetZPosition());
+ rt.addInt(ev->GetButtonChange());
+ rt.addInt(ev->GetButtonState());
+ rt.addInt(ev->GetJoystick());
+ rt.addTupleCount(7);
break;
}
case 182: {// wxUpdateUIEvent
@@ -595,10 +632,12 @@ case 191: {// wxHelpEvent
break;
}
case 192: {// wxContextMenuEvent
+ wxContextMenuEvent * ev = (wxContextMenuEvent *) event;
evClass = (char*)"wxContextMenuEvent";
rt.addAtom((char*)"wxContextMenu");
rt.addAtom(Etype->eName);
- rt.addTupleCount(2);
+ rt.add(ev->GetPosition());
+ rt.addTupleCount(3);
break;
}
case 193: {// wxIdleEvent
@@ -659,10 +698,13 @@ case 198: {// wxDateEvent
break;
}
case 199: {// wxCalendarEvent
+ wxCalendarEvent * ev = (wxCalendarEvent *) event;
evClass = (char*)"wxCalendarEvent";
rt.addAtom((char*)"wxCalendar");
rt.addAtom(Etype->eName);
- rt.addTupleCount(2);
+ rt.addInt(ev->GetWeekDay());
+ rt.add(ev->GetDate());
+ rt.addTupleCount(4);
break;
}
case 200: {// wxFileDirPickerEvent
@@ -734,10 +776,13 @@ case 209: {// wxTreeEvent
break;
}
case 210: {// wxNotebookEvent
+ wxNotebookEvent * ev = (wxNotebookEvent *) event;
evClass = (char*)"wxNotebookEvent";
rt.addAtom((char*)"wxNotebook");
rt.addAtom(Etype->eName);
- rt.addTupleCount(2);
+ rt.addInt(ev->GetSelection());
+ rt.addInt(ev->GetOldSelection());
+ rt.addTupleCount(4);
break;
}
case 216: {// wxClipboardTextEvent
diff --git a/lib/wx/c_src/gen/wxe_funcs.cpp b/lib/wx/c_src/gen/wxe_funcs.cpp
index 91ce5d810c..3b11c0642e 100644
--- a/lib/wx/c_src/gen/wxe_funcs.cpp
+++ b/lib/wx/c_src/gen/wxe_funcs.cpp
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2008-2014. All Rights Reserved.
+ * Copyright Ericsson AB 2008-2015. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -51,8 +51,8 @@ void WxeApp::wxe_dispatch(wxeCommand& Ecmd)
if(recurse_level > 1 && refd->type != 4) {
delayed_delete->Append(Ecmd.Save());
} else {
- ((WxeApp *) wxTheApp)->clearPtr(This);
- delete_object(This, refd); }
+ delete_object(This, refd);
+ ((WxeApp *) wxTheApp)->clearPtr(This);}
} } break;
case WXE_REGISTER_OBJECT: {
registerPid(bp, Ecmd.caller, memenv);
diff --git a/lib/wx/c_src/gen/wxe_init.cpp b/lib/wx/c_src/gen/wxe_init.cpp
index 3a4bced790..1673f2a1b3 100644
--- a/lib/wx/c_src/gen/wxe_init.cpp
+++ b/lib/wx/c_src/gen/wxe_init.cpp
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2008-2014. All Rights Reserved.
+ * Copyright Ericsson AB 2008-2015. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
@@ -56,6 +56,12 @@ void WxeApp::init_nonconsts(wxeMemEnv *memenv, ErlDrvTermData caller) {
rt.addTupleCount(2);
rt.addAtom("wxMOD_CMD"); rt.addInt(wxMOD_CMD);
rt.addTupleCount(2);
+ rt.addAtom("wxCURSOR_ARROWWAIT"); rt.addInt(wxCURSOR_ARROWWAIT);
+ rt.addTupleCount(2);
+ rt.addAtom("wxCURSOR_DEFAULT"); rt.addInt(wxCURSOR_DEFAULT);
+ rt.addTupleCount(2);
+ rt.addAtom("wxCURSOR_MAX"); rt.addInt(wxCURSOR_MAX);
+ rt.addTupleCount(2);
rt.addAtom("wxBLACK"); rt.add(*(wxBLACK));
rt.addTupleCount(2);
rt.addAtom("wxBLACK_BRUSH"); rt.addRef(getRef((void *)wxBLACK_BRUSH,memenv),"wxBrush");
@@ -138,7 +144,7 @@ void WxeApp::init_nonconsts(wxeMemEnv *memenv, ErlDrvTermData caller) {
rt.addTupleCount(2);
rt.addAtom("wxWHITE_PEN"); rt.addRef(getRef((void *)wxWHITE_PEN,memenv),"wxPen");
rt.addTupleCount(2);
- rt.endList(57);
+ rt.endList(60);
rt.addTupleCount(2);
rt.send();
}
diff --git a/lib/wx/c_src/wxe_driver.c b/lib/wx/c_src/wxe_driver.c
index ea52737fa2..ec1ba7f566 100644
--- a/lib/wx/c_src/wxe_driver.c
+++ b/lib/wx/c_src/wxe_driver.c
@@ -146,7 +146,12 @@ wxe_driver_stop(ErlDrvData handle)
if(sd->port_handle != WXE_DRV_PORT_HANDLE) {
// fprintf(stderr, "%s:%d: STOP \r\n", __FILE__,__LINE__);
meta_command(DELETE_PORT,sd);
- free(handle);
+ } else {
+ // fprintf(stderr, "%s:%d: STOP \r\n", __FILE__,__LINE__);
+ stop_native_gui(wxe_master);
+ unload_native_gui();
+ free(wxe_master);
+ wxe_master = NULL;
}
}
@@ -154,10 +159,6 @@ static void
wxe_driver_unload(void)
{
// fprintf(stderr, "%s:%d: UNLOAD \r\n", __FILE__,__LINE__);
- stop_native_gui(wxe_master);
- unload_native_gui();
- free(wxe_master);
- wxe_master = NULL;
}
static ErlDrvSSizeT
diff --git a/lib/wx/c_src/wxe_gl.cpp b/lib/wx/c_src/wxe_gl.cpp
index a9feb23831..26b45d219e 100644
--- a/lib/wx/c_src/wxe_gl.cpp
+++ b/lib/wx/c_src/wxe_gl.cpp
@@ -135,8 +135,12 @@ void deleteActiveGL(wxGLCanvas *canvas)
void gl_dispatch(int op, char *bp,ErlDrvTermData caller,WXEBinRef *bins[]){
if(caller != gl_active) {
wxGLCanvas * current = glc[caller];
- if(current) { gl_active = caller; current->SetCurrent();}
- else {
+ if(current) {
+ if(current != glc[gl_active]) {
+ gl_active = caller;
+ current->SetCurrent();
+ }
+ } else {
ErlDrvTermData rt[] = // Error msg
{ERL_DRV_ATOM, driver_mk_atom((char *) "_egl_error_"),
ERL_DRV_INT, (ErlDrvTermData) op,
diff --git a/lib/wx/c_src/wxe_helpers.cpp b/lib/wx/c_src/wxe_helpers.cpp
index 15d75080d9..120919e7aa 100644
--- a/lib/wx/c_src/wxe_helpers.cpp
+++ b/lib/wx/c_src/wxe_helpers.cpp
@@ -24,23 +24,96 @@
* Erlang Commands
* ****************************************************************************/
-wxeCommand::wxeCommand(int fc,char * cbuf,int buflen, wxe_data *sd)
- : wxObject()
+wxeCommand::wxeCommand()
{
+}
+
+wxeCommand::~wxeCommand()
+{
+ Delete();
+}
+
+void wxeCommand::Delete()
+{
+ int n = 0;
+
+ if(buffer) {
+ while(bin[n]) {
+ if(bin[n]->bin)
+ driver_free_binary(bin[n]->bin);
+ driver_free(bin[n++]);
+ }
+ if(len > 64)
+ driver_free(buffer);
+ buffer = NULL;
+ op = -1;
+ }
+}
+
+/* ****************************************************************************
+ * wxeFifo
+ * ****************************************************************************/
+wxeFifo::wxeFifo(unsigned int sz)
+{
+ m_q = (wxeCommand *) driver_alloc(sizeof(wxeCommand) * sz);
+ m_orig_sz = sz;
+ m_max = sz;
+ m_n = 0;
+ m_first = 0;
+ m_old = NULL;
+ for(unsigned int i = 0; i < sz; i++) {
+ m_q[i].buffer = NULL;
+ m_q[i].op = -1;
+ }
+}
+
+wxeFifo::~wxeFifo() {
+ // dealloc all memory buffers
+ driver_free(m_q);
+}
+
+wxeCommand * wxeFifo::Get()
+{
+ unsigned int pos;
+ if(m_n > 0) {
+ pos = m_first++;
+ m_n--;
+ m_first %= m_max;
+ return &m_q[pos];
+ }
+ return NULL;
+}
+
+void wxeFifo::Add(int fc, char * cbuf,int buflen, wxe_data *sd)
+{
+ unsigned int pos;
+ wxeCommand *curr;
+
WXEBinRef *temp, *start, *prev;
int n = 0;
- ref_count = 1;
- caller = driver_caller(sd->port_handle);
- port = sd->port;
- op = fc;
- len = buflen;
- bin[0] = NULL;
- bin[1] = NULL;
- bin[2] = NULL;
+
+ if(m_n == (m_max-1)) { // resize
+ Realloc();
+ }
+
+ pos = (m_first + m_n) % m_max;
+ m_n++;
+
+ curr = &m_q[pos];
+ curr->caller = driver_caller(sd->port_handle);
+ curr->port = sd->port;
+ curr->op = fc;
+ curr->len = buflen;
+ curr->bin[0] = NULL;
+ curr->bin[1] = NULL;
+ curr->bin[2] = NULL;
if(cbuf) {
- buffer = (char *) driver_alloc(len);
- memcpy((void *) buffer, (void *) cbuf, len);;
+ if(buflen > 64)
+ curr->buffer = (char *) driver_alloc(buflen);
+ else
+ curr->buffer = curr->c_buf;
+ memcpy((void *) curr->buffer, (void *) cbuf, buflen);
temp = sd->bin;
@@ -48,8 +121,8 @@ wxeCommand::wxeCommand(int fc,char * cbuf,int buflen, wxe_data *sd)
start = temp;
while(temp) {
- if(caller == temp->from) {
- bin[n++] = temp;
+ if(curr->caller == temp->from) {
+ curr->bin[n++] = temp;
if(prev) {
prev->next = temp->next;
} else {
@@ -63,20 +136,68 @@ wxeCommand::wxeCommand(int fc,char * cbuf,int buflen, wxe_data *sd)
}
sd->bin = start;
} else { // No-op only PING currently
- buffer = NULL;
+ curr->buffer = NULL;
}
}
-wxeCommand::~wxeCommand() {
- int n = 0;
- if(buffer) {
- while(bin[n]) {
- if(bin[n]->bin)
- driver_free_binary(bin[n]->bin);
- driver_free(bin[n++]);
+void wxeFifo::Append(wxeCommand *orig)
+{
+ unsigned int pos;
+ wxeCommand *curr;
+ if(m_n == (m_max-1)) { // resize
+ Realloc();
+ }
+
+ pos = (m_first + m_n) % m_max;
+ m_n++;
+ curr = &m_q[pos];
+ curr->caller = orig->caller;
+ curr->port = orig->port;
+ curr->op = orig->op;
+ curr->len = orig->len;
+ curr->bin[0] = orig->bin[0];
+ curr->bin[1] = orig->bin[1];
+ curr->bin[2] = orig->bin[2];
+
+ if(orig->len > 64)
+ curr->buffer = orig->buffer;
+ else {
+ curr->buffer = curr->c_buf;
+ memcpy((void *) curr->buffer, (void *) orig->buffer, orig->len);
+ }
+ orig->op = -1;
+ orig->buffer = NULL;
+ orig->bin[0] = NULL;
+}
+
+void wxeFifo::Realloc()
+{
+ unsigned int i;
+ unsigned int growth = m_orig_sz / 2;
+ unsigned int new_sz = growth + m_max;
+ unsigned int max = m_max;
+ unsigned int first = m_first;
+ unsigned int n = m_n;
+ wxeCommand * old = m_q;
+ wxeCommand * queue = (wxeCommand *)driver_alloc(new_sz*sizeof(wxeCommand));
+
+ m_max=new_sz;
+ m_first = 0;
+ m_n=0;
+ m_q = queue;
+
+ for(i=0; i < n; i++) {
+ unsigned int pos = i+first;
+ if(old[pos%max].op >= 0) {
+ Append(&old[pos%max]);
}
- driver_free(buffer);
}
+ for(i = m_n; i < new_sz; i++) { // Reset the rest
+ m_q[i].buffer = NULL;
+ m_q[i].op = -1;
+ }
+  // Cannot free the old queue here; it may still be in use by the wx thread
+ m_old = old;
}
/* ****************************************************************************
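
The wxeFifo added above replaces the wxList-based command queue with a fixed array used as a circular buffer: Add/Append write at (m_first + m_n) % m_max, Get reads at m_first, and Realloc copies the live entries into a larger array while parking the old one in m_old, which is freed later because the wx thread may still be referencing it. As a reading aid only, here is a minimal C++ sketch of that index arithmetic; the names are placeholders, and it deliberately omits the growth, small-buffer (c_buf) and driver-allocation details shown in the patch itself.

    #include <cstddef>

    struct Slot { int op = -1; };            // stands in for wxeCommand

    class RingFifo {
    public:
        explicit RingFifo(std::size_t sz) : q(new Slot[sz]), max(sz) {}
        ~RingFifo() { delete[] q; }

        // Enqueue at (first + n) % max; a real queue must grow before n reaches max.
        Slot* add() { Slot* s = &q[(first + n) % max]; ++n; return s; }

        // Dequeue the oldest entry, advancing and wrapping the read index.
        Slot* get() {
            if (n == 0) return nullptr;
            Slot* s = &q[first];
            first = (first + 1) % max;
            --n;
            return s;
        }

    private:
        Slot* q;
        std::size_t max;
        std::size_t first = 0;               // index of the oldest queued element
        std::size_t n = 0;                   // number of queued elements
    };
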
diff --git a/lib/wx/c_src/wxe_helpers.h b/lib/wx/c_src/wxe_helpers.h
index 659bc666c6..ec3a5debdb 100644
--- a/lib/wx/c_src/wxe_helpers.h
+++ b/lib/wx/c_src/wxe_helpers.h
@@ -39,14 +39,14 @@ class wxeMetaCommand : public wxEvent
ErlDrvPDL pdl;
};
-class wxeCommand : public wxObject
+class wxeCommand
{
public:
- wxeCommand(int fc,char * cbuf,int buflen, wxe_data *);
+ wxeCommand();
virtual ~wxeCommand(); // Use Delete()
- wxeCommand * Save() {ref_count++; return this; };
- void Delete() {if(--ref_count < 1) delete this;};
+ wxeCommand * Save() { return this; };
+ void Delete();
ErlDrvTermData caller;
ErlDrvTermData port;
@@ -54,7 +54,27 @@ class wxeCommand : public wxObject
char * buffer;
int len;
int op;
- int ref_count;
+  char c_buf[64]; // 64 bytes covers ~90% of usage
+};
+
+class wxeFifo {
+ public:
+ wxeFifo(unsigned int size);
+ virtual ~wxeFifo();
+
+ void Add(int fc, char * cbuf,int buflen, wxe_data *);
+ void Append(wxeCommand *Other);
+
+ wxeCommand * Get();
+
+ void Realloc();
+
+ unsigned int m_max;
+ unsigned int m_first;
+ unsigned int m_n;
+ unsigned int m_orig_sz;
+ wxeCommand *m_q;
+ wxeCommand *m_old;
};
class intListElement {
diff --git a/lib/wx/c_src/wxe_impl.cpp b/lib/wx/c_src/wxe_impl.cpp
index 0ee52e3af2..2fd5f0c52c 100644
--- a/lib/wx/c_src/wxe_impl.cpp
+++ b/lib/wx/c_src/wxe_impl.cpp
@@ -55,8 +55,9 @@ extern ErlDrvCond * wxe_batch_locker_c;
extern ErlDrvTermData init_caller;
extern int wxe_status;
-wxList * wxe_batch = NULL;
-wxList * wxe_batch_cb_saved = NULL;
+wxeFifo * wxe_queue = NULL;
+wxeFifo * wxe_queue_cb_saved = NULL;
+
int wxe_batch_caller = 0; // inside batch if larger than 0
/* ************************************************************
@@ -68,38 +69,41 @@ void push_command(int op,char * buf,int len, wxe_data *sd)
{
/* fprintf(stderr, "Op %d %d [%ld] %d\r\n", op, (int) driver_caller(sd->port_handle),
wxe_batch->size(), wxe_batch_caller),fflush(stderr); */
- wxeCommand *Cmd = new wxeCommand(op, buf, len, sd);
erl_drv_mutex_lock(wxe_batch_locker_m);
- wxe_batch->Append(Cmd);
+ wxe_queue->Add(op, buf, len, sd);
if(wxe_batch_caller > 0) {
// wx-thread is waiting on batch end in cond_wait
erl_drv_cond_signal(wxe_batch_locker_c);
+ erl_drv_mutex_unlock(wxe_batch_locker_m);
} else {
// wx-thread is waiting gui-events
if(op == WXE_BATCH_BEGIN) {
wxe_batch_caller = 1;
}
erl_drv_cond_signal(wxe_batch_locker_c);
+ erl_drv_mutex_unlock(wxe_batch_locker_m);
wxWakeUpIdle();
}
- erl_drv_mutex_unlock(wxe_batch_locker_m);
+
}
void meta_command(int what, wxe_data *sd) {
- if(what == PING_PORT) {
+ if(what == PING_PORT && wxe_status == WXE_INITIATED) {
erl_drv_mutex_lock(wxe_batch_locker_m);
if(wxe_batch_caller > 0) {
- wxeCommand *Cmd = new wxeCommand(WXE_DEBUG_PING, NULL, 0, sd);
- wxe_batch->Append(Cmd);
+ wxe_queue->Add(WXE_DEBUG_PING, NULL, 0, sd);
erl_drv_cond_signal(wxe_batch_locker_c);
}
wxWakeUpIdle();
erl_drv_mutex_unlock(wxe_batch_locker_m);
} else {
- if(sd) {
+ if(sd && wxe_status == WXE_INITIATED) {
wxeMetaCommand Cmd(sd, what);
wxTheApp->AddPendingEvent(Cmd);
+ if(what == DELETE_PORT) {
+ free(sd);
+ }
}
}
}
@@ -121,12 +125,12 @@ bool WxeApp::OnInit()
{
global_me = new wxeMemEnv();
- wxe_batch = new wxList;
- wxe_batch_cb_saved = new wxList;
+ wxe_queue = new wxeFifo(1000);
+ wxe_queue_cb_saved = new wxeFifo(200);
cb_buff = NULL;
recurse_level = 0;
- delayed_cleanup = new wxList;
- delayed_delete = new wxList;
+ delayed_delete = new wxeFifo(10);
+ delayed_cleanup = new wxList;
wxe_ps_init2();
// wxIdleEvent::SetMode(wxIDLE_PROCESS_SPECIFIED); // Hmm printpreview doesn't work in 2.9 with this
@@ -168,7 +172,10 @@ void WxeApp::MacOpenFile(const wxString &filename) {
#endif
void WxeApp::shutdown(wxeMetaCommand& Ecmd) {
+ wxe_status = WXE_EXITING;
ExitMainLoop();
+ delete wxe_queue;
+ delete wxe_queue_cb_saved;
}
void WxeApp::dummy_close(wxEvent& Ev) {
@@ -197,15 +204,19 @@ void handle_event_callback(ErlDrvPort port, ErlDrvTermData process)
{
WxeApp * app = (WxeApp *) wxTheApp;
ErlDrvMonitor monitor;
+
+ if(wxe_status != WXE_INITIATED)
+ return;
+
// Is thread safe if pdl have been incremented
if(driver_monitor_process(port, process, &monitor) == 0) {
// Should we be able to handle commands when recursing? probably
erl_drv_mutex_lock(wxe_batch_locker_m);
- //fprintf(stderr, "\r\nCB EV Start %lu \r\n", process);fflush(stderr);
+ // fprintf(stderr, "\r\nCB EV Start %lu \r\n", process);fflush(stderr);
app->recurse_level++;
- app->dispatch_cb(wxe_batch, wxe_batch_cb_saved, process);
+ app->dispatch_cb(wxe_queue, wxe_queue_cb_saved, process);
app->recurse_level--;
- //fprintf(stderr, "CB EV done %lu \r\n", process);fflush(stderr);
+ // fprintf(stderr, "CB EV done %lu \r\n", process);fflush(stderr);
wxe_batch_caller = 0;
erl_drv_mutex_unlock(wxe_batch_locker_m);
driver_demonitor_process(port, &monitor);
@@ -214,24 +225,26 @@ void handle_event_callback(ErlDrvPort port, ErlDrvTermData process)
void WxeApp::dispatch_cmds()
{
+ if(wxe_status != WXE_INITIATED)
+ return;
erl_drv_mutex_lock(wxe_batch_locker_m);
recurse_level++;
- int level = dispatch(wxe_batch_cb_saved, 0, WXE_STORED);
- dispatch(wxe_batch, level, WXE_NORMAL);
+ int level = dispatch(wxe_queue_cb_saved, 0, WXE_STORED);
+ dispatch(wxe_queue, level, WXE_NORMAL);
recurse_level--;
wxe_batch_caller = 0;
+ if(wxe_queue->m_old) {
+ driver_free(wxe_queue->m_old);
+ wxe_queue->m_old = NULL;
+ }
erl_drv_mutex_unlock(wxe_batch_locker_m);
// Cleanup old memenv's and deleted objects
if(recurse_level == 0) {
- if(delayed_delete->size() > 0)
- for( wxList::compatibility_iterator node = delayed_delete->GetFirst();
- node;
- node = delayed_delete->GetFirst()) {
- wxeCommand *event = (wxeCommand *)node->GetData();
- delayed_delete->Erase(node);
- wxe_dispatch(*event);
- event->Delete();
- }
+ wxeCommand *curr;
+ while((curr = delayed_delete->Get()) != NULL) {
+ wxe_dispatch(*curr);
+ curr->Delete();
+ }
if(delayed_cleanup->size() > 0)
for( wxList::compatibility_iterator node = delayed_cleanup->GetFirst();
node;
@@ -241,158 +254,145 @@ void WxeApp::dispatch_cmds()
destroyMemEnv(*event);
delete event;
}
+ if(wxe_queue_cb_saved->m_old) {
+ driver_free(wxe_queue_cb_saved->m_old);
+ wxe_queue_cb_saved->m_old = NULL;
+ }
+ if(delayed_delete->m_old) {
+ driver_free(delayed_delete->m_old);
+ delayed_delete->m_old = NULL;
+ }
}
}
// Should have erl_drv_mutex_lock(wxe_batch_locker_m);
// when entering this function and it should be released
// afterwards
-int WxeApp::dispatch(wxList * batch, int blevel, int list_type)
+int WxeApp::dispatch(wxeFifo * batch, int blevel, int list_type)
{
int ping = 0;
// erl_drv_mutex_lock(wxe_batch_locker_m); must be locked already
- while(true)
- {
- if (batch->size() > 0) {
- for( wxList::compatibility_iterator node = batch->GetFirst();
- node;
- node = batch->GetFirst())
- {
- wxeCommand *event = (wxeCommand *)node->GetData();
- batch->Erase(node);
- switch(event->op) {
- case WXE_BATCH_END:
- {--blevel; }
- break;
- case WXE_BATCH_BEGIN:
- {blevel++; }
- break;
- case WXE_DEBUG_PING:
- // When in debugger we don't want to hang waiting for a BATCH_END
- // that never comes, because a breakpoint have hit.
- ping++;
- if(ping > 2)
- blevel = 0;
- break;
- case WXE_CB_RETURN:
- // erl_drv_mutex_unlock(wxe_batch_locker_m); should be called after
- // whatever cleaning is necessary
- if(event->len > 0) {
- cb_buff = (char *) driver_alloc(event->len);
- memcpy(cb_buff, event->buffer, event->len);
- }
- return blevel;
- default:
- erl_drv_mutex_unlock(wxe_batch_locker_m);
- if(event->op < OPENGL_START) {
- // fprintf(stderr, " c %d (%d) \r\n", event->op, blevel);
- wxe_dispatch(*event);
- } else {
- gl_dispatch(event->op,event->buffer,event->caller,event->bin);
- }
- erl_drv_mutex_lock(wxe_batch_locker_m);
- break;
- }
- event->Delete();
- }
- } else {
- if((list_type == WXE_STORED) || (blevel <= 0 && list_type == WXE_NORMAL)) {
- // erl_drv_mutex_unlock(wxe_batch_locker_m); should be called after
- // whatever cleaning is necessary
- return blevel;
+ wxeCommand *event;
+ while(true) {
+ while((event = batch->Get()) != NULL) {
+ switch(event->op) {
+ case -1:
+ break;
+ case WXE_BATCH_END:
+ {--blevel; }
+ break;
+ case WXE_BATCH_BEGIN:
+ {blevel++; }
+ break;
+ case WXE_DEBUG_PING:
+      // When in the debugger we don't want to hang waiting for a BATCH_END
+      // that never comes, because a breakpoint has been hit.
+ ping++;
+ if(ping > 2)
+ blevel = 0;
+ break;
+ case WXE_CB_RETURN:
+ // erl_drv_mutex_unlock(wxe_batch_locker_m); should be called after
+ // whatever cleaning is necessary
+ if(event->len > 0) {
+ cb_buff = (char *) driver_alloc(event->len);
+ memcpy(cb_buff, event->buffer, event->len);
}
- // sleep until something happens
- //fprintf(stderr, "%s:%d sleep %d %d %d %d \r\n", __FILE__, __LINE__, batch->size(), callback_returned, blevel, is_callback);fflush(stderr);
- wxe_batch_caller++;
- while(batch->size() == 0) {
- erl_drv_cond_wait(wxe_batch_locker_c, wxe_batch_locker_m);
+ event->Delete();
+ return blevel;
+ default:
+ erl_drv_mutex_unlock(wxe_batch_locker_m);
+ if(event->op < OPENGL_START) {
+ // fprintf(stderr, " c %d (%d) \r\n", event->op, blevel);
+ wxe_dispatch(*event);
+ } else {
+ gl_dispatch(event->op,event->buffer,event->caller,event->bin);
}
+ erl_drv_mutex_lock(wxe_batch_locker_m);
+ break;
}
+ event->Delete();
+ }
+ if((list_type == WXE_STORED) || (blevel <= 0 && list_type == WXE_NORMAL)) {
+ // erl_drv_mutex_unlock(wxe_batch_locker_m); should be called after
+ // whatever cleaning is necessary
+ return blevel;
}
+ // sleep until something happens
+ //fprintf(stderr, "%s:%d sleep %d %d\r\n", __FILE__, __LINE__, batch->size(), blevel);fflush(stderr);
+ wxe_batch_caller++;
+ while(batch->m_n == 0) {
+ erl_drv_cond_wait(wxe_batch_locker_c, wxe_batch_locker_m);
+ }
+ }
}
-void WxeApp::dispatch_cb(wxList * batch, wxList * temp, ErlDrvTermData process) {
- int callback_returned = 0;
+void WxeApp::dispatch_cb(wxeFifo * batch, wxeFifo * temp, ErlDrvTermData process) {
+ wxeCommand *event;
while(true) {
- if (batch->size() > 0) {
- for( wxList::compatibility_iterator node = batch->GetFirst();
- node;
- node = batch->GetFirst())
- {
- wxeCommand *event = (wxeCommand *)node->GetData();
- wxeMemEnv *memenv = getMemEnv(event->port);
- batch->Erase(node);
- // fprintf(stderr, " Ev %d %lu\r\n", event->op, event->caller);
- if(event->caller == process || // Callbacks from CB process only
- event->op == WXE_CB_START || // Event callback start change process
- event->op == WXE_CB_DIED || // Event callback process died
- // Allow connect_cb during CB i.e. msg from wxe_server.
- (memenv && event->caller == memenv->owner))
- {
- switch(event->op) {
- case WXE_BATCH_END:
- case WXE_BATCH_BEGIN:
- case WXE_DEBUG_PING:
- break;
- case WXE_CB_RETURN:
- if(event->len > 0) {
- cb_buff = (char *) driver_alloc(event->len);
- memcpy(cb_buff, event->buffer, event->len);
- } // continue
- case WXE_CB_DIED:
- callback_returned = 1;
- return;
- case WXE_CB_START:
- // CB start from now accept message from CB process only
- process = event->caller;
- break;
- default:
- erl_drv_mutex_unlock(wxe_batch_locker_m);
- size_t start=temp->GetCount();
- if(event->op < OPENGL_START) {
- // fprintf(stderr, " cb %d \r\n", event->op);
- wxe_dispatch(*event);
- } else {
- gl_dispatch(event->op,event->buffer,event->caller,event->bin);
- }
- erl_drv_mutex_lock(wxe_batch_locker_m);
- if(temp->GetCount() > start) {
- // We have recursed dispatch_cb and messages for this
- // callback may be saved on temp list move them
- // to orig list
- for(wxList::compatibility_iterator node = temp->Item(start);
- node;
- node = node->GetNext()) {
- wxeCommand *ev = (wxeCommand *)node->GetData();
- if(ev->caller == process) {
- batch->Append(ev);
- temp->Erase(node);
- }
- }
- }
- if(callback_returned)
- return;
- break;
+ while((event = batch->Get()) != NULL) {
+ wxeMemEnv *memenv = getMemEnv(event->port);
+ // fprintf(stderr, " Ev %d %lu\r\n", event->op, event->caller);
+ if(event->caller == process || // Callbacks from CB process only
+ event->op == WXE_CB_START || // Event callback start change process
+ event->op == WXE_CB_DIED || // Event callback process died
+ // Allow connect_cb during CB i.e. msg from wxe_server.
+ (memenv && event->caller == memenv->owner)) {
+ switch(event->op) {
+ case -1:
+ case WXE_BATCH_END:
+ case WXE_BATCH_BEGIN:
+ case WXE_DEBUG_PING:
+ break;
+ case WXE_CB_RETURN:
+ if(event->len > 0) {
+ cb_buff = (char *) driver_alloc(event->len);
+ memcpy(cb_buff, event->buffer, event->len);
+ } // continue
+ case WXE_CB_DIED:
+ event->Delete();
+ return;
+ case WXE_CB_START:
+ // CB start from now accept message from CB process only
+ process = event->caller;
+ break;
+ default:
+ erl_drv_mutex_unlock(wxe_batch_locker_m);
+ size_t start=temp->m_n;
+ if(event->op < OPENGL_START) {
+ // fprintf(stderr, " cb %d \r\n", event->op);
+ wxe_dispatch(*event);
+ } else {
+ gl_dispatch(event->op,event->buffer,event->caller,event->bin);
+ }
+ erl_drv_mutex_lock(wxe_batch_locker_m);
+ if(temp->m_n > start) {
+	  // We have recursed into dispatch_cb and messages for this
+	  // callback may have been saved on the temp list; move them
+	  // back to the original list
+ for(unsigned int i=start; i < temp->m_n; i++) {
+ wxeCommand *ev = &temp->m_q[(temp->m_first+i) % temp->m_max];
+ if(ev->caller == process) {
+ batch->Append(ev);
}
- event->Delete();
- } else {
- // fprintf(stderr, " save %d \r\n", event->op);
- temp->Append(event);
+ }
}
+ break;
}
- } else {
- if(callback_returned) {
- return;
- }
- // sleep until something happens
- //fprintf(stderr, "%s:%d sleep %d %d %d %d \r\n", __FILE__, __LINE__, batch->size(), callback_returned, blevel, is_callback);fflush(stderr);
- while(batch->size() == 0) {
- erl_drv_cond_wait(wxe_batch_locker_c, wxe_batch_locker_m);
+ event->Delete();
+ } else {
+ // fprintf(stderr, " save %d %lu\r\n", event->op, event->caller);
+ temp->Append(event);
}
}
+ // sleep until something happens
+ // fprintf(stderr, "%s:%d sleep %d %d\r\n", __FILE__, __LINE__,
+ // batch->m_n, temp->m_n);fflush(stderr);
+ while(batch->m_n == 0) {
+ erl_drv_cond_wait(wxe_batch_locker_c, wxe_batch_locker_m);
+ }
}
}
-
/* Memory handling */
void WxeApp::newMemEnv(wxeMetaCommand& Ecmd) {
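
For context on how these reworked loops interact with the emulator thread: the queue mutex is taken only while entries are popped or appended, and released around the actual command execution, so push_command() can keep adding work while the wx thread is busy. A simplified, hypothetical sketch of that pattern follows (it assumes the wxeFifo/wxeCommand declarations from wxe_helpers.h above and is not the driver code itself):

    static void drain_queue(wxeFifo *queue, ErlDrvMutex *lock)
    {
        wxeCommand *cmd;
        erl_drv_mutex_lock(lock);
        while ((cmd = queue->Get()) != NULL) {
            erl_drv_mutex_unlock(lock);   /* producers may enqueue while we run */
            /* ... dispatch the command here; it may be slow or recurse ... */
            erl_drv_mutex_lock(lock);
            cmd->Delete();                /* resets op so the slot can be reused */
        }
        erl_drv_mutex_unlock(lock);
    }
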
diff --git a/lib/wx/c_src/wxe_impl.h b/lib/wx/c_src/wxe_impl.h
index 57bf2e2dba..b251d5f0f9 100644
--- a/lib/wx/c_src/wxe_impl.h
+++ b/lib/wx/c_src/wxe_impl.h
@@ -46,7 +46,8 @@ typedef wxString wxeLocaleC;
#define WXE_NOT_INITIATED 0
#define WXE_INITIATED 1
-#define WXE_EXITED 2
+#define WXE_EXITING 2
+#define WXE_EXITED 3
#define WXE_ERROR -1
void send_msg(const char *, const wxString *); // For debugging and error msgs
@@ -60,8 +61,8 @@ public:
#endif
void shutdown(wxeMetaCommand& event);
- int dispatch(wxList *, int, int);
- void dispatch_cb(wxList * batch, wxList * temp, ErlDrvTermData process);
+ int dispatch(wxeFifo *, int, int);
+ void dispatch_cb(wxeFifo * batch, wxeFifo * temp, ErlDrvTermData process);
void wxe_dispatch(wxeCommand& event);
@@ -93,7 +94,7 @@ public:
int recurse_level;
wxList * delayed_cleanup;
- wxList * delayed_delete;
+ wxeFifo * delayed_delete;
// Temp container for callbacks
char *cb_buff;
int cb_len;
diff --git a/lib/wx/configure.in b/lib/wx/configure.in
index 4c4d4f41a8..fbdddb9220 100644
--- a/lib/wx/configure.in
+++ b/lib/wx/configure.in
@@ -441,19 +441,26 @@ else
else
CWXWIN_PROG=`cygpath -d "$PROGRAMFILES" | cygpath -f - 2>/dev/null`
fi
- CWXWIN3=$CWXWIN_PROG/wxWidgets-?.*.*
- CWXWIN4=$CWXWIN_PROG/wxMSW-?.*.*
- CWX_DOCUMENTED="/opt/local/pgm/wxMSW-?.*.* /opt/local/pgm/wxWidgets-?.*.*"
+
+ CWXWIN3="$CWXWIN_PROG/wxWidgets-3.*.* $CWXWIN_PROG/wxWidgets-2.*.*"
+ CWXWIN4="$CWXWIN_PROG/wxMSW-3.*.* $CWXWIN_PROG/wxMSW-2.*.*"
+
+ DOC_OPT=/opt/local/pgm
+ CWX_DOCUMENTED="$DOC_OPT/wxWidgets-2.*.* $DOC_OPT/wxMSW-2.*.*"
+ CWX_DOCUMENTED="$DOC_OPT/wxWidgets-3.*.* $DOC_OPT/wxMSW-3.*.* $CWX_DOCUMENTED"
+
case $ac_cv_sizeof_void_p in
8)
- CWX_DOCUMENTED="/opt/local64/pgm/wxMSW-?.*.* /opt/local64/pgm/wxWidgets-?.*.* $CWX_DOCUMENTED"
+ DOC_OPT64=/opt/local64/pgm
+ CWX_DOCUMENTED="$DOC_OPT64/wxWidgets-2.*.* $DOC_OPT64/wxMSW-2.*.* $CWX_DOCUMENTED"
+ CWX_DOCUMENTED="$DOC_OPT64/wxWidgets-3.*.* $DOC_OPT64/wxMSW-3.*.* $CWX_DOCUMENTED"
;;
*)
true
;;
- esac
-
- CWXPATH="$CWXWIN0 $CWXWIN1 $CWXWIN2 $CWX_DOCUMENTED $CWXWIN3.* $CWXWIN4.*"
+ esac
+
+ CWXPATH="$CWXWIN0 $CWXWIN1 $CWXWIN2 $CWX_DOCUMENTED $CWXWIN3 $CWXWIN4"
for dir in $CWXPATH; do
AC_MSG_NOTICE(Checking: [$dir])
diff --git a/lib/wx/doc/src/notes.xml b/lib/wx/doc/src/notes.xml
index 52087398e7..682ab48ca0 100644
--- a/lib/wx/doc/src/notes.xml
+++ b/lib/wx/doc/src/notes.xml
@@ -31,6 +31,21 @@
<p>This document describes the changes made to the wxErlang
application.</p>
+<section><title>Wx 1.3.3</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+	    Fix a timing-related crash during wx application stop.</p>
+ <p>
+ Own Id: OTP-12374</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Wx 1.3.2</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/wx/examples/demo/demo.erl b/lib/wx/examples/demo/demo.erl
index 2f560096f5..65fb05cd94 100644
--- a/lib/wx/examples/demo/demo.erl
+++ b/lib/wx/examples/demo/demo.erl
@@ -256,9 +256,17 @@ handle_event(#wx{id = Id,
wx_misc:launchDefaultBrowser("http://www.erlang.org/doc/apps/wx/part_frame.html"),
{noreply, State};
?wxID_ABOUT ->
+ WxWVer = io_lib:format("~p.~p.~p.~p",
+ [?wxMAJOR_VERSION, ?wxMINOR_VERSION,
+ ?wxRELEASE_NUMBER, ?wxSUBRELEASE_NUMBER]),
+ application:load(wx),
+ {ok, WxVsn} = application:get_key(wx, vsn),
AboutString =
"Demo of various widgets\n"
- "Authors: Olle & Dan",
+ "Authors: Olle & Dan\n\n" ++
+ "Frontend: wx-" ++ WxVsn ++
+ "\nBackend: wxWidgets-" ++ lists:flatten(WxWVer),
+
wxMessageDialog:showModal(wxMessageDialog:new(State#state.win, AboutString,
[{style,
?wxOK bor
diff --git a/lib/wx/examples/demo/demo_html_tagger.erl b/lib/wx/examples/demo/demo_html_tagger.erl
index 7bb6736fdc..b119f0e226 100644
--- a/lib/wx/examples/demo/demo_html_tagger.erl
+++ b/lib/wx/examples/demo/demo_html_tagger.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2009-2013. All Rights Reserved.
+%% Copyright Ericsson AB 2009-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -267,8 +267,10 @@ normalize_toks(Toks) ->
normalize_tok(Tok) ->
%% this is the portable way ...
- [{_,Type},{_,Line},{_,Col},{_,Txt}] =
- erl_scan:token_info(Tok, [category,line,column,text]),
+ Type = erl_scan:category(Tok),
+ Line = erl_scan:line(Tok),
+ Col = erl_scan:column(Tok),
+ Txt = erl_scan:text(Tok),
Val = {Type,{Line,Col},Txt},
%% io:format("here:X=~p ~p~n",[Tok,Val]),
Val.
diff --git a/lib/wx/include/wx.hrl b/lib/wx/include/wx.hrl
index 348daf64ce..97cb689374 100644
--- a/lib/wx/include/wx.hrl
+++ b/lib/wx/include/wx.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2014. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2015. All Rights Reserved.
%%
%% The contents of this file are subject to the Erlang Public License,
%% Version 1.1, (the "License"); you may not use this file except in
@@ -54,7 +54,9 @@
-type wxListEventType() :: command_list_begin_drag | command_list_begin_rdrag | command_list_begin_label_edit | command_list_end_label_edit | command_list_delete_item | command_list_delete_all_items | command_list_key_down | command_list_insert_item | command_list_col_click | command_list_col_right_click | command_list_col_begin_drag | command_list_col_dragging | command_list_col_end_drag | command_list_item_selected | command_list_item_deselected | command_list_item_right_click | command_list_item_middle_click | command_list_item_activated | command_list_item_focused | command_list_cache_hint.
-type wxList() :: #wxList{}. %% Callback event: {@link wxListEvent}
--record(wxNotebook, {type :: wxNotebookEventType()}). %% Callback event: {@link wxNotebookEvent}
+-record(wxNotebook,{type :: wxNotebookEventType(), %% Callback event: {@link wxNotebookEvent}
+ nSel :: integer(),
+ nOldSel :: integer()}).
-type wxNotebookEventType() :: command_notebook_page_changed | command_notebook_page_changing.
-type wxNotebook() :: #wxNotebook{}. %% Callback event: {@link wxNotebookEvent}
@@ -86,7 +88,9 @@
-type wxWindowDestroyEventType() :: destroy.
-type wxWindowDestroy() :: #wxWindowDestroy{}. %% Callback event: {@link wxWindowDestroyEvent}
--record(wxCalendar, {type :: wxCalendarEventType()}). %% Callback event: {@link wxCalendarEvent}
+-record(wxCalendar,{type :: wxCalendarEventType(), %% Callback event: {@link wxCalendarEvent}
+ wday :: wx:wx_enum(),
+ date :: wx:wx_datetime()}).
-type wxCalendarEventType() :: calendar_sel_changed | calendar_day_changed | calendar_month_changed | calendar_year_changed | calendar_doubleclicked | calendar_weekday_clicked.
-type wxCalendar() :: #wxCalendar{}. %% Callback event: {@link wxCalendarEvent}
@@ -100,15 +104,19 @@
-type wxScrollEventType() :: scroll_top | scroll_bottom | scroll_lineup | scroll_linedown | scroll_pageup | scroll_pagedown | scroll_thumbtrack | scroll_thumbrelease | scroll_changed.
-type wxScroll() :: #wxScroll{}. %% Callback event: {@link wxScrollEvent}
--record(wxMenu, {type :: wxMenuEventType()}). %% Callback event: {@link wxMenuEvent}
+-record(wxMenu,{type :: wxMenuEventType(), %% Callback event: {@link wxMenuEvent}
+ menuId :: integer(),
+ menu :: wxMenu:wxMenu()}).
-type wxMenuEventType() :: menu_open | menu_close | menu_highlight.
-type wxMenu() :: #wxMenu{}. %% Callback event: {@link wxMenuEvent}
--record(wxContextMenu, {type :: wxContextMenuEventType()}). %% Callback event: {@link wxContextMenuEvent}
+-record(wxContextMenu,{type :: wxContextMenuEventType(), %% Callback event: {@link wxContextMenuEvent}
+ pos :: {X::integer(), Y::integer()}}).
-type wxContextMenuEventType() :: context_menu.
-type wxContextMenu() :: #wxContextMenu{}. %% Callback event: {@link wxContextMenuEvent}
--record(wxShow, {type :: wxShowEventType()}). %% Callback event: {@link wxShowEvent}
+-record(wxShow,{type :: wxShowEventType(), %% Callback event: {@link wxShowEvent}
+ show :: boolean()}).
-type wxShowEventType() :: show.
-type wxShow() :: #wxShow{}. %% Callback event: {@link wxShowEvent}
@@ -117,7 +125,10 @@
-type wxSpinEventType() :: command_spinctrl_updated | spin_up | spin_down | spin.
-type wxSpin() :: #wxSpin{}. %% Callback event: {@link wxSpinEvent}
--record(wxSetCursor, {type :: wxSetCursorEventType()}). %% Callback event: {@link wxSetCursorEvent}
+-record(wxSetCursor,{type :: wxSetCursorEventType(), %% Callback event: {@link wxSetCursorEvent}
+ x :: integer(),
+ y :: integer(),
+ cursor :: wxCursor:wxCursor()}).
-type wxSetCursorEventType() :: set_cursor.
-type wxSetCursor() :: #wxSetCursor{}. %% Callback event: {@link wxSetCursorEvent}
@@ -126,7 +137,9 @@
-type wxFontPickerEventType() :: command_fontpicker_changed.
-type wxFontPicker() :: #wxFontPicker{}. %% Callback event: {@link wxFontPickerEvent}
--record(wxScrollWin, {type :: wxScrollWinEventType()}). %% Callback event: {@link wxScrollWinEvent}
+-record(wxScrollWin,{type :: wxScrollWinEventType(), %% Callback event: {@link wxScrollWinEvent}
+ commandInt :: integer(),
+ extraLong :: integer()}).
-type wxScrollWinEventType() :: scrollwin_top | scrollwin_bottom | scrollwin_lineup | scrollwin_linedown | scrollwin_pageup | scrollwin_pagedown | scrollwin_thumbtrack | scrollwin_thumbrelease.
-type wxScrollWin() :: #wxScrollWin{}. %% Callback event: {@link wxScrollWinEvent}
@@ -147,7 +160,8 @@
-type wxFileDirPickerEventType() :: command_filepicker_changed | command_dirpicker_changed.
-type wxFileDirPicker() :: #wxFileDirPicker{}. %% Callback event: {@link wxFileDirPickerEvent}
--record(wxFocus, {type :: wxFocusEventType()}). %% Callback event: {@link wxFocusEvent}
+-record(wxFocus,{type :: wxFocusEventType(), %% Callback event: {@link wxFocusEvent}
+ win :: wxWindow:wxWindow()}).
-type wxFocusEventType() :: set_focus | kill_focus.
-type wxFocus() :: #wxFocus{}. %% Callback event: {@link wxFocusEvent}
@@ -225,7 +239,8 @@
-type wxSizeEventType() :: size.
-type wxSize() :: #wxSize{}. %% Callback event: {@link wxSizeEvent}
--record(wxIconize, {type :: wxIconizeEventType()}). %% Callback event: {@link wxIconizeEvent}
+-record(wxIconize,{type :: wxIconizeEventType(), %% Callback event: {@link wxIconizeEvent}
+ iconized :: boolean()}).
-type wxIconizeEventType() :: iconize.
-type wxIconize() :: #wxIconize{}. %% Callback event: {@link wxIconizeEvent}
@@ -289,7 +304,12 @@
-type wxCommandEventType() :: command_button_clicked | command_checkbox_clicked | command_choice_selected | command_listbox_selected | command_listbox_doubleclicked | command_text_updated | command_text_enter | command_menu_selected | command_slider_updated | command_radiobox_selected | command_radiobutton_selected | command_scrollbar_updated | command_vlbox_selected | command_combobox_selected | command_tool_rclicked | command_tool_enter | command_checklistbox_toggled | command_togglebutton_clicked | command_left_click | command_left_dclick | command_right_click | command_set_focus | command_kill_focus | command_enter.
-type wxCommand() :: #wxCommand{}. %% Callback event: {@link wxCommandEvent}
--record(wxJoystick, {type :: wxJoystickEventType()}). %% Callback event: {@link wxJoystickEvent}
+-record(wxJoystick,{type :: wxJoystickEventType(), %% Callback event: {@link wxJoystickEvent}
+ pos :: {X::integer(), Y::integer()},
+ zPosition :: integer(),
+ buttonChange :: integer(),
+ buttonState :: integer(),
+ joyStick :: integer()}).
-type wxJoystickEventType() :: joy_button_down | joy_button_up | joy_move | joy_zmove.
-type wxJoystick() :: #wxJoystick{}. %% Callback event: {@link wxJoystickEvent}
@@ -297,7 +317,9 @@
-type wxQueryNewPaletteEventType() :: query_new_palette.
-type wxQueryNewPalette() :: #wxQueryNewPalette{}. %% Callback event: {@link wxQueryNewPaletteEvent}
--record(wxMove, {type :: wxMoveEventType()}). %% Callback event: {@link wxMoveEvent}
+-record(wxMove,{type :: wxMoveEventType(), %% Callback event: {@link wxMoveEvent}
+ pos :: {X::integer(), Y::integer()},
+ rect :: {X::integer(), Y::integer(), W::integer(), H::integer()}}).
-type wxMoveEventType() :: move.
-type wxMove() :: #wxMove{}. %% Callback event: {@link wxMoveEvent}
@@ -1883,9 +1905,9 @@
-define(wxCURSOR_WAIT, 24).
-define(wxCURSOR_WATCH, 25).
-define(wxCURSOR_BLANK, 26).
--define(wxCURSOR_DEFAULT, 27).
--define(wxCURSOR_ARROWWAIT, 28).
--define(wxCURSOR_MAX, 29).
+-define(wxCURSOR_DEFAULT, wxe_util:get_const(wxCURSOR_DEFAULT)).
+-define(wxCURSOR_ARROWWAIT, wxe_util:get_const(wxCURSOR_ARROWWAIT)).
+-define(wxCURSOR_MAX, wxe_util:get_const(wxCURSOR_MAX)).
% From "generic_2laywin.h"
-define(wxLAYOUT_QUERY, 256).
-define(wxLAYOUT_MRU_LENGTH, 16).
diff --git a/lib/wx/src/wxe_server.erl b/lib/wx/src/wxe_server.erl
index 465b9da2e0..153e2475ba 100644
--- a/lib/wx/src/wxe_server.erl
+++ b/lib/wx/src/wxe_server.erl
@@ -223,14 +223,18 @@ handle_connect(Object, #evh{handler=undefined, cb=Callback} = EvData0,
Error ->
{reply, Error, State0}
end;
-handle_connect(Object, EvData=#evh{handler=Handler},
+handle_connect(Object, EvData=#evh{handler=Handler},
From, State0 = #state{users=Users}) ->
%% Correct process is already listening just register it
put(Handler, From),
- User0 = #user{events=Listeners0} = gb_trees:get(From, Users),
- User = User0#user{events=[{Object,EvData}|Listeners0]},
- State = State0#state{users=gb_trees:update(From, User, Users)},
- {reply, ok, State}.
+ case gb_trees:lookup(From, Users) of
+ {value, User0 = #user{events=Listeners0}} ->
+ User = User0#user{events=[{Object,EvData}|Listeners0]},
+ State = State0#state{users=gb_trees:update(From, User, Users)},
+ {reply, ok, State};
+ none -> %% We are closing up the shop
+ {reply, {error, terminating}, State0}
+ end.
invoke_cb({{Ev=#wx{}, Ref=#wx_ref{}}, FunId,_}, _S) ->
%% Event callbacks
diff --git a/lib/wx/test/wx_class_SUITE.erl b/lib/wx/test/wx_class_SUITE.erl
index b127e6b71d..45ab0f3a32 100644
--- a/lib/wx/test/wx_class_SUITE.erl
+++ b/lib/wx/test/wx_class_SUITE.erl
@@ -231,8 +231,15 @@ staticBoxSizer(Config) ->
clipboard(TestInfo) when is_atom(TestInfo) -> wx_test_lib:tc_info(TestInfo);
-clipboard(_Config) ->
- wx:new(),
+clipboard(Config) ->
+ Wx = wx:new(),
+ Frame = wxFrame:new(Wx, ?wxID_ANY, "Main Frame"),
+ Ctrl = wxTextCtrl:new(Frame, ?wxID_ANY, [{size, {600,400}}, {style, ?wxTE_MULTILINE}]),
+ wxTextCtrl:connect(Ctrl, command_text_copy, [{skip, true}]),
+ wxTextCtrl:connect(Ctrl, command_text_cut, [{skip, true}]),
+ wxTextCtrl:connect(Ctrl, command_text_paste, [{skip, true}]),
+ wxWindow:show(Frame),
+
CB = ?mt(wxClipboard, wxClipboard:get()),
wxClipboard:usePrimarySelection(CB),
?m(false, wx:is_null(CB)),
@@ -271,7 +278,8 @@ clipboard(_Config) ->
?log("Flushing ~n",[]),
wxClipboard:flush(CB),
?log("Stopping ~n",[]),
- ok.
+ wx_test_lib:wx_destroy(Frame,Config).
+
helpFrame(TestInfo) when is_atom(TestInfo) -> wx_test_lib:tc_info(TestInfo);
helpFrame(Config) ->
diff --git a/lib/wx/test/wx_event_SUITE.erl b/lib/wx/test/wx_event_SUITE.erl
index 2c6c59bb55..6bcd88e4fb 100644
--- a/lib/wx/test/wx_event_SUITE.erl
+++ b/lib/wx/test/wx_event_SUITE.erl
@@ -336,12 +336,14 @@ connect_in_callback(Config) ->
end}]),
wxWindow:show(F1),
receive
- {continue, F1} -> Tester ! {continue, F1}
+ {continue, F1} ->
+ true = wxFrame:disconnect(F1, size),
+ Tester ! {continue, F1}
end
end,
- wxFrame:connect(Frame,size,
+ wxFrame:connect(Frame,show,
[{callback,
- fun(#wx{event=#wxSize{}},_SizeEv) ->
+ fun(#wx{event=#wxShow{}},_SizeEv) ->
io:format("Frame got size~n",[]),
spawn(TestWindow)
end}]),
@@ -377,25 +379,29 @@ recursive(Config) ->
Frame = wxFrame:new(Wx, ?wxID_ANY, "Connect in callback"),
Panel = wxPanel:new(Frame, []),
Sz = wxBoxSizer:new(?wxVERTICAL),
- ListBox = wxListBox:new(Panel, ?wxID_ANY, [{choices, ["foo", "bar", "baz"]}]),
- wxSizer:add(Sz, ListBox, [{proportion, 1},{flag, ?wxEXPAND}]),
- wxWindow:setSizer(Panel, Sz),
- wxListBox:connect(ListBox, command_listbox_selected,
- [{callback,
- fun(#wx{event=#wxCommand{commandInt=Id}}, _) ->
- io:format("Selected ~p~n",[Id])
- end}]),
- wxListBox:setSelection(ListBox, 0),
- wxListBox:connect(ListBox, size,
- [{callback,
- fun(#wx{event=#wxSize{}}, _) ->
- io:format("Size init ~n",[]),
- case wxListBox:getCount(ListBox) > 0 of
- true -> wxListBox:delete(ListBox, 0);
- false -> ok
- end,
- io:format("Size done ~n",[])
- end}]),
+ Ctrl1 = wxTextCtrl:new(Panel, ?wxID_ANY, [{size, {300, -1}}]),
+ Ctrl2 = wxTextCtrl:new(Panel, ?wxID_ANY, [{size, {300, -1}}]),
+ wxSizer:add(Sz, Ctrl1, [{proportion, 1},{flag, ?wxEXPAND}]),
+ wxSizer:add(Sz, Ctrl2, [{proportion, 1},{flag, ?wxEXPAND}]),
+ wxWindow:setSizerAndFit(Panel, Sz),
+
+ CB1 = fun(#wx{event=#wxCommand{cmdString=String}}, _) ->
+ io:format(" CB1: ~s~n",[String]),
+ wxTextCtrl:setValue(Ctrl2, io_lib:format("from CB1 ~s", [String]))
+ end,
+ CB2 = fun(#wx{event=#wxCommand{cmdString=String}}, _) ->
+ io:format(" CB2: ~s~n",[String]),
+ ok
+ end,
+ wxTextCtrl:connect(Ctrl1, command_text_updated, [{callback,CB1}]),
+ wxTextCtrl:connect(Ctrl2, command_text_updated, [{callback,CB2}]),
+ wxFrame:connect(Frame, size,
+ [{callback,
+ fun(#wx{event=#wxSize{size=Size}}, _) ->
+ io:format("Size init: ~s ~n",[wxTextCtrl:getValue(Ctrl2)]),
+ wxTextCtrl:setValue(Ctrl1, io_lib:format("Size ~p", [Size])),
+ io:format("Size done: ~s ~n",[wxTextCtrl:getValue(Ctrl2)])
+ end}]),
wxFrame:show(Frame),
wx_test_lib:flush(),
diff --git a/lib/wx/vsn.mk b/lib/wx/vsn.mk
index 78c24ec093..09fb9f384c 100644
--- a/lib/wx/vsn.mk
+++ b/lib/wx/vsn.mk
@@ -1 +1 @@
-WX_VSN = 1.3.2
+WX_VSN = 1.4
diff --git a/lib/xmerl/src/xmerl.app.src b/lib/xmerl/src/xmerl.app.src
index 45cfe9d250..aed9cf176f 100644
--- a/lib/xmerl/src/xmerl.app.src
+++ b/lib/xmerl/src/xmerl.app.src
@@ -40,5 +40,5 @@
{registered, []},
{env, []},
{applications, [kernel, stdlib]},
- {runtime_dependencies, ["stdlib-2.0","kernel-3.0","erts-6.0"]}
+ {runtime_dependencies, ["stdlib-2.5","kernel-3.0","erts-6.0"]}
]}.
diff --git a/lib/xmerl/src/xmerl.erl b/lib/xmerl/src/xmerl.erl
index 01af183eef..88eaefc492 100644
--- a/lib/xmerl/src/xmerl.erl
+++ b/lib/xmerl/src/xmerl.erl
@@ -313,7 +313,7 @@ apply_cb([M|Ms], F, Df, Args, A, Ms0) ->
true -> apply(M, F, Args);
false -> apply_cb(Ms, F, Df, Args, A, Ms0)
end;
-apply_cb([], Df, Df, Args, A, _Ms0) ->
+apply_cb([], Df, Df, Args, _A, _Ms0) ->
exit({unknown_tag, {Df, Args}});
apply_cb([], F, Df, Args, A, Ms0) ->
apply_cb(Ms0, Df, Df, [F|Args], A+1).
diff --git a/lib/xmerl/vsn.mk b/lib/xmerl/vsn.mk
index aab2a37d6c..1ed230316f 100644
--- a/lib/xmerl/vsn.mk
+++ b/lib/xmerl/vsn.mk
@@ -1 +1 @@
-XMERL_VSN = 1.3.7
+XMERL_VSN = 1.3.8